// reth_trie_sparse/parallel.rs

1#[cfg(feature = "trie-debug")]
2use crate::debug_recorder::{LeafUpdateRecord, ProofTrieNodeRecord, RecordedOp, TrieDebugRecorder};
3use crate::{
4    lower::LowerSparseSubtrie, provider::TrieNodeProvider, LeafLookup, LeafLookupError,
5    RlpNodeStackItem, SparseNode, SparseNodeState, SparseNodeType, SparseTrie, SparseTrieUpdates,
6};
7use alloc::{borrow::Cow, boxed::Box, vec, vec::Vec};
8use alloy_primitives::{
9    map::{Entry, HashMap, HashSet},
10    B256, U256,
11};
12use alloy_rlp::Decodable;
13use alloy_trie::{BranchNodeCompact, TrieMask, EMPTY_ROOT_HASH};
14use core::cmp::{Ord, Ordering, PartialOrd};
15use reth_execution_errors::{SparseTrieError, SparseTrieErrorKind, SparseTrieResult};
16#[cfg(feature = "metrics")]
17use reth_primitives_traits::FastInstant as Instant;
18use reth_trie_common::{
19    prefix_set::{PrefixSet, PrefixSetMut},
20    BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles,
21    ProofTrieNodeV2, RlpNode, TrieNodeV2,
22};
23use smallvec::SmallVec;
24use tracing::{instrument, trace};
25
/// The maximum length of a path, in nibbles, which belongs to the upper subtrie of a
/// [`ParallelSparseTrie`]. All longer paths belong to a lower subtrie.
pub const UPPER_TRIE_MAX_DEPTH: usize = 2;

/// Number of lower subtries which are managed by the [`ParallelSparseTrie`].
///
/// One subtrie per possible [`UPPER_TRIE_MAX_DEPTH`]-nibble prefix, i.e. `16^2 = 256`.
pub const NUM_LOWER_SUBTRIES: usize = 16usize.pow(UPPER_TRIE_MAX_DEPTH as u32);
32
/// Configuration for controlling when parallelism is enabled in [`ParallelSparseTrie`] operations.
///
/// The derived `Default` sets both thresholds to zero, which imposes no minimum input size
/// before parallel processing may be used.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct ParallelismThresholds {
    /// Minimum number of nodes to reveal before parallel processing is enabled.
    /// When `reveal_nodes` has fewer nodes than this threshold, they will be processed serially.
    pub min_revealed_nodes: usize,
    /// Minimum number of changed keys (prefix set length) before parallel processing is enabled
    /// for hash updates. When updating subtrie hashes with fewer changed keys than this threshold,
    /// the updates will be processed serially.
    pub min_updated_nodes: usize,
}
44
45/// A revealed sparse trie with subtries that can be updated in parallel.
46///
47/// ## Structure
48///
49/// The trie is divided into two tiers for efficient parallel processing:
50/// - **Upper subtrie**: Contains nodes with paths shorter than [`UPPER_TRIE_MAX_DEPTH`]
51/// - **Lower subtries**: An array of [`NUM_LOWER_SUBTRIES`] subtries, each handling nodes with
52///   paths of at least [`UPPER_TRIE_MAX_DEPTH`] nibbles
53///
54/// Node placement is determined by path depth:
55/// - Paths with < [`UPPER_TRIE_MAX_DEPTH`] nibbles go to the upper subtrie
56/// - Paths with >= [`UPPER_TRIE_MAX_DEPTH`] nibbles go to lower subtries, indexed by their first
57///   [`UPPER_TRIE_MAX_DEPTH`] nibbles.
58///
59/// Each lower subtrie tracks its root via the `path` field, which represents the shortest path
60/// in that subtrie. This path will have at least [`UPPER_TRIE_MAX_DEPTH`] nibbles, but may be
61/// longer when an extension node in the upper trie "reaches into" the lower subtrie. For example,
62/// if the upper trie has an extension from `0x1` to `0x12345`, then the lower subtrie for prefix
63/// `0x12` will have its root at path `0x12345` rather than at `0x12`.
64///
65/// ## Node Revealing
66///
67/// The trie uses lazy loading to efficiently handle large state tries. Nodes can be:
68/// - **Blind nodes**: Stored as hashes on [`SparseNode::Branch::blinded_hashes`]
69/// - **Revealed nodes**: Fully loaded nodes (Branch, Extension, Leaf) with complete structure
70///
71/// Note: An empty trie contains an `EmptyRoot` node at the root path, rather than no nodes at all.
72/// A trie with no nodes is blinded, its root may be `EmptyRoot` or some other node type.
73///
74/// Revealing is generally done using pre-loaded node data provided to via `reveal_nodes`. In
75/// certain cases, such as edge-cases when updating/removing leaves, nodes are revealed on-demand.
76///
77/// ## Leaf Operations
78///
79/// **Update**: When updating a leaf, the new value is stored in the appropriate subtrie's values
80/// map. If the leaf is new, the trie structure is updated by walking to the leaf from the root,
81/// creating necessary intermediate branch nodes.
82///
83/// **Removal**: Leaf removal may require parent node modifications. The algorithm walks up the
84/// trie, removing nodes that become empty and converting single-child branches to extensions.
85///
86/// During leaf operations the overall structure of the trie may change, causing nodes to be moved
87/// from the upper to lower trie or vice-versa.
88///
89/// The `prefix_set` is modified during both leaf updates and removals to track changed leaf paths.
90///
91/// ## Root Hash Calculation
92///
93/// Root hash computation follows a bottom-up approach:
94/// 1. Update hashes for all modified lower subtries (can be done in parallel)
95/// 2. Update hashes for the upper subtrie (which may reference lower subtrie hashes)
96/// 3. Calculate the final root hash from the upper subtrie's root node
97///
98/// The `prefix_set` tracks which paths have been modified, enabling incremental updates instead of
99/// recalculating the entire trie.
100///
101/// ## Invariants
102///
103/// - Each leaf entry in the `subtries` and `upper_trie` collection must have a corresponding entry
104///   in `values` collection. If the root node is a leaf, it must also have an entry in `values`.
105/// - All keys in `values` collection are full leaf paths.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ParallelSparseTrie {
    /// This contains the trie nodes for the upper part of the trie.
    ///
    /// A freshly-constructed trie holds a single [`SparseNode::Empty`] at the root (empty) path
    /// here; see the `Default` impl.
    upper_subtrie: Box<SparseSubtrie>,
    /// An array containing the subtries at the second level of the trie.
    ///
    /// Indexed by the first [`UPPER_TRIE_MAX_DEPTH`] nibbles of a path; each entry starts out
    /// blind (`LowerSparseSubtrie::Blind(None)`) and is only materialized when revealed.
    lower_subtries: Box<[LowerSparseSubtrie; NUM_LOWER_SUBTRIES]>,
    /// Set of prefixes (key paths) that have been marked as updated.
    /// This is used to track which parts of the trie need to be recalculated.
    prefix_set: PrefixSetMut,
    /// Optional tracking of trie updates for later use. `None` unless enabled via
    /// `set_updates(true)`.
    updates: Option<SparseTrieUpdates>,
    /// Branch node masks containing `tree_mask` and `hash_mask` for each path.
    /// - `tree_mask`: When a bit is set, the corresponding child subtree is stored in the
    ///   database.
    /// - `hash_mask`: When a bit is set, the corresponding child is stored as a hash in the
    ///   database.
    branch_node_masks: BranchNodeMasksMap,
    /// Reusable buffer pool used for collecting [`SparseTrieUpdatesAction`]s during hash
    /// computations.
    update_actions_buffers: Vec<Vec<SparseTrieUpdatesAction>>,
    /// Thresholds controlling when parallelism is enabled for different operations.
    parallelism_thresholds: ParallelismThresholds,
    /// Tracks heat of lower subtries for smart pruning decisions.
    /// Hot subtries are skipped during pruning to keep frequently-used data revealed.
    subtrie_heat: SubtrieModifications,
    /// Metrics for the parallel sparse trie.
    #[cfg(feature = "metrics")]
    metrics: crate::metrics::ParallelSparseTrieMetrics,
    /// Debug recorder for tracking mutating operations.
    #[cfg(feature = "trie-debug")]
    debug_recorder: TrieDebugRecorder,
}
138
139impl Default for ParallelSparseTrie {
140    fn default() -> Self {
141        Self {
142            upper_subtrie: Box::new(SparseSubtrie {
143                nodes: HashMap::from_iter([(Nibbles::default(), SparseNode::Empty)]),
144                ..Default::default()
145            }),
146            lower_subtries: Box::new(
147                [const { LowerSparseSubtrie::Blind(None) }; NUM_LOWER_SUBTRIES],
148            ),
149            prefix_set: PrefixSetMut::default(),
150            updates: None,
151            branch_node_masks: BranchNodeMasksMap::default(),
152            update_actions_buffers: Vec::default(),
153            parallelism_thresholds: Default::default(),
154            subtrie_heat: SubtrieModifications::default(),
155            #[cfg(feature = "metrics")]
156            metrics: Default::default(),
157            #[cfg(feature = "trie-debug")]
158            debug_recorder: Default::default(),
159        }
160    }
161}
162
163impl SparseTrie for ParallelSparseTrie {
164    fn set_root(
165        &mut self,
166        root: TrieNodeV2,
167        masks: Option<BranchNodeMasks>,
168        retain_updates: bool,
169    ) -> SparseTrieResult<()> {
170        #[cfg(feature = "trie-debug")]
171        self.debug_recorder.record(RecordedOp::SetRoot {
172            node: ProofTrieNodeRecord::from_proof_trie_node_v2(&ProofTrieNodeV2 {
173                path: Nibbles::default(),
174                node: root.clone(),
175                masks,
176            }),
177        });
178
179        // A fresh/cleared `ParallelSparseTrie` has a `SparseNode::Empty` at its root in the upper
180        // subtrie. Delete that so we can reveal the new root node.
181        let path = Nibbles::default();
182        let _removed_root = self.upper_subtrie.nodes.remove(&path).expect("root node should exist");
183        debug_assert_eq!(_removed_root, SparseNode::Empty);
184
185        self.set_updates(retain_updates);
186
187        if let Some(masks) = masks {
188            let branch_path = if let TrieNodeV2::Branch(branch) = &root {
189                branch.key
190            } else {
191                Nibbles::default()
192            };
193
194            self.branch_node_masks.insert(branch_path, masks);
195        }
196
197        self.reveal_upper_node(Nibbles::default(), &root, masks)
198    }
199
200    fn set_updates(&mut self, retain_updates: bool) {
201        self.updates = retain_updates.then(Default::default);
202    }
203
    fn reveal_nodes(&mut self, nodes: &mut [ProofTrieNodeV2]) -> SparseTrieResult<()> {
        // Overall flow: (debug-)record -> sort nodes by subtrie -> absorb branch masks ->
        // reveal upper-subtrie nodes serially -> reveal lower-subtrie nodes either serially or
        // in parallel via rayon, depending on `parallelism_thresholds` and feature flags.
        if nodes.is_empty() {
            return Ok(())
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::RevealNodes {
            nodes: nodes.iter().map(ProofTrieNodeRecord::from_proof_trie_node_v2).collect(),
        });

        // Sort nodes first by their subtrie, and secondarily by their path. This allows for
        // grouping nodes by their subtrie using `chunk_by`, and guarantees the first node of
        // each group has the shortest path in that group (relied upon further below).
        nodes.sort_unstable_by(
            |ProofTrieNodeV2 { path: path_a, .. }, ProofTrieNodeV2 { path: path_b, .. }| {
                let subtrie_type_a = SparseSubtrieType::from_path(path_a);
                let subtrie_type_b = SparseSubtrieType::from_path(path_b);
                subtrie_type_a.cmp(&subtrie_type_b).then_with(|| path_a.cmp(path_b))
            },
        );

        // Update the top-level branch node masks. This is simple and can't be done in parallel.
        self.branch_node_masks.reserve(nodes.len());
        for ProofTrieNodeV2 { path, masks, node } in nodes.iter() {
            if let Some(branch_masks) = masks {
                // Use proper path for branch nodes by combining path and extension key.
                let path = if let TrieNodeV2::Branch(branch) = node &&
                    !branch.key.is_empty()
                {
                    let mut path = *path;
                    path.extend(&branch.key);
                    path
                } else {
                    *path
                };
                self.branch_node_masks.insert(path, *branch_masks);
            }
        }

        // Due to the sorting all upper subtrie nodes will be at the front of the slice. We split
        // them off from the rest to be handled specially by
        // `ParallelSparseTrie::reveal_upper_node`.
        let num_upper_nodes = nodes
            .iter()
            .position(|n| !SparseSubtrieType::path_len_is_upper(n.path.len()))
            .unwrap_or(nodes.len());
        let (upper_nodes, lower_nodes) = nodes.split_at(num_upper_nodes);

        // Reserve the capacity of the upper subtrie's `nodes` HashMap before iterating, so we don't
        // end up making many small capacity changes as we loop.
        self.upper_subtrie.nodes.reserve(upper_nodes.len());
        for node in upper_nodes {
            self.reveal_upper_node(node.path, &node.node, node.masks)?;
        }

        // Compute which lower subtries are reachable from the upper subtrie; nodes belonging to
        // unreachable subtries are skipped below rather than revealed.
        let reachable_subtries = self.reachable_subtries();

        // For boundary nodes that are blinded in upper subtrie, unset the blinded bit and remember
        // the hash to pass into `reveal_node`.
        //
        // NOTE(review): the `unwrap` on the parent lookup assumes that a reachable boundary path
        // always has its parent branch present in the upper subtrie — confirm this invariant.
        let hashes_from_upper = nodes
            .iter()
            .filter_map(|node| {
                if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                    reachable_subtries.get(path_subtrie_index_unchecked(&node.path)) &&
                    let SparseNode::Branch { blinded_mask, blinded_hashes, .. } = self
                        .upper_subtrie
                        .nodes
                        .get_mut(&node.path.slice(0..UPPER_TRIE_MAX_DEPTH - 1))
                        .unwrap()
                {
                    let nibble = node.path.last().unwrap();
                    blinded_mask.is_bit_set(nibble).then(|| {
                        blinded_mask.unset_bit(nibble);
                        (node.path, blinded_hashes[nibble as usize])
                    })
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Serial path: used when the lower node count is below the configured threshold (or
        // when parallelism is unavailable, e.g. no-std).
        if !self.is_reveal_parallelism_enabled(lower_nodes.len()) {
            for node in lower_nodes {
                let idx = path_subtrie_index_unchecked(&node.path);
                if !reachable_subtries.get(idx) {
                    trace!(
                        target: "trie::parallel_sparse",
                        reveal_path = ?node.path,
                        "Node's lower subtrie is not reachable, skipping",
                    );
                    continue;
                }
                // For boundary leaves, check reachability from upper subtrie's parent branch
                if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                    !Self::is_boundary_leaf_reachable(
                        &self.upper_subtrie.nodes,
                        &node.path,
                        &node.node,
                    )
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        path = ?node.path,
                        "Boundary leaf not reachable from upper subtrie, skipping",
                    );
                    continue;
                }
                self.lower_subtries[idx].reveal(&node.path);
                self.subtrie_heat.mark_modified(idx);
                self.lower_subtries[idx].as_revealed_mut().expect("just revealed").reveal_node(
                    node.path,
                    &node.node,
                    node.masks,
                    hashes_from_upper.get(&node.path).copied(),
                )?;
            }
            return Ok(())
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_reveal_parallelism_enabled");

        #[cfg(feature = "std")]
        // Reveal lower subtrie nodes in parallel
        {
            use rayon::iter::{IntoParallelIterator, ParallelIterator};
            use tracing::Span;

            // Capture the current span so it can be propagated to rayon worker threads
            let parent_span = Span::current();

            // Capture reference to upper subtrie nodes for boundary leaf reachability checks
            let upper_nodes = &self.upper_subtrie.nodes;

            // NOTE(review): unlike the serial path above, this parallel path does not call
            // `subtrie_heat.mark_modified` for revealed subtries — confirm whether heat
            // tracking is intentionally skipped here.
            //
            // Group the nodes by lower subtrie.
            let results = lower_nodes
                .chunk_by(|node_a, node_b| {
                    SparseSubtrieType::from_path(&node_a.path) ==
                        SparseSubtrieType::from_path(&node_b.path)
                })
                // Filter out chunks for unreachable subtries.
                .filter_map(|nodes| {
                    let mut nodes = nodes
                        .iter()
                        .filter(|node| {
                            // For boundary leaves, check reachability from upper subtrie's parent
                            // branch.
                            if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                                !Self::is_boundary_leaf_reachable(
                                    upper_nodes,
                                    &node.path,
                                    &node.node,
                                )
                            {
                                trace!(
                                    target: "trie::parallel_sparse",
                                    path = ?node.path,
                                    "Boundary leaf not reachable from upper subtrie, skipping",
                                );
                                false
                            } else {
                                true
                            }
                        })
                        .peekable();

                    let node = nodes.peek()?;
                    let idx =
                        SparseSubtrieType::from_path(&node.path).lower_index().unwrap_or_else(
                            || panic!("upper subtrie node {node:?} found amongst lower nodes"),
                        );

                    if !reachable_subtries.get(idx) {
                        trace!(
                            target: "trie::parallel_sparse",
                            nodes = ?nodes,
                            "Lower subtrie is not reachable, skipping reveal",
                        );
                        return None;
                    }

                    // due to the nodes being sorted secondarily on their path, and chunk_by keeping
                    // the first element of each group, the `path` here will necessarily be the
                    // shortest path being revealed for each subtrie. Therefore we can reveal the
                    // subtrie itself using this path and retain correct behavior.
                    self.lower_subtries[idx].reveal(&node.path);
                    Some((
                        idx,
                        self.lower_subtries[idx].take_revealed().expect("just revealed"),
                        nodes,
                    ))
                })
                .collect::<Vec<_>>()
                .into_par_iter()
                .map(|(subtrie_idx, mut subtrie, nodes)| {
                    // Enter the parent span to propagate context (e.g., hashed_address for storage
                    // tries) to the worker thread
                    let _guard = parent_span.enter();

                    // reserve space in the HashMap ahead of time; doing it on a node-by-node basis
                    // can cause multiple re-allocations as the hashmap grows.
                    subtrie.nodes.reserve(nodes.size_hint().1.unwrap_or(0));

                    for node in nodes {
                        // Reveal each node in the subtrie, returning early on any errors
                        let res = subtrie.reveal_node(
                            node.path,
                            &node.node,
                            node.masks,
                            hashes_from_upper.get(&node.path).copied(),
                        );
                        if res.is_err() {
                            return (subtrie_idx, subtrie, res.map(|_| ()))
                        }
                    }
                    (subtrie_idx, subtrie, Ok(()))
                })
                .collect::<Vec<_>>();

            // Put subtries back which were processed in the rayon pool, collecting the last
            // seen error in the process and returning that.
            let mut any_err = Ok(());
            for (subtrie_idx, subtrie, res) in results {
                self.lower_subtries[subtrie_idx] = LowerSparseSubtrie::Revealed(subtrie);
                if res.is_err() {
                    any_err = res;
                }
            }

            any_err
        }
    }
435
436    fn update_leaf<P: TrieNodeProvider>(
437        &mut self,
438        full_path: Nibbles,
439        value: Vec<u8>,
440        _provider: P,
441    ) -> SparseTrieResult<()> {
442        debug_assert_eq!(
443            full_path.len(),
444            B256::len_bytes() * 2,
445            "update_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
446            full_path.len()
447        );
448
449        trace!(
450            target: "trie::parallel_sparse",
451            ?full_path,
452            value_len = value.len(),
453            "Updating leaf",
454        );
455
456        // Check if the value already exists - if so, just update it (no structural changes needed)
457        if self.upper_subtrie.inner.values.contains_key(&full_path) {
458            self.prefix_set.insert(full_path);
459            self.upper_subtrie.inner.values.insert(full_path, value);
460            return Ok(());
461        }
462        // Also check lower subtries for existing value
463        if let Some(subtrie) = self.lower_subtrie_for_path(&full_path) &&
464            subtrie.inner.values.contains_key(&full_path)
465        {
466            self.prefix_set.insert(full_path);
467            self.lower_subtrie_for_path_mut(&full_path)
468                .expect("subtrie exists")
469                .inner
470                .values
471                .insert(full_path, value);
472            return Ok(());
473        }
474
475        // Insert value into upper subtrie temporarily. We'll move it to the correct subtrie
476        // during traversal, or clean it up if we error.
477        self.upper_subtrie.inner.values.insert(full_path, value.clone());
478
479        // Start at the root, traversing until we find either the node to update or a subtrie to
480        // update.
481        //
482        // We first traverse the upper subtrie for two levels, and moving any created nodes to a
483        // lower subtrie if necessary.
484        //
485        // We use `next` to keep track of the next node that we need to traverse to, and
486        // `new_nodes` to keep track of any nodes that were created during the traversal.
487        let mut new_nodes = Vec::new();
488        let mut next = Some(Nibbles::default());
489
490        // Traverse the upper subtrie to find the node to update or the subtrie to update.
491        //
492        // We stop when the next node to traverse would be in a lower subtrie, or if there are no
493        // more nodes to traverse.
494        while let Some(current) =
495            next.as_mut().filter(|next| SparseSubtrieType::path_len_is_upper(next.len()))
496        {
497            // Traverse the next node, keeping track of any changed nodes and the next step in the
498            // trie. If traversal fails, clean up the value we inserted and propagate the error.
499            let step_result = self.upper_subtrie.update_next_node(current, &full_path);
500
501            if step_result.is_err() {
502                self.upper_subtrie.inner.values.remove(&full_path);
503                return step_result.map(|_| ());
504            }
505
506            match step_result? {
507                LeafUpdateStep::Continue => {}
508                LeafUpdateStep::Complete { inserted_nodes } => {
509                    new_nodes.extend(inserted_nodes);
510                    next = None;
511                }
512                LeafUpdateStep::NodeNotFound => {
513                    next = None;
514                }
515            }
516        }
517
518        // Move nodes from upper subtrie to lower subtries
519        for node_path in &new_nodes {
520            // Skip nodes that belong in the upper subtrie
521            if SparseSubtrieType::path_len_is_upper(node_path.len()) {
522                continue
523            }
524
525            let node =
526                self.upper_subtrie.nodes.remove(node_path).expect("node belongs to upper subtrie");
527
528            // If it's a leaf node, extract its value before getting mutable reference to subtrie.
529            let leaf_value = if let SparseNode::Leaf { key, .. } = &node {
530                let mut leaf_full_path = *node_path;
531                leaf_full_path.extend(key);
532                Some((
533                    leaf_full_path,
534                    self.upper_subtrie
535                        .inner
536                        .values
537                        .remove(&leaf_full_path)
538                        .expect("leaf nodes have associated values entries"),
539                ))
540            } else {
541                None
542            };
543
544            // Get or create the subtrie with the exact node path (not truncated to 2 nibbles).
545            let subtrie = self.subtrie_for_path_mut(node_path);
546
547            // Insert the leaf value if we have one
548            if let Some((leaf_full_path, value)) = leaf_value {
549                subtrie.inner.values.insert(leaf_full_path, value);
550            }
551
552            // Insert the node into the lower subtrie
553            subtrie.nodes.insert(*node_path, node);
554        }
555
556        // If we reached the max depth of the upper trie, we may have had more nodes to insert.
557        if let Some(next_path) = next.filter(|n| !SparseSubtrieType::path_len_is_upper(n.len())) {
558            // The value was inserted into the upper subtrie's `values` at the top of this method.
559            // At this point we know the value is not in the upper subtrie, and the call to
560            // `update_leaf` below will insert it into the lower subtrie. So remove it from the
561            // upper subtrie.
562            self.upper_subtrie.inner.values.remove(&full_path);
563
564            // Use subtrie_for_path to ensure the subtrie has the correct path.
565            //
566            // The next_path here represents where we need to continue traversal, which may
567            // be longer than 2 nibbles if we're following an extension node.
568            let subtrie = self.subtrie_for_path_mut(&next_path);
569
570            // Create an empty root at the subtrie path if the subtrie is empty
571            if subtrie.nodes.is_empty() {
572                subtrie.nodes.insert(subtrie.path, SparseNode::Empty);
573            }
574
575            // If we didn't update the target leaf, we need to call update_leaf on the subtrie
576            // to ensure that the leaf is updated correctly.
577            if let Err(e) = subtrie.update_leaf(full_path, value) {
578                // Clean up: remove the value from lower subtrie if it was inserted
579                if let Some(lower) = self.lower_subtrie_for_path_mut(&full_path) {
580                    lower.inner.values.remove(&full_path);
581                }
582                return Err(e);
583            }
584        }
585
586        // Insert into prefix_set only after all operations succeed
587        self.prefix_set.insert(full_path);
588
589        Ok(())
590    }
591
592    fn remove_leaf<P: TrieNodeProvider>(
593        &mut self,
594        full_path: &Nibbles,
595        _provider: P,
596    ) -> SparseTrieResult<()> {
597        debug_assert_eq!(
598            full_path.len(),
599            B256::len_bytes() * 2,
600            "remove_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
601            full_path.len()
602        );
603
604        trace!(
605            target: "trie::parallel_sparse",
606            ?full_path,
607            "Removing leaf",
608        );
609
610        // When removing a leaf node it's possibly necessary to modify its parent node, and possibly
611        // the parent's parent node. It is not ever necessary to descend further than that; once an
612        // extension node is hit it must terminate in a branch or the root, which won't need further
613        // updates. So the situation with maximum updates is:
614        //
615        // - Leaf
616        // - Branch with 2 children, one being this leaf
617        // - Extension
618        //
619        // ...which will result in just a leaf or extension, depending on what the branch's other
620        // child is.
621        //
622        // Therefore, first traverse the trie in order to find the leaf node and at most its parent
623        // and grandparent.
624
625        let leaf_path;
626        let leaf_subtrie_type;
627
628        let mut branch_parent_path: Option<Nibbles> = None;
629        let mut branch_parent_node: Option<SparseNode> = None;
630
631        let mut ext_grandparent_path: Option<Nibbles> = None;
632        let mut ext_grandparent_node: Option<SparseNode> = None;
633
634        let mut curr_path = Nibbles::new(); // start traversal from root
635        let mut curr_subtrie_type = SparseSubtrieType::Upper;
636
637        // List of node paths which need to be marked dirty
638        let mut paths_to_mark_dirty = Vec::new();
639
640        loop {
641            let curr_subtrie = match curr_subtrie_type {
642                SparseSubtrieType::Upper => &mut self.upper_subtrie,
643                SparseSubtrieType::Lower(idx) => {
644                    self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
645                }
646            };
647            let curr_node = curr_subtrie.nodes.get_mut(&curr_path).unwrap();
648
649            match Self::find_next_to_leaf(&curr_path, curr_node, full_path) {
650                FindNextToLeafOutcome::NotFound => return Ok(()), // leaf isn't in the trie
651                FindNextToLeafOutcome::BlindedNode(path) => {
652                    return Err(SparseTrieErrorKind::BlindedNode(path).into())
653                }
654                FindNextToLeafOutcome::Found => {
655                    // this node is the target leaf
656                    leaf_path = curr_path;
657                    leaf_subtrie_type = curr_subtrie_type;
658                    break;
659                }
660                FindNextToLeafOutcome::ContinueFrom(next_path) => {
661                    // Any branches/extensions along the path to the leaf will have their `hash`
662                    // field unset, as it will no longer be valid once the leaf is removed.
663                    match curr_node {
664                        SparseNode::Branch { .. } => {
665                            paths_to_mark_dirty
666                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));
667
668                            // If there is already an extension leading into a branch, then that
669                            // extension is no longer relevant.
670                            match (&branch_parent_path, &ext_grandparent_path) {
671                                (Some(branch), Some(ext)) if branch.len() > ext.len() => {
672                                    ext_grandparent_path = None;
673                                    ext_grandparent_node = None;
674                                }
675                                _ => (),
676                            };
677                            branch_parent_path = Some(curr_path);
678                            branch_parent_node = Some(curr_node.clone());
679                        }
680                        SparseNode::Extension { .. } => {
681                            paths_to_mark_dirty
682                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));
683
684                            // We can assume a new branch node will be found after the extension, so
685                            // there's no need to modify branch_parent_path/node even if it's
686                            // already set.
687                            ext_grandparent_path = Some(curr_path);
688                            ext_grandparent_node = Some(curr_node.clone());
689                        }
690                        SparseNode::Empty | SparseNode::Leaf { .. } => {
691                            unreachable!(
692                                "find_next_to_leaf only continues to a branch or extension"
693                            )
694                        }
695                    }
696
697                    curr_path = next_path;
698
699                    // Update subtrie type if we're crossing into the lower trie.
700                    let next_subtrie_type = SparseSubtrieType::from_path(&curr_path);
701                    if matches!(curr_subtrie_type, SparseSubtrieType::Upper) &&
702                        matches!(next_subtrie_type, SparseSubtrieType::Lower(_))
703                    {
704                        curr_subtrie_type = next_subtrie_type;
705                    }
706                }
707            };
708        }
709
710        // Before mutating, check if branch collapse would require revealing a blinded node.
711        // This ensures remove_leaf is atomic: if it errors, the trie is unchanged.
712        if let (Some(branch_path), Some(SparseNode::Branch { state_mask, blinded_mask, .. })) =
713            (&branch_parent_path, &branch_parent_node)
714        {
715            let mut check_mask = *state_mask;
716            let child_nibble = leaf_path.get_unchecked(branch_path.len());
717            check_mask.unset_bit(child_nibble);
718
719            if check_mask.count_bits() == 1 {
720                let remaining_nibble =
721                    check_mask.first_set_bit_index().expect("state mask is not empty");
722
723                if blinded_mask.is_bit_set(remaining_nibble) {
724                    let mut path = *branch_path;
725                    path.push_unchecked(remaining_nibble);
726                    return Err(SparseTrieErrorKind::BlindedNode(path).into());
727                }
728            }
729        }
730
731        // We've traversed to the leaf and collected its ancestors as necessary. Remove the leaf
732        // from its SparseSubtrie and reset the hashes of the nodes along the path.
733        self.prefix_set.insert(*full_path);
734        let leaf_subtrie = match leaf_subtrie_type {
735            SparseSubtrieType::Upper => &mut self.upper_subtrie,
736            SparseSubtrieType::Lower(idx) => {
737                self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
738            }
739        };
740        leaf_subtrie.inner.values.remove(full_path);
741        for (subtrie_type, path) in paths_to_mark_dirty {
742            let node = match subtrie_type {
743                SparseSubtrieType::Upper => self.upper_subtrie.nodes.get_mut(&path),
744                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx]
745                    .as_revealed_mut()
746                    .expect("lower subtrie is revealed")
747                    .nodes
748                    .get_mut(&path),
749            }
750            .expect("node exists");
751
752            match node {
753                SparseNode::Extension { state, .. } | SparseNode::Branch { state, .. } => {
754                    *state = SparseNodeState::Dirty
755                }
756                SparseNode::Empty | SparseNode::Leaf { .. } => {
757                    unreachable!(
758                        "only branch and extension nodes can be marked dirty when removing a leaf"
759                    )
760                }
761            }
762        }
763        self.remove_node(&leaf_path);
764
765        // If the leaf was at the root replace its node with the empty value. We can stop execution
766        // here, all remaining logic is related to the ancestors of the leaf.
767        if leaf_path.is_empty() {
768            self.upper_subtrie.nodes.insert(leaf_path, SparseNode::Empty);
769            return Ok(())
770        }
771
772        // If there is a parent branch node (very likely, unless the leaf is at the root) execute
773        // any required changes for that node, relative to the removed leaf.
774        if let (
775            Some(branch_path),
776            &Some(SparseNode::Branch { mut state_mask, blinded_mask, ref blinded_hashes, .. }),
777        ) = (&branch_parent_path, &branch_parent_node)
778        {
779            let child_nibble = leaf_path.get_unchecked(branch_path.len());
780            state_mask.unset_bit(child_nibble);
781
782            let new_branch_node = if state_mask.count_bits() == 1 {
783                // If only one child is left set in the branch node, we need to collapse it. Get
784                // full path of the only child node left.
785                let remaining_child_nibble =
786                    state_mask.first_set_bit_index().expect("state mask is not empty");
787                let mut remaining_child_path = *branch_path;
788                remaining_child_path.push_unchecked(remaining_child_nibble);
789
790                trace!(
791                    target: "trie::parallel_sparse",
792                    ?leaf_path,
793                    ?branch_path,
794                    ?remaining_child_path,
795                    "Branch node has only one child",
796                );
797
798                // If the remaining child node is not yet revealed then we have to reveal it here,
799                // otherwise it's not possible to know how to collapse the branch.
800                if blinded_mask.is_bit_set(remaining_child_nibble) {
801                    return Err(SparseTrieErrorKind::BlindedNode(remaining_child_path).into());
802                }
803
804                let remaining_child_node = self
805                    .subtrie_for_path_mut(&remaining_child_path)
806                    .nodes
807                    .get(&remaining_child_path)
808                    .unwrap();
809
810                let (new_branch_node, remove_child) = Self::branch_changes_on_leaf_removal(
811                    branch_path,
812                    &remaining_child_path,
813                    remaining_child_node,
814                );
815
816                if remove_child {
817                    self.move_value_on_leaf_removal(
818                        branch_path,
819                        &new_branch_node,
820                        &remaining_child_path,
821                    );
822                    self.remove_node(&remaining_child_path);
823                }
824
825                if let Some(updates) = self.updates.as_mut() {
826                    updates.updated_nodes.remove(branch_path);
827                    updates.removed_nodes.insert(*branch_path);
828                }
829
830                new_branch_node
831            } else {
832                // If more than one child is left set in the branch, we just re-insert it with the
833                // updated state_mask.
834                SparseNode::Branch {
835                    state_mask,
836                    blinded_mask,
837                    blinded_hashes: blinded_hashes.clone(),
838                    state: SparseNodeState::Dirty,
839                }
840            };
841
842            let branch_subtrie = self.subtrie_for_path_mut(branch_path);
843            branch_subtrie.nodes.insert(*branch_path, new_branch_node.clone());
844            branch_parent_node = Some(new_branch_node);
845        };
846
847        // If there is a grandparent extension node then there will necessarily be a parent branch
848        // node. Execute any required changes for the extension node, relative to the (possibly now
849        // replaced with a leaf or extension) branch node.
850        if let (Some(ext_path), Some(SparseNode::Extension { key: shortkey, .. })) =
851            (ext_grandparent_path, &ext_grandparent_node)
852        {
853            let ext_subtrie = self.subtrie_for_path_mut(&ext_path);
854            let branch_path = branch_parent_path.as_ref().unwrap();
855
856            if let Some(new_ext_node) = Self::extension_changes_on_leaf_removal(
857                &ext_path,
858                shortkey,
859                branch_path,
860                branch_parent_node.as_ref().unwrap(),
861            ) {
862                ext_subtrie.nodes.insert(ext_path, new_ext_node.clone());
863                self.move_value_on_leaf_removal(&ext_path, &new_ext_node, branch_path);
864                self.remove_node(branch_path);
865            }
866        }
867
868        Ok(())
869    }
870
871    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
872    fn root(&mut self) -> B256 {
873        trace!(target: "trie::parallel_sparse", "Calculating trie root hash");
874
875        #[cfg(feature = "trie-debug")]
876        self.debug_recorder.record(RecordedOp::Root);
877
878        if self.prefix_set.is_empty() &&
879            let Some(rlp_node) = self
880                .upper_subtrie
881                .nodes
882                .get(&Nibbles::default())
883                .and_then(|node| node.cached_rlp_node())
884        {
885            return rlp_node
886                .as_hash()
887                .expect("RLP-encoding of the root node cannot be less than 32 bytes")
888        }
889
890        // Update all lower subtrie hashes
891        self.update_subtrie_hashes();
892
893        // Update hashes for the upper subtrie using our specialized function
894        // that can access both upper and lower subtrie nodes
895        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
896        let root_rlp = self.update_upper_subtrie_hashes(&mut prefix_set);
897
898        // Return the root hash
899        root_rlp.as_hash().unwrap_or(EMPTY_ROOT_HASH)
900    }
901
902    fn is_root_cached(&self) -> bool {
903        self.prefix_set.is_empty() &&
904            self.upper_subtrie
905                .nodes
906                .get(&Nibbles::default())
907                .is_some_and(|node| node.cached_rlp_node().is_some())
908    }
909
    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
    fn update_subtrie_hashes(&mut self) {
        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateSubtrieHashes);

        // Take changed subtries according to the prefix set
        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
        let num_changed_keys = prefix_set.len();
        let (mut changed_subtries, unchanged_prefix_set) =
            self.take_changed_lower_subtries(&mut prefix_set);

        // update metrics
        #[cfg(feature = "metrics")]
        self.metrics.subtries_updated.record(changed_subtries.len() as f64);

        // Update the prefix set with the keys that didn't have matching subtries
        self.prefix_set = unchanged_prefix_set;

        // Update subtrie hashes serially if parallelism is not enabled
        if !self.is_update_parallelism_enabled(num_changed_keys) {
            for changed_subtrie in &mut changed_subtries {
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
            }

            self.insert_changed_subtries(changed_subtries);
            return
        }

        // On no_std the serial path above is always taken (parallelism is reported as
        // disabled), so execution can never reach this point.
        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_update_parallelism_enabled");

        #[cfg(feature = "std")]
        // Update subtrie hashes in parallel
        {
            use rayon::prelude::*;

            // Each closure invocation mutates only its own `changed_subtrie`; `self` is
            // accessed via shared borrows (`branch_node_masks`, metrics) only.
            changed_subtries.par_iter_mut().for_each(|changed_subtrie| {
                #[cfg(feature = "metrics")]
                let start = Instant::now();
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
                #[cfg(feature = "metrics")]
                self.metrics.subtrie_hash_update_latency.record(start.elapsed());
            });

            self.insert_changed_subtries(changed_subtries);
        }
    }
967
968    fn get_leaf_value(&self, full_path: &Nibbles) -> Option<&Vec<u8>> {
969        // `subtrie_for_path` is intended for a node path, but here we are using a full key path. So
970        // we need to check if the subtrie that the key might belong to has any nodes; if not then
971        // the key's portion of the trie doesn't have enough depth to reach into the subtrie, and
972        // the key will be in the upper subtrie
973        if let Some(subtrie) = self.subtrie_for_path(full_path) &&
974            !subtrie.is_empty()
975        {
976            return subtrie.inner.values.get(full_path);
977        }
978
979        self.upper_subtrie.inner.values.get(full_path)
980    }
981
982    fn updates_ref(&self) -> Cow<'_, SparseTrieUpdates> {
983        self.updates.as_ref().map_or(Cow::Owned(SparseTrieUpdates::default()), Cow::Borrowed)
984    }
985
986    fn take_updates(&mut self) -> SparseTrieUpdates {
987        match self.updates.take() {
988            Some(updates) => {
989                // NOTE: we need to preserve Some case
990                self.updates = Some(SparseTrieUpdates::with_capacity(
991                    updates.updated_nodes.len(),
992                    updates.removed_nodes.len(),
993                ));
994                updates
995            }
996            None => SparseTrieUpdates::default(),
997        }
998    }
999
1000    fn wipe(&mut self) {
1001        self.upper_subtrie.wipe();
1002        for trie in &mut *self.lower_subtries {
1003            trie.wipe();
1004        }
1005        self.prefix_set = PrefixSetMut::all();
1006        self.updates = self.updates.is_some().then(SparseTrieUpdates::wiped);
1007        self.subtrie_heat.clear();
1008    }
1009
1010    fn clear(&mut self) {
1011        self.upper_subtrie.clear();
1012        self.upper_subtrie.nodes.insert(Nibbles::default(), SparseNode::Empty);
1013        for subtrie in &mut *self.lower_subtries {
1014            subtrie.clear();
1015        }
1016        self.prefix_set.clear();
1017        self.updates = None;
1018        self.branch_node_masks.clear();
1019        self.subtrie_heat.clear();
1020        #[cfg(feature = "trie-debug")]
1021        self.debug_recorder.reset();
1022        // `update_actions_buffers` doesn't need to be cleared; we want to reuse the Vecs it has
1023        // buffered, and all of those are already inherently cleared when they get used.
1024    }
1025
    fn find_leaf(
        &self,
        full_path: &Nibbles,
        expected_value: Option<&Vec<u8>>,
    ) -> Result<LeafLookup, LeafLookupError> {
        // Inclusion proof
        //
        // First, do a quick check if the value exists in either the upper or lower subtrie's values
        // map. We assume that if there exists a leaf node, then its value will be in the `values`
        // map.
        if let Some(actual_value) = core::iter::once(self.upper_subtrie.as_ref())
            .chain(self.lower_subtrie_for_path(full_path))
            .filter_map(|subtrie| subtrie.inner.values.get(full_path))
            .next()
        {
            // We found the leaf, check if the value matches (if expected value was provided)
            return expected_value
                .is_none_or(|v| v == actual_value)
                .then_some(LeafLookup::Exists)
                .ok_or_else(|| LeafLookupError::ValueMismatch {
                    path: *full_path,
                    expected: expected_value.cloned(),
                    actual: actual_value.clone(),
                })
        }

        // If the value does not exist in the `values` map, then this means that the leaf either:
        // - Does not exist in the trie
        // - Is missing from the witness
        // We traverse the trie to find the location where this leaf would have been, showing
        // that it is not in the trie. Or we find a blinded node, showing that the witness is
        // not complete.
        let mut curr_path = Nibbles::new(); // start traversal from root
        let mut curr_subtrie = self.upper_subtrie.as_ref();
        let mut curr_subtrie_is_upper = true;

        loop {
            // Invariant: `curr_path` always points at a node present in `curr_subtrie`.
            match curr_subtrie.nodes.get(&curr_path).unwrap() {
                SparseNode::Empty => return Ok(LeafLookup::NonExistent),
                SparseNode::Leaf { key, .. } => {
                    // A leaf found here that is NOT the target proves the target's
                    // absence. Finding the target itself would contradict the values-map
                    // check above, hence the assertion.
                    let mut found_full_path = curr_path;
                    found_full_path.extend(key);
                    assert!(&found_full_path != full_path, "target leaf {full_path:?} found, even though value wasn't in values hashmap");
                    return Ok(LeafLookup::NonExistent)
                }
                SparseNode::Extension { key, .. } => {
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    // Follow the extension; if the target path diverges from the
                    // extension key, the leaf cannot exist.
                    curr_path.extend(key);
                    if !full_path.starts_with(&curr_path) {
                        return Ok(LeafLookup::NonExistent)
                    }
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    // No child at the target's nibble means the leaf is absent.
                    let nibble = full_path.get_unchecked(curr_path.len());
                    if !state_mask.is_bit_set(nibble) {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.push_unchecked(nibble);
                    // A blinded child means the witness is incomplete: we cannot prove
                    // existence or absence past this point.
                    if blinded_mask.is_bit_set(nibble) {
                        return Err(LeafLookupError::BlindedNode {
                            path: curr_path,
                            hash: blinded_hashes[nibble as usize],
                        })
                    }
                }
            }

            // If we were previously looking at the upper trie, and the new path is in the
            // lower trie, we need to pull out a ref to the lower trie.
            if curr_subtrie_is_upper &&
                let Some(lower_subtrie) = self.lower_subtrie_for_path(&curr_path)
            {
                curr_subtrie = lower_subtrie;
                curr_subtrie_is_upper = false;
            }
        }
    }
1108
1109    fn shrink_nodes_to(&mut self, size: usize) {
1110        // Distribute the capacity across upper and lower subtries
1111        //
1112        // Always include upper subtrie, plus any lower subtries
1113        let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1114        let size_per_subtrie = size / total_subtries;
1115
1116        // Shrink the upper subtrie
1117        self.upper_subtrie.shrink_nodes_to(size_per_subtrie);
1118
1119        // Shrink lower subtries (works for both revealed and blind with allocation)
1120        for subtrie in &mut *self.lower_subtries {
1121            subtrie.shrink_nodes_to(size_per_subtrie);
1122        }
1123
1124        // shrink masks map
1125        self.branch_node_masks.shrink_to(size);
1126    }
1127
1128    fn shrink_values_to(&mut self, size: usize) {
1129        // Distribute the capacity across upper and lower subtries
1130        //
1131        // Always include upper subtrie, plus any lower subtries
1132        let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1133        let size_per_subtrie = size / total_subtries;
1134
1135        // Shrink the upper subtrie
1136        self.upper_subtrie.shrink_values_to(size_per_subtrie);
1137
1138        // Shrink lower subtries (works for both revealed and blind with allocation)
1139        for subtrie in &mut *self.lower_subtries {
1140            subtrie.shrink_values_to(size_per_subtrie);
1141        }
1142    }
1143
1144    /// O(1) size hint based on total node count (including hash stubs).
1145    fn size_hint(&self) -> usize {
1146        let upper_count = self.upper_subtrie.nodes.len();
1147        let lower_count: usize = self
1148            .lower_subtries
1149            .iter()
1150            .filter_map(|s| s.as_revealed_ref())
1151            .map(|s| s.nodes.len())
1152            .sum();
1153        upper_count + lower_count
1154    }
1155
    fn memory_size(&self) -> usize {
        // Delegates to a same-named method on the concrete type. For this not to be
        // infinite recursion, an inherent `memory_size` must exist on the struct —
        // inherent methods take precedence over trait methods during resolution.
        // NOTE(review): that inherent method is not visible in this part of the file;
        // confirm it exists, otherwise this call recurses unboundedly.
        self.memory_size()
    }
1159
    fn prune(&mut self, max_depth: usize) -> usize {
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.reset();

        // Decay heat for subtries not modified this cycle
        self.subtrie_heat.decay_and_reset();

        // DFS traversal to find nodes at max_depth that can be pruned.
        // Collects "effective pruned roots" - children of nodes at max_depth with computed hashes.
        // We replace nodes with Hash stubs inline during traversal.
        let mut effective_pruned_roots = Vec::<Nibbles>::new();
        let mut stack: SmallVec<[(Nibbles, usize); 32]> = SmallVec::new();
        stack.push((Nibbles::default(), 0));

        // DFS traversal: pop path and depth, skip if subtrie or node not found.
        while let Some((path, depth)) = stack.pop() {
            // Skip traversal into hot lower subtries beyond max_depth.
            // At max_depth, we still need to process the node to convert children to hashes.
            // This keeps frequently-modified subtries revealed to avoid expensive re-reveals.
            if depth > max_depth &&
                let SparseSubtrieType::Lower(idx) = SparseSubtrieType::from_path(&path) &&
                self.subtrie_heat.is_hot(idx)
            {
                continue;
            }

            let Some(subtrie) = self.subtrie_for_path_mut_untracked(&path) else { continue };
            let Some(node) = subtrie.nodes.get_mut(&path) else { continue };

            match node {
                // Nothing to descend into for empty or leaf nodes.
                SparseNode::Empty | SparseNode::Leaf { .. } => {}
                SparseNode::Extension { key, state, .. } => {
                    // For extension nodes at max depth, collapse both extension and its child
                    // branch to preserve invariant of all extension nodes children being revealed.
                    if depth == max_depth {
                        // Can only prune if the extension's hash is already computed.
                        let Some(hash) = state.cached_hash() else { continue };
                        subtrie.nodes.remove(&path);

                        // Record the pruned extension as a blinded child of its parent
                        // branch, storing its hash in the parent's blinded_hashes slot.
                        let parent_path = path.slice(0..path.len() - 1);
                        let SparseNode::Branch { blinded_mask, blinded_hashes, .. } =
                            subtrie.nodes.get_mut(&parent_path).unwrap()
                        else {
                            panic!("expected branch node at path {parent_path:?}");
                        };

                        let nibble = path.last().unwrap();
                        blinded_mask.set_bit(nibble);
                        blinded_hashes[nibble as usize] = hash;

                        effective_pruned_roots.push(path);
                    } else {
                        let mut child = path;
                        child.extend(key);
                        stack.push((child, depth + 1));
                    }
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    // For branch nodes at max depth, collapse all children onto them,
                    if depth == max_depth {
                        // Work on local copies of the masks/hashes so the children can be
                        // visited (mutably) without holding a borrow of this node.
                        let mut blinded_mask = *blinded_mask;
                        let mut blinded_hashes = blinded_hashes.clone();
                        for nibble in state_mask.iter() {
                            // Already-blinded children need no work.
                            if blinded_mask.is_bit_set(nibble) {
                                continue;
                            }
                            let mut child = path;
                            child.push_unchecked(nibble);

                            let Entry::Occupied(entry) = self
                                .subtrie_for_path_mut_untracked(&child)
                                .unwrap()
                                .nodes
                                .entry(child)
                            else {
                                panic!("expected node at path {child:?}");
                            };

                            // Children without a computed hash stay revealed.
                            let Some(hash) = entry.get().cached_hash() else {
                                continue;
                            };
                            entry.remove();
                            blinded_mask.set_bit(nibble);
                            blinded_hashes[nibble as usize] = hash;
                            effective_pruned_roots.push(child);
                        }

                        // Write the accumulated masks/hashes back onto the branch node.
                        let SparseNode::Branch {
                            blinded_mask: old_blinded_mask,
                            blinded_hashes: old_blinded_hashes,
                            ..
                        } = self
                            .subtrie_for_path_mut_untracked(&path)
                            .unwrap()
                            .nodes
                            .get_mut(&path)
                            .unwrap()
                        else {
                            unreachable!("expected branch node at path {path:?}");
                        };
                        *old_blinded_mask = blinded_mask;
                        *old_blinded_hashes = blinded_hashes;
                    } else {
                        // Below max_depth: descend into all non-blinded children.
                        for nibble in state_mask.iter() {
                            if blinded_mask.is_bit_set(nibble) {
                                continue;
                            }
                            let mut child = path;
                            child.push_unchecked(nibble);
                            stack.push((child, depth + 1));
                        }
                    }
                }
            }
        }

        if effective_pruned_roots.is_empty() {
            return 0;
        }

        let nodes_converted = effective_pruned_roots.len();

        // Sort roots by subtrie type (upper first), then by path for efficient partitioning.
        effective_pruned_roots.sort_unstable_by(|path_a, path_b| {
            let subtrie_type_a = SparseSubtrieType::from_path(path_a);
            let subtrie_type_b = SparseSubtrieType::from_path(path_b);
            subtrie_type_a.cmp(&subtrie_type_b).then(path_a.cmp(path_b))
        });

        // Split off upper subtrie roots (they come first due to sorting)
        let num_upper_roots = effective_pruned_roots
            .iter()
            .position(|p| !SparseSubtrieType::path_len_is_upper(p.len()))
            .unwrap_or(effective_pruned_roots.len());

        let roots_upper = &effective_pruned_roots[..num_upper_roots];
        let roots_lower = &effective_pruned_roots[num_upper_roots..];

        // Debug-only invariant check: no pruned root may be an ancestor of another.
        debug_assert!(
            {
                let mut all_roots: Vec<_> = effective_pruned_roots.clone();
                all_roots.sort_unstable();
                all_roots.windows(2).all(|w| !w[1].starts_with(&w[0]))
            },
            "prune roots must be prefix-free"
        );

        // Upper prune roots that are prefixes of lower subtrie root paths cause the entire
        // subtrie to be cleared (preserving allocations for reuse).
        if !roots_upper.is_empty() {
            for subtrie in &mut *self.lower_subtries {
                // `roots_upper` is sorted, so partition_point locates the candidate
                // ancestor root in O(log n).
                let should_clear = subtrie.as_revealed_ref().is_some_and(|s| {
                    let search_idx = roots_upper.partition_point(|root| root <= &s.path);
                    search_idx > 0 && s.path.starts_with(&roots_upper[search_idx - 1])
                });
                if should_clear {
                    subtrie.clear();
                }
            }
        }

        // Upper subtrie: prune nodes and values
        self.upper_subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_upper, p));
        self.upper_subtrie.inner.values.retain(|p, _| {
            !starts_with_pruned_in(roots_upper, p) && !starts_with_pruned_in(roots_lower, p)
        });

        // Process lower subtries using chunk_by to group roots by subtrie
        for roots_group in roots_lower.chunk_by(|path_a, path_b| {
            SparseSubtrieType::from_path(path_a) == SparseSubtrieType::from_path(path_b)
        }) {
            let subtrie_idx = path_subtrie_index_unchecked(&roots_group[0]);

            // Skip unrevealed/blinded subtries - nothing to prune
            let Some(subtrie) = self.lower_subtries[subtrie_idx].as_revealed_mut() else {
                continue;
            };

            // Retain only nodes/values not descended from any pruned root.
            subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_group, p));
            subtrie.inner.values.retain(|p, _| !starts_with_pruned_in(roots_group, p));
        }

        // Branch node masks pruning
        self.branch_node_masks.retain(|p, _| {
            if SparseSubtrieType::path_len_is_upper(p.len()) {
                !starts_with_pruned_in(roots_upper, p)
            } else {
                !starts_with_pruned_in(roots_lower, p) && !starts_with_pruned_in(roots_upper, p)
            }
        });

        // Number of nodes converted to hash stubs during this prune pass.
        nodes_converted
    }
1353
    /// Applies a batch of leaf updates to the trie, requesting proofs for any paths that
    /// cannot be processed because the required trie nodes are still blinded.
    ///
    /// Entries are drained from `updates`. Entries that fail on a blinded/missing node are
    /// re-inserted into `updates` so the caller can retry after revealing, and
    /// `proof_required_fn` is invoked with the proof target key and minimum proof length for
    /// each such path. Entries that succeed stay removed from the map.
    ///
    /// Returns an error only for non-retriable failures.
    fn update_leaves(
        &mut self,
        updates: &mut alloy_primitives::map::B256Map<crate::LeafUpdate>,
        mut proof_required_fn: impl FnMut(B256, u8),
    ) -> SparseTrieResult<()> {
        use crate::{provider::NoRevealProvider, LeafUpdate};

        // Snapshot the incoming updates for the debug recorder before draining the map.
        #[cfg(feature = "trie-debug")]
        let recorded_updates: Vec<_> =
            updates.iter().map(|(k, v)| (*k, LeafUpdateRecord::from(v))).collect();
        #[cfg(feature = "trie-debug")]
        let mut recorded_proof_targets: Vec<(B256, u8)> = Vec::new();

        // Drain updates to avoid cloning keys while preserving the map's allocation.
        // On success, entries remain removed; on blinded node failure, they're re-inserted.
        let drained: Vec<_> = updates.drain().collect();

        for (key, update) in drained {
            let full_path = Nibbles::unpack(key);

            match update {
                LeafUpdate::Changed(value) => {
                    // An empty value encodes a leaf removal.
                    if value.is_empty() {
                        // Removal: remove_leaf with NoRevealProvider is atomic - returns a
                        // retriable error before any mutations (via pre_validate_reveal_chain).
                        match self.remove_leaf(&full_path, NoRevealProvider) {
                            Ok(()) => {}
                            Err(e) => {
                                if let Some(path) = Self::get_retriable_path(&e) {
                                    let (target_key, min_len) =
                                        Self::proof_target_for_path(key, &full_path, &path);
                                    proof_required_fn(target_key, min_len);
                                    #[cfg(feature = "trie-debug")]
                                    recorded_proof_targets.push((target_key, min_len));
                                    // Re-queue so the caller retries once the proof arrives.
                                    updates.insert(key, LeafUpdate::Changed(value));
                                } else {
                                    return Err(e);
                                }
                            }
                        }
                    } else {
                        // Update/insert: update_leaf is atomic - cleans up on error.
                        if let Err(e) = self.update_leaf(full_path, value.clone(), NoRevealProvider)
                        {
                            if let Some(path) = Self::get_retriable_path(&e) {
                                let (target_key, min_len) =
                                    Self::proof_target_for_path(key, &full_path, &path);
                                proof_required_fn(target_key, min_len);
                                #[cfg(feature = "trie-debug")]
                                recorded_proof_targets.push((target_key, min_len));
                                // Re-queue so the caller retries once the proof arrives.
                                updates.insert(key, LeafUpdate::Changed(value));
                            } else {
                                return Err(e);
                            }
                        }
                    }
                }
                LeafUpdate::Touched => {
                    // Touched is read-only: check if path is accessible, request proof if blinded.
                    match self.find_leaf(&full_path, None) {
                        Err(LeafLookupError::BlindedNode { path, .. }) => {
                            let (target_key, min_len) =
                                Self::proof_target_for_path(key, &full_path, &path);
                            proof_required_fn(target_key, min_len);
                            #[cfg(feature = "trie-debug")]
                            recorded_proof_targets.push((target_key, min_len));
                            updates.insert(key, LeafUpdate::Touched);
                        }
                        // Path is fully revealed (exists or proven non-existent), no action needed.
                        Ok(_) | Err(LeafLookupError::ValueMismatch { .. }) => {}
                    }
                }
            }
        }

        // Record the batch (inputs, what remains queued, and requested proof targets).
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateLeaves {
            updates: recorded_updates,
            remaining_keys: updates.keys().copied().collect(),
            proof_targets: recorded_proof_targets,
        });

        Ok(())
    }
1438
1439    #[cfg(feature = "trie-debug")]
1440    fn take_debug_recorder(&mut self) -> TrieDebugRecorder {
1441        core::mem::take(&mut self.debug_recorder)
1442    }
1443
1444    fn commit_updates(
1445        &mut self,
1446        updated: &HashMap<Nibbles, BranchNodeCompact>,
1447        removed: &HashSet<Nibbles>,
1448    ) {
1449        // Sync branch_node_masks with what's being committed to DB.
1450        // This ensures that on subsequent root() calls, the masks reflect the actual
1451        // DB state, which is needed for correct removal detection.
1452        self.branch_node_masks.reserve(updated.len());
1453        for (path, node) in updated {
1454            self.branch_node_masks.insert(
1455                *path,
1456                BranchNodeMasks { tree_mask: node.tree_mask, hash_mask: node.hash_mask },
1457            );
1458        }
1459        for path in removed {
1460            self.branch_node_masks.remove(path);
1461        }
1462    }
1463}
1464
1465impl ParallelSparseTrie {
    /// Sets the thresholds that control when parallelism is used during operations.
    ///
    /// Builder-style: consumes and returns `self` so it can be chained after construction.
    pub const fn with_parallelism_thresholds(mut self, thresholds: ParallelismThresholds) -> Self {
        self.parallelism_thresholds = thresholds;
        self
    }
1471
    /// Returns true if retaining updates is enabled for the overall trie.
    ///
    /// Updates are retained iff the `updates` accumulator has been initialized.
    const fn updates_enabled(&self) -> bool {
        self.updates.is_some()
    }
1476
    /// Returns true if parallelism should be enabled for revealing the given number of nodes.
    /// Will always return false in nostd builds.
    const fn is_reveal_parallelism_enabled(&self, num_nodes: usize) -> bool {
        #[cfg(not(feature = "std"))]
        {
            // No threading without std; the binding suppresses the unused-parameter warning.
            let _ = num_nodes;
            return false;
        }

        #[cfg(feature = "std")]
        {
            // Parallelize only when the batch is large enough to amortize scheduling overhead.
            num_nodes >= self.parallelism_thresholds.min_revealed_nodes
        }
    }
1491
    /// Returns true if parallelism should be enabled for updating hashes with the given number
    /// of changed keys. Will always return false in nostd builds.
    const fn is_update_parallelism_enabled(&self, num_changed_keys: usize) -> bool {
        #[cfg(not(feature = "std"))]
        {
            // No threading without std; the binding suppresses the unused-parameter warning.
            let _ = num_changed_keys;
            return false;
        }

        #[cfg(feature = "std")]
        {
            // Parallelize only when the batch is large enough to amortize scheduling overhead.
            num_changed_keys >= self.parallelism_thresholds.min_updated_nodes
        }
    }
1506
1507    /// Checks if an error is retriable (`BlindedNode` or `NodeNotFoundInProvider`) and extracts
1508    /// the path if so.
1509    ///
1510    /// Both error types indicate that a node needs to be revealed before the operation can
1511    /// succeed. `BlindedNode` occurs when traversing to a Hash node, while `NodeNotFoundInProvider`
1512    /// occurs when `retain_updates` is enabled and an extension node's child needs revealing.
1513    const fn get_retriable_path(e: &SparseTrieError) -> Option<Nibbles> {
1514        match e.kind() {
1515            SparseTrieErrorKind::BlindedNode(path) |
1516            SparseTrieErrorKind::NodeNotFoundInProvider { path } => Some(*path),
1517            _ => None,
1518        }
1519    }
1520
1521    /// Converts a nibbles path to a B256, right-padding with zeros to 64 nibbles.
1522    fn nibbles_to_padded_b256(path: &Nibbles) -> B256 {
1523        let mut bytes = [0u8; 32];
1524        path.pack_to(&mut bytes);
1525        B256::from(bytes)
1526    }
1527
1528    /// Computes the proof target key and `min_len` for a blinded node error.
1529    ///
1530    /// Returns `(target_key, min_len)` where:
1531    /// - `target_key` is `full_key` if `path` is a prefix of `full_path`, otherwise the padded path
1532    /// - `min_len` is always based on `path.len()`
1533    fn proof_target_for_path(full_key: B256, full_path: &Nibbles, path: &Nibbles) -> (B256, u8) {
1534        let min_len = (path.len() as u8).min(64);
1535        let target_key =
1536            if full_path.starts_with(path) { full_key } else { Self::nibbles_to_padded_b256(path) };
1537        (target_key, min_len)
1538    }
1539
1540    /// Creates a new revealed sparse trie from the given root node.
1541    ///
1542    /// This function initializes the internal structures and then reveals the root.
1543    /// It is a convenient method to create a trie when you already have the root node available.
1544    ///
1545    /// # Arguments
1546    ///
1547    /// * `root` - The root node of the trie
1548    /// * `masks` - Trie masks for root branch node
1549    /// * `retain_updates` - Whether to track updates
1550    ///
1551    /// # Returns
1552    ///
1553    /// Self if successful, or an error if revealing fails.
1554    pub fn from_root(
1555        root: TrieNodeV2,
1556        masks: Option<BranchNodeMasks>,
1557        retain_updates: bool,
1558    ) -> SparseTrieResult<Self> {
1559        Self::default().with_root(root, masks, retain_updates)
1560    }
1561
1562    /// Returns a reference to the lower `SparseSubtrie` for the given path, or None if the
1563    /// path belongs to the upper trie, or if the lower subtrie for the path doesn't exist or is
1564    /// blinded.
1565    fn lower_subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1566        match SparseSubtrieType::from_path(path) {
1567            SparseSubtrieType::Upper => None,
1568            SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_ref(),
1569        }
1570    }
1571
1572    /// Returns a mutable reference to the lower `SparseSubtrie` for the given path, or None if the
1573    /// path belongs to the upper trie.
1574    ///
1575    /// This method will create/reveal a new lower subtrie for the given path if one isn't already.
1576    /// If one does exist, but its path field is longer than the given path, then the field will be
1577    /// set to the given path.
1578    fn lower_subtrie_for_path_mut(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1579        match SparseSubtrieType::from_path(path) {
1580            SparseSubtrieType::Upper => None,
1581            SparseSubtrieType::Lower(idx) => {
1582                self.lower_subtries[idx].reveal(path);
1583                self.subtrie_heat.mark_modified(idx);
1584                Some(self.lower_subtries[idx].as_revealed_mut().expect("just revealed"))
1585            }
1586        }
1587    }
1588
1589    /// Returns a reference to either the lower or upper `SparseSubtrie` for the given path,
1590    /// depending on the path's length.
1591    ///
1592    /// Returns `None` if a lower subtrie does not exist for the given path.
1593    fn subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1594        if SparseSubtrieType::path_len_is_upper(path.len()) {
1595            Some(&self.upper_subtrie)
1596        } else {
1597            self.lower_subtrie_for_path(path)
1598        }
1599    }
1600
1601    /// Returns a mutable reference to either the lower or upper `SparseSubtrie` for the given path,
1602    /// depending on the path's length.
1603    ///
1604    /// This method will create/reveal a new lower subtrie for the given path if one isn't already.
1605    /// If one does exist, but its path field is longer than the given path, then the field will be
1606    /// set to the given path.
1607    fn subtrie_for_path_mut(&mut self, path: &Nibbles) -> &mut SparseSubtrie {
1608        // We can't just call `lower_subtrie_for_path` and return `upper_subtrie` if it returns
1609        // None, because Rust complains about double mutable borrowing `self`.
1610        if SparseSubtrieType::path_len_is_upper(path.len()) {
1611            &mut self.upper_subtrie
1612        } else {
1613            self.lower_subtrie_for_path_mut(path).unwrap()
1614        }
1615    }
1616
1617    /// Returns a mutable reference to a subtrie without marking it as modified.
1618    /// Used for internal operations like pruning that shouldn't affect heat tracking.
1619    fn subtrie_for_path_mut_untracked(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1620        if SparseSubtrieType::path_len_is_upper(path.len()) {
1621            Some(&mut self.upper_subtrie)
1622        } else {
1623            match SparseSubtrieType::from_path(path) {
1624                SparseSubtrieType::Upper => None,
1625                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_mut(),
1626            }
1627        }
1628    }
1629
    /// Returns the next step in the traversal from the given path towards the leaf with the
    /// given full path: `Found` / `NotFound` when traversal terminates, `ContinueFrom` with the
    /// next path to visit, or `BlindedNode` when the next node on the path is not revealed.
    ///
    ///
    /// ## Panics
    ///
    /// If `from_path` is not a prefix of `leaf_full_path`.
    fn find_next_to_leaf(
        from_path: &Nibbles,
        from_node: &SparseNode,
        leaf_full_path: &Nibbles,
    ) -> FindNextToLeafOutcome {
        debug_assert!(leaf_full_path.len() >= from_path.len());
        debug_assert!(leaf_full_path.starts_with(from_path));

        match from_node {
            // If empty node is found it means the subtrie doesn't have any nodes in it, let alone
            // the target leaf.
            SparseNode::Empty => FindNextToLeafOutcome::NotFound,
            SparseNode::Leaf { key, .. } => {
                // A leaf terminates traversal: it matches only if its full path (current
                // position plus stored key) equals the target path exactly.
                let mut found_full_path = *from_path;
                found_full_path.extend(key);

                if &found_full_path == leaf_full_path {
                    return FindNextToLeafOutcome::Found
                }
                FindNextToLeafOutcome::NotFound
            }
            SparseNode::Extension { key, .. } => {
                // The target path ends at this node, so it cannot be below the extension.
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.extend(key);

                // The extension's child is on the traversal path only if the target still
                // starts with the extended path.
                if !leaf_full_path.starts_with(&child_path) {
                    return FindNextToLeafOutcome::NotFound
                }
                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                // The target path ends at this branch, so no child can hold the leaf.
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                // The next nibble of the target selects which child to descend into; a clear
                // state-mask bit means that child doesn't exist.
                let nibble = leaf_full_path.get_unchecked(from_path.len());
                if !state_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.push_unchecked(nibble);

                // The child exists but is blinded (hash-only); the caller must reveal it
                // before traversal can continue.
                if blinded_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::BlindedNode(child_path);
                }

                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
        }
    }
1692
    /// Called when a child node has collapsed into its parent as part of `remove_leaf`. If the
    /// new parent node is a leaf, then the previous child also was, and if the previous child was
    /// on a lower subtrie while the parent is on an upper then the leaf value needs to be moved to
    /// the upper.
    fn move_value_on_leaf_removal(
        &mut self,
        parent_path: &Nibbles,
        new_parent_node: &SparseNode,
        prev_child_path: &Nibbles,
    ) {
        // If the parent path isn't in the upper then it doesn't matter what the new node is,
        // there's no situation where a leaf value needs to be moved.
        if SparseSubtrieType::from_path(parent_path).lower_index().is_some() {
            return;
        }

        if let SparseNode::Leaf { key, .. } = new_parent_node {
            // If the previous child has no revealed lower subtrie, its value cannot live
            // there, so there's nothing to move.
            let Some(prev_child_subtrie) = self.lower_subtrie_for_path_mut(prev_child_path) else {
                return;
            };

            // Reconstruct the leaf's full path from the (upper) parent position and its key.
            let mut leaf_full_path = *parent_path;
            leaf_full_path.extend(key);

            // Move the value from the lower subtrie's value store to the upper subtrie's.
            let val = prev_child_subtrie.inner.values.remove(&leaf_full_path).expect("ParallelSparseTrie is in an inconsistent state, expected value on subtrie which wasn't found");
            self.upper_subtrie.inner.values.insert(leaf_full_path, val);
        }
    }
1721
    /// Used by `remove_leaf` to ensure that when a node is removed from a lower subtrie that any
    /// externalities are handled. These can include:
    /// - Removing the lower subtrie completely, if it is now empty.
    /// - Updating the `path` field of the lower subtrie to indicate that its root node has changed.
    ///
    /// This method assumes that the caller will deal with putting all other nodes in the trie into
    /// a consistent state after the removal of this one.
    ///
    /// ## Panics
    ///
    /// - If the removed node was not a leaf or extension.
    fn remove_node(&mut self, path: &Nibbles) {
        let subtrie = self.subtrie_for_path_mut(path);
        let node = subtrie.nodes.remove(path);

        let Some(idx) = SparseSubtrieType::from_path(path).lower_index() else {
            // When removing a node from the upper trie there's nothing special we need to do to fix
            // its path field; the upper trie's path is always empty.
            return;
        };

        match node {
            Some(SparseNode::Leaf { .. }) => {
                // If the leaf was the final node in its lower subtrie then we can blind the
                // subtrie, effectively marking it as empty.
                if subtrie.nodes.is_empty() {
                    self.lower_subtries[idx].clear();
                }
            }
            Some(SparseNode::Extension { key, .. }) => {
                // If the removed extension was the root node of a lower subtrie then the lower
                // subtrie's `path` needs to be updated to be whatever node the extension used to
                // point to.
                if &subtrie.path == path {
                    subtrie.path.extend(&key);
                }
            }
            // Only leaves and (collapsed) extensions are ever removed here; anything else
            // indicates the trie is in an inconsistent state.
            _ => panic!("Expected to remove a leaf or extension, but removed {node:?}"),
        }
    }
1762
1763    /// Given the path to a parent branch node and a child node which is the sole remaining child on
1764    /// that branch after removing a leaf, returns a node to replace the parent branch node and a
1765    /// boolean indicating if the child should be deleted.
1766    ///
1767    /// ## Panics
1768    ///
1769    /// - If either parent or child node is not already revealed.
1770    /// - If parent's path is not a prefix of the child's path.
1771    fn branch_changes_on_leaf_removal(
1772        parent_path: &Nibbles,
1773        remaining_child_path: &Nibbles,
1774        remaining_child_node: &SparseNode,
1775    ) -> (SparseNode, bool) {
1776        debug_assert!(remaining_child_path.len() > parent_path.len());
1777        debug_assert!(remaining_child_path.starts_with(parent_path));
1778
1779        let remaining_child_nibble = remaining_child_path.get_unchecked(parent_path.len());
1780
1781        // If we swap the branch node out either an extension or leaf, depending on
1782        // what its remaining child is.
1783        match remaining_child_node {
1784            SparseNode::Empty => {
1785                panic!("remaining child must have been revealed already")
1786            }
1787            // If the only child is a leaf node, we downgrade the branch node into a
1788            // leaf node, prepending the nibble to the key, and delete the old
1789            // child.
1790            SparseNode::Leaf { key, .. } => {
1791                let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1792                new_key.extend(key);
1793                (SparseNode::new_leaf(new_key), true)
1794            }
1795            // If the only child node is an extension node, we downgrade the branch
1796            // node into an even longer extension node, prepending the nibble to the
1797            // key, and delete the old child.
1798            SparseNode::Extension { key, .. } => {
1799                let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1800                new_key.extend(key);
1801                (SparseNode::new_ext(new_key), true)
1802            }
1803            // If the only child is a branch node, we downgrade the current branch
1804            // node into a one-nibble extension node.
1805            SparseNode::Branch { .. } => (
1806                SparseNode::new_ext(Nibbles::from_nibbles_unchecked([remaining_child_nibble])),
1807                false,
1808            ),
1809        }
1810    }
1811
1812    /// Given the path to a parent extension and its key, and a child node (not necessarily on this
1813    /// subtrie), returns an optional replacement parent node. If a replacement is returned then the
1814    /// child node should be deleted.
1815    ///
1816    /// ## Panics
1817    ///
1818    /// - If either parent or child node is not already revealed.
1819    /// - If parent's path is not a prefix of the child's path.
1820    fn extension_changes_on_leaf_removal(
1821        parent_path: &Nibbles,
1822        parent_key: &Nibbles,
1823        child_path: &Nibbles,
1824        child: &SparseNode,
1825    ) -> Option<SparseNode> {
1826        debug_assert!(child_path.len() > parent_path.len());
1827        debug_assert!(child_path.starts_with(parent_path));
1828
1829        // If the parent node is an extension node, we need to look at its child to see
1830        // if we need to merge it.
1831        match child {
1832            SparseNode::Empty => {
1833                panic!("child must be revealed")
1834            }
1835            // For a leaf node, we collapse the extension node into a leaf node,
1836            // extending the key. While it's impossible to encounter an extension node
1837            // followed by a leaf node in a complete trie, it's possible here because we
1838            // could have downgraded the extension node's child into a leaf node from a
1839            // branch in a previous call to `branch_changes_on_leaf_removal`.
1840            SparseNode::Leaf { key, .. } => {
1841                let mut new_key = *parent_key;
1842                new_key.extend(key);
1843                Some(SparseNode::new_leaf(new_key))
1844            }
1845            // Similar to the leaf node, for an extension node, we collapse them into one
1846            // extension node, extending the key.
1847            SparseNode::Extension { key, .. } => {
1848                let mut new_key = *parent_key;
1849                new_key.extend(key);
1850                Some(SparseNode::new_ext(new_key))
1851            }
1852            // For a branch node, we just leave the extension node as-is.
1853            SparseNode::Branch { .. } => None,
1854        }
1855    }
1856
1857    /// Drains any [`SparseTrieUpdatesAction`]s from the given subtrie, and applies each action to
1858    /// the given `updates` set. If the given set is None then this is a no-op.
1859    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
1860    fn apply_subtrie_update_actions(
1861        &mut self,
1862        update_actions: impl Iterator<Item = SparseTrieUpdatesAction>,
1863    ) {
1864        if let Some(updates) = self.updates.as_mut() {
1865            let additional = update_actions.size_hint().0;
1866            updates.updated_nodes.reserve(additional);
1867            updates.removed_nodes.reserve(additional);
1868            for action in update_actions {
1869                match action {
1870                    SparseTrieUpdatesAction::InsertRemoved(path) => {
1871                        updates.updated_nodes.remove(&path);
1872                        updates.removed_nodes.insert(path);
1873                    }
1874                    SparseTrieUpdatesAction::RemoveUpdated(path) => {
1875                        updates.updated_nodes.remove(&path);
1876                    }
1877                    SparseTrieUpdatesAction::InsertUpdated(path, branch_node) => {
1878                        updates.updated_nodes.insert(path, branch_node);
1879                        updates.removed_nodes.remove(&path);
1880                    }
1881                }
1882            }
1883        };
1884    }
1885
    /// Updates hashes for the upper subtrie, using nodes from both upper and lower subtries.
    ///
    /// Walks the upper subtrie iteratively from the root via the path stack; paths at
    /// `UPPER_TRIE_MAX_DEPTH` resolve to lower-subtrie root nodes, whose RLP nodes must
    /// already be cached. Returns the root RLP node of the whole trie.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, ret)]
    fn update_upper_subtrie_hashes(&mut self, prefix_set: &mut PrefixSet) -> RlpNode {
        trace!(target: "trie::parallel_sparse", "Updating upper subtrie hashes");

        debug_assert!(self.upper_subtrie.inner.buffers.path_stack.is_empty());
        self.upper_subtrie.inner.buffers.path_stack.push(RlpNodePathStackItem {
            path: Nibbles::default(), // Start from root
            is_in_prefix_set: None,
        });

        #[cfg(feature = "metrics")]
        let start = Instant::now();

        // Only allocate an update-actions buffer when update retention is enabled; buffers
        // are pooled in `update_actions_buffers` to avoid re-allocating on every call.
        let mut update_actions_buf =
            self.updates_enabled().then(|| self.update_actions_buffers.pop().unwrap_or_default());

        while let Some(stack_item) = self.upper_subtrie.inner.buffers.path_stack.pop() {
            let path = stack_item.path;
            // Short paths live in the upper subtrie; paths at the boundary depth are the
            // roots of lower subtries.
            let node = if path.len() < UPPER_TRIE_MAX_DEPTH {
                self.upper_subtrie.nodes.get_mut(&path).expect("upper subtrie node must exist")
            } else {
                let index = path_subtrie_index_unchecked(&path);
                let node = self.lower_subtries[index]
                    .as_revealed_mut()
                    .expect("lower subtrie must exist")
                    .nodes
                    .get_mut(&path)
                    .expect("lower subtrie node must exist");
                // Lower subtrie root node RLP nodes must be computed before updating upper subtrie
                // hashes
                debug_assert!(
                    node.cached_rlp_node().is_some(),
                    "Lower subtrie root node {node:?} at path {path:?} has no cached RLP node"
                );
                node
            };

            // Calculate the RLP node for the current node using upper subtrie
            self.upper_subtrie.inner.rlp_node(
                prefix_set,
                &mut update_actions_buf,
                stack_item,
                node,
                &self.branch_node_masks,
            );
        }

        // If there were any branch node updates as a result of calculating the RLP node for the
        // upper trie then apply them to the top-level set.
        if let Some(mut update_actions_buf) = update_actions_buf {
            self.apply_subtrie_update_actions(
                #[allow(clippy::iter_with_drain)]
                update_actions_buf.drain(..),
            );
            // Return the (now empty) buffer to the pool for reuse.
            self.update_actions_buffers.push(update_actions_buf);
        }

        #[cfg(feature = "metrics")]
        self.metrics.subtrie_upper_hash_latency.record(start.elapsed());

        // The traversal must leave exactly one entry — the root's RLP node — on the stack.
        debug_assert_eq!(self.upper_subtrie.inner.buffers.rlp_node_stack.len(), 1);
        self.upper_subtrie.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
    }
1950
    /// Returns:
    /// 1. List of lower [subtries](SparseSubtrie) that have changed according to the provided
    ///    [prefix set](PrefixSet). See documentation of [`ChangedSubtrie`] for more details. Lower
    ///    subtries whose root node is missing a hash will also be returned; this is required to
    ///    handle cases where extensions/leafs get shortened and therefore moved from the upper to a
    ///    lower subtrie.
    /// 2. Prefix set of keys that do not belong to any lower subtrie.
    ///
    /// This method helps optimize hash recalculations by identifying which specific
    /// lower subtries need to be updated. Each lower subtrie can then be updated in parallel.
    ///
    /// IMPORTANT: The method removes the subtries from `lower_subtries`, and the caller is
    /// responsible for returning them back into the array.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(prefix_set_len = prefix_set.len()))]
    fn take_changed_lower_subtries(
        &mut self,
        prefix_set: &mut PrefixSet,
    ) -> (Vec<ChangedSubtrie>, PrefixSetMut) {
        // Fast-path: If the prefix set is empty then no subtries can have been changed. Just return
        // empty values.
        if prefix_set.is_empty() {
            return Default::default();
        }

        // Clone the prefix set to iterate over its keys. Cloning is cheap, it's just an Arc.
        let prefix_set_clone = prefix_set.clone();
        let mut prefix_set_iter = prefix_set_clone.into_iter().copied().peekable();
        let mut changed_subtries = Vec::new();
        let mut unchanged_prefix_set = PrefixSetMut::default();
        let updates_enabled = self.updates_enabled();

        for (index, subtrie) in self.lower_subtries.iter_mut().enumerate() {
            // Take the subtrie if it's changed (its path is in the prefix set) or if its root
            // node has no cached RLP node yet and therefore must be recomputed regardless.
            if let Some(subtrie) = subtrie.take_revealed_if(|subtrie| {
                prefix_set.contains(&subtrie.path) ||
                    subtrie
                        .nodes
                        .get(&subtrie.path)
                        .is_some_and(|n| n.cached_rlp_node().is_none())
            }) {
                let prefix_set = if prefix_set.all() {
                    unchanged_prefix_set = PrefixSetMut::all();
                    PrefixSetMut::all()
                } else {
                    // Take those keys from the original prefix set that start with the subtrie path
                    //
                    // Subtries are stored in the order of their paths, so we can use the same
                    // prefix set iterator.
                    let mut new_prefix_set = Vec::new();
                    while let Some(key) = prefix_set_iter.peek() {
                        if key.starts_with(&subtrie.path) {
                            // If the key starts with the subtrie path, add it to the new prefix set
                            new_prefix_set.push(prefix_set_iter.next().unwrap());
                        } else if new_prefix_set.is_empty() && key < &subtrie.path {
                            // If we didn't yet have any keys that belong to this subtrie, and the
                            // current key is still less than the subtrie path, add it to the
                            // unchanged prefix set
                            unchanged_prefix_set.insert(prefix_set_iter.next().unwrap());
                        } else {
                            // If we're past the subtrie path, we're done with this subtrie. Do not
                            // advance the iterator, the next key will be processed either by the
                            // next subtrie or inserted into the unchanged prefix set.
                            break
                        }
                    }
                    PrefixSetMut::from(new_prefix_set)
                }
                .freeze();

                // We need to add the full path of the lower subtrie's root node to the unchanged
                // prefix set, so that we don't skip it when calculating hashes for the upper
                // subtrie.
                match subtrie.nodes.get(&subtrie.path) {
                    Some(SparseNode::Extension { key, .. } | SparseNode::Leaf { key, .. }) => {
                        unchanged_prefix_set.insert(subtrie.path.join(key));
                    }
                    Some(SparseNode::Branch { .. }) => {
                        unchanged_prefix_set.insert(subtrie.path);
                    }
                    _ => {}
                }

                // Hand each changed subtrie a pooled update-actions buffer when update
                // retention is enabled.
                let update_actions_buf =
                    updates_enabled.then(|| self.update_actions_buffers.pop().unwrap_or_default());

                changed_subtries.push(ChangedSubtrie {
                    index,
                    subtrie,
                    prefix_set,
                    update_actions_buf,
                });
            }
        }

        // Extend the unchanged prefix set with the remaining keys that are not part of any subtries
        unchanged_prefix_set.extend_keys(prefix_set_iter);

        (changed_subtries, unchanged_prefix_set)
    }
2048
2049    /// Returns an iterator over all nodes in the trie in no particular order.
2050    #[cfg(test)]
2051    fn all_nodes(&self) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
2052        let mut nodes = vec![];
2053        for subtrie in self.lower_subtries.iter().filter_map(LowerSparseSubtrie::as_revealed_ref) {
2054            nodes.extend(subtrie.nodes.iter())
2055        }
2056        nodes.extend(self.upper_subtrie.nodes.iter());
2057        nodes
2058    }
2059
    /// Reveals a trie node in the upper trie if it has not been revealed before. When revealing
    /// branch/extension nodes this may recurse into a lower trie to reveal a child.
    ///
    /// This function decodes a trie node and inserts it into the trie structure. It handles
    /// different node types (leaf, extension, branch) by appropriately adding them to the trie and
    /// recursively revealing their children.
    ///
    /// # Arguments
    ///
    /// * `path` - The path where the node should be revealed
    /// * `node` - The trie node to reveal
    /// * `masks` - Branch node masks if known
    ///
    /// # Returns
    ///
    /// `Ok(())` if successful, or an error if the node was not revealed.
    fn reveal_upper_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
    ) -> SparseTrieResult<()> {
        // Only reveal nodes that can be reached given the current state of the upper trie. If they
        // can't be reached, it means that they were removed.
        if !self.is_path_reachable_from_upper(&path) {
            return Ok(())
        }

        // Exit early if the node was already revealed before.
        // `reveal_node` returning `false` means the upper subtrie already knew about this node.
        if !self.upper_subtrie.reveal_node(path, node, masks, None)? {
            if let TrieNodeV2::Branch(branch) = node {
                // A branch with an empty key has no embedded extension, so there is nothing
                // further to reveal in a lower subtrie.
                if branch.key.is_empty() {
                    return Ok(());
                }

                // We might still potentially need to reveal a child branch node in the lower
                // subtrie, even if the upper subtrie already knew about the extension node.
                if SparseSubtrieType::path_len_is_upper(path.len() + branch.key.len()) {
                    return Ok(())
                }
            } else {
                return Ok(());
            }
        }

        // The previous upper_trie.reveal_node call will not have revealed any child nodes via
        // reveal_node_or_hash if the child node would be found on a lower subtrie. We handle that
        // here by manually checking the specific cases where this could happen, and calling
        // reveal_node_or_hash for each.
        match node {
            TrieNodeV2::Branch(branch) => {
                let mut branch_path = path;
                branch_path.extend(&branch.key);

                // If only the parent extension belongs to the upper trie, we need to reveal the
                // actual branch node in the corresponding lower subtrie.
                if !SparseSubtrieType::path_len_is_upper(branch_path.len()) {
                    self.lower_subtrie_for_path_mut(&branch_path)
                        .expect("branch_path must have a lower subtrie")
                        .reveal_branch(
                            branch_path,
                            branch.state_mask,
                            &branch.stack,
                            masks,
                            branch.branch_rlp_node.clone(),
                        )?
                } else if !SparseSubtrieType::path_len_is_upper(branch_path.len() + 1) {
                    // If a branch is at the cutoff level of the trie then it will be in the upper
                    // trie, but all of its children will be in a lower trie.
                    // Check if a child node would be in the lower subtrie, and
                    // reveal accordingly.
                    for (stack_ptr, idx) in branch.state_mask.iter().enumerate() {
                        let mut child_path = branch_path;
                        child_path.push_unchecked(idx);
                        let child = &branch.stack[stack_ptr];

                        // Only reveal children that are not hashes. Hashes are stored on branch
                        // nodes directly.
                        if !child.is_hash() {
                            self.lower_subtrie_for_path_mut(&child_path)
                                .expect("child_path must have a lower subtrie")
                                .reveal_node(
                                    child_path,
                                    &TrieNodeV2::decode(&mut branch.stack[stack_ptr].as_ref())?,
                                    None,
                                    None,
                                )?;
                        }
                    }
                }
            }
            TrieNodeV2::Extension(ext) => {
                // The extension's child may land in a lower subtrie; reveal it there if so.
                let mut child_path = path;
                child_path.extend(&ext.key);
                if let Some(subtrie) = self.lower_subtrie_for_path_mut(&child_path) {
                    subtrie.reveal_node(
                        child_path,
                        &TrieNodeV2::decode(&mut ext.child.as_ref())?,
                        None,
                        None,
                    )?;
                }
            }
            // Leaves and the empty root have no children that could live in a lower subtrie.
            TrieNodeV2::EmptyRoot | TrieNodeV2::Leaf(_) => (),
        }

        Ok(())
    }
2168
2169    /// Return updated subtries back to the trie after executing any actions required on the
2170    /// top-level `SparseTrieUpdates`.
2171    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
2172    fn insert_changed_subtries(
2173        &mut self,
2174        changed_subtries: impl IntoIterator<Item = ChangedSubtrie>,
2175    ) {
2176        for ChangedSubtrie { index, subtrie, update_actions_buf, .. } in changed_subtries {
2177            if let Some(mut update_actions_buf) = update_actions_buf {
2178                self.apply_subtrie_update_actions(
2179                    #[allow(clippy::iter_with_drain)]
2180                    update_actions_buf.drain(..),
2181                );
2182                self.update_actions_buffers.push(update_actions_buf);
2183            }
2184
2185            self.lower_subtries[index] = LowerSparseSubtrie::Revealed(subtrie);
2186            self.subtrie_heat.mark_modified(index);
2187        }
2188    }
2189
2190    /// Returns a heuristic for the in-memory size of this trie in bytes.
2191    ///
2192    /// This is an approximation that accounts for:
2193    /// - The upper subtrie nodes and values
2194    /// - All revealed lower subtries nodes and values
2195    /// - The prefix set keys
2196    /// - The branch node masks map
2197    /// - Updates if retained
2198    /// - Update action buffers
2199    ///
2200    /// Note: Heap allocations for hash maps may be larger due to load factor overhead.
2201    pub fn memory_size(&self) -> usize {
2202        let mut size = core::mem::size_of::<Self>();
2203
2204        // Upper subtrie
2205        size += self.upper_subtrie.memory_size();
2206
2207        // Lower subtries (both Revealed and Blind with allocation)
2208        for subtrie in self.lower_subtries.iter() {
2209            size += subtrie.memory_size();
2210        }
2211
2212        // Prefix set keys
2213        size += self.prefix_set.len() * core::mem::size_of::<Nibbles>();
2214
2215        // Branch node masks map
2216        size += self.branch_node_masks.len() *
2217            (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeMasks>());
2218
2219        // Updates if present
2220        if let Some(updates) = &self.updates {
2221            size += updates.updated_nodes.len() *
2222                (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeCompact>());
2223            size += updates.removed_nodes.len() * core::mem::size_of::<Nibbles>();
2224        }
2225
2226        // Update actions buffers
2227        for buf in &self.update_actions_buffers {
2228            size += buf.capacity() * core::mem::size_of::<SparseTrieUpdatesAction>();
2229        }
2230
2231        size
2232    }
2233
2234    /// Determines if the given path can be directly reached from the upper trie.
2235    fn is_path_reachable_from_upper(&self, path: &Nibbles) -> bool {
2236        let mut current = Nibbles::default();
2237        while current.len() < path.len() {
2238            let Some(node) = self.upper_subtrie.nodes.get(&current) else { return false };
2239            match node {
2240                SparseNode::Branch { state_mask, .. } => {
2241                    if !state_mask.is_bit_set(path.get_unchecked(current.len())) {
2242                        return false
2243                    }
2244
2245                    current.push_unchecked(path.get_unchecked(current.len()));
2246                }
2247                SparseNode::Extension { key, .. } => {
2248                    if *key != path.slice(current.len()..current.len() + key.len()) {
2249                        return false
2250                    }
2251                    current.extend(key);
2252                }
2253                SparseNode::Empty | SparseNode::Leaf { .. } => return false,
2254            }
2255        }
2256        true
2257    }
2258
2259    /// Checks if a boundary leaf (at `path.len() == UPPER_TRIE_MAX_DEPTH`) is reachable from its
2260    /// parent branch in the upper subtrie.
2261    ///
2262    /// This is used for leaves that sit at the upper/lower subtrie boundary, where the leaf is
2263    /// in a lower subtrie but its parent branch is in the upper subtrie.
2264    fn is_boundary_leaf_reachable(
2265        upper_nodes: &HashMap<Nibbles, SparseNode>,
2266        path: &Nibbles,
2267        node: &TrieNodeV2,
2268    ) -> bool {
2269        debug_assert_eq!(path.len(), UPPER_TRIE_MAX_DEPTH);
2270
2271        if !matches!(node, TrieNodeV2::Leaf(_)) {
2272            return true
2273        }
2274
2275        let parent_path = path.slice(..path.len() - 1);
2276        let leaf_nibble = path.get_unchecked(path.len() - 1);
2277
2278        match upper_nodes.get(&parent_path) {
2279            Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2280            _ => false,
2281        }
2282    }
2283
2284    /// Returns a bitset of all subtries that are reachable from the upper trie. If subtrie is not
2285    /// reachable it means that it does not exist.
2286    fn reachable_subtries(&self) -> SubtriesBitmap {
2287        let mut reachable = SubtriesBitmap::default();
2288
2289        let mut stack = Vec::new();
2290        stack.push(Nibbles::default());
2291
2292        while let Some(current) = stack.pop() {
2293            let Some(node) = self.upper_subtrie.nodes.get(&current) else { continue };
2294            match node {
2295                SparseNode::Branch { state_mask, .. } => {
2296                    for idx in state_mask.iter() {
2297                        let mut next = current;
2298                        next.push_unchecked(idx);
2299                        if next.len() >= UPPER_TRIE_MAX_DEPTH {
2300                            reachable.set(path_subtrie_index_unchecked(&next));
2301                        } else {
2302                            stack.push(next);
2303                        }
2304                    }
2305                }
2306                SparseNode::Extension { key, .. } => {
2307                    let mut next = current;
2308                    next.extend(key);
2309                    if next.len() >= UPPER_TRIE_MAX_DEPTH {
2310                        reachable.set(path_subtrie_index_unchecked(&next));
2311                    } else {
2312                        stack.push(next);
2313                    }
2314                }
2315                SparseNode::Empty | SparseNode::Leaf { .. } => {}
2316            };
2317        }
2318
2319        reachable
2320    }
2321}
2322
/// A 256-bit set indexed by lower-subtrie index.
///
/// Used both to track which lower subtries were modified in the current cycle (see
/// [`SubtrieModifications`]) and to report which subtries are reachable from the upper trie.
#[derive(Clone, Default, PartialEq, Eq, Debug)]
struct SubtriesBitmap(U256);
2326
impl SubtriesBitmap {
    /// Sets the bit for the given subtrie index.
    #[inline]
    fn set(&mut self, idx: usize) {
        debug_assert!(idx < NUM_LOWER_SUBTRIES);
        self.0.set_bit(idx, true);
    }

    /// Returns whether the bit for the given subtrie index is set.
    #[inline]
    fn get(&self, idx: usize) -> bool {
        debug_assert!(idx < NUM_LOWER_SUBTRIES);
        self.0.bit(idx)
    }

    /// Clears all bits.
    #[inline]
    const fn clear(&mut self) {
        self.0 = U256::ZERO;
    }
}
2348
/// Tracks heat (modification frequency) for each of the 256 lower subtries.
///
/// Heat is used to avoid pruning frequently-modified subtries, which would cause
/// expensive re-reveal operations on subsequent updates.
///
/// - Heat is incremented by 1 (saturating at 255) when a subtrie is modified
/// - Heat decays by 1 each prune cycle for subtries not modified that cycle
/// - Subtries with heat > 0 are considered "hot" and skipped during pruning
#[derive(Clone, PartialEq, Eq, Debug)]
struct SubtrieModifications {
    /// Heat level (0-255) for each of the 256 lower subtries.
    heat: [u8; NUM_LOWER_SUBTRIES],
    /// Tracks which subtries were modified in the current cycle.
    modified: SubtriesBitmap,
}
2364
impl Default for SubtrieModifications {
    // Manual impl: `#[derive(Default)]` would require `Default` for the 256-element
    // `heat` array, which the standard library does not provide for arrays this large.
    fn default() -> Self {
        Self { heat: [0; NUM_LOWER_SUBTRIES], modified: SubtriesBitmap::default() }
    }
}
2370
2371impl SubtrieModifications {
2372    /// Marks a subtrie as modified, incrementing its heat by 1.
2373    #[inline]
2374    fn mark_modified(&mut self, idx: usize) {
2375        debug_assert!(idx < NUM_LOWER_SUBTRIES);
2376        self.modified.set(idx);
2377        self.heat[idx] = self.heat[idx].saturating_add(1);
2378    }
2379
2380    /// Returns whether a subtrie is currently hot (heat > 0).
2381    #[inline]
2382    fn is_hot(&self, idx: usize) -> bool {
2383        debug_assert!(idx < NUM_LOWER_SUBTRIES);
2384        self.heat[idx] > 0
2385    }
2386
2387    /// Decays heat for subtries not modified this cycle and resets modification tracking.
2388    /// Called at the start of each prune cycle.
2389    fn decay_and_reset(&mut self) {
2390        for (idx, heat) in self.heat.iter_mut().enumerate() {
2391            if !self.modified.get(idx) {
2392                *heat = heat.saturating_sub(1);
2393            }
2394        }
2395        self.modified.clear();
2396    }
2397
2398    /// Clears all heat tracking state.
2399    const fn clear(&mut self) {
2400        self.heat = [0; NUM_LOWER_SUBTRIES];
2401        self.modified.clear();
2402    }
2403}
2404
/// This is a subtrie of the [`ParallelSparseTrie`] that contains a map from path to sparse trie
/// nodes.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrie {
    /// The root path of this subtrie.
    ///
    /// This is the _full_ path to this subtrie, meaning it includes the first
    /// [`UPPER_TRIE_MAX_DEPTH`] nibbles that we also use for indexing subtries in the
    /// [`ParallelSparseTrie`].
    ///
    /// There should be a node for this path in `nodes` map.
    pub(crate) path: Nibbles,
    /// The map from full paths to sparse trie nodes within this subtrie.
    nodes: HashMap<Nibbles, SparseNode>,
    /// Subset of fields for mutable access while `nodes` field is also being mutably borrowed.
    inner: SparseSubtrieInner,
}
2422
/// Returned by the `find_next_to_leaf` method to indicate either that the leaf has been found,
/// traversal should be continued from the given path, or the leaf is not in the trie.
enum FindNextToLeafOutcome {
    /// `Found` indicates that the leaf was found at the given path.
    Found,
    /// `ContinueFrom` indicates that traversal should continue from the contained path.
    ContinueFrom(Nibbles),
    /// `NotFound` indicates that there is no way to traverse to the leaf, as it is not in the
    /// trie.
    NotFound,
    /// `BlindedNode` indicates that the node at the contained path is blinded (known only by
    /// hash) and cannot be traversed.
    BlindedNode(Nibbles),
}
2437
2438impl SparseSubtrie {
2439    /// Creates a new empty subtrie with the specified root path.
2440    pub(crate) fn new(path: Nibbles) -> Self {
2441        Self { path, ..Default::default() }
2442    }
2443
    /// Returns true if this subtrie contains no nodes at all, false otherwise.
    pub(crate) fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }
2448
2449    /// Returns true if the current path and its child are both found in the same level.
2450    fn is_child_same_level(current_path: &Nibbles, child_path: &Nibbles) -> bool {
2451        let current_level = core::mem::discriminant(&SparseSubtrieType::from_path(current_path));
2452        let child_level = core::mem::discriminant(&SparseSubtrieType::from_path(child_path));
2453        current_level == child_level
2454    }
2455
2456    /// Checks if a leaf node at the given path is reachable from its parent branch node.
2457    ///
2458    /// Returns `true` if:
2459    /// - The path is at the root (no parent to check)
2460    /// - The parent branch node has the corresponding `state_mask` bit set for this leaf
2461    ///
2462    /// Returns `false` if the parent is a branch node that doesn't have the `state_mask` bit set
2463    /// for this leaf's nibble, meaning the leaf is not reachable.
2464    fn is_leaf_reachable_from_parent(&self, path: &Nibbles) -> bool {
2465        if path.is_empty() {
2466            return true
2467        }
2468
2469        let parent_path = path.slice(..path.len() - 1);
2470        let leaf_nibble = path.get_unchecked(path.len() - 1);
2471
2472        match self.nodes.get(&parent_path) {
2473            Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2474            _ => false,
2475        }
2476    }
2477
    /// Updates or inserts a leaf node at the specified key path with the provided RLP-encoded
    /// value.
    ///
    /// If the value for `full_path` already exists, it is simply overwritten with no structural
    /// changes. Otherwise this method adjusts the trie structure by inserting new leaf nodes,
    /// splitting branch nodes, or collapsing extension nodes as needed.
    ///
    /// # Returns
    ///
    /// `Ok(())` on success. An error is returned if the traversal encounters a blinded branch
    /// child (see `update_next_node`).
    ///
    /// The leaf value itself is only inserted into the value map after all structural changes
    /// succeed.
    /// NOTE(review): node insertions made before an error are not visibly rolled back here —
    /// confirm whether full atomicity is guaranteed by the caller.
    pub fn update_leaf(&mut self, full_path: Nibbles, value: Vec<u8>) -> SparseTrieResult<()> {
        debug_assert!(full_path.starts_with(&self.path));

        // Check if value already exists - if so, just update it (no structural changes needed)
        if let Entry::Occupied(mut e) = self.inner.values.entry(full_path) {
            e.insert(value);
            return Ok(())
        }

        // Here we are starting at the root of the subtrie, and traversing from there.
        let mut current = Some(self.path);

        while let Some(current_path) = current.as_mut() {
            match self.update_next_node(current_path, &full_path)? {
                LeafUpdateStep::Continue => {}
                LeafUpdateStep::NodeNotFound | LeafUpdateStep::Complete { .. } => break,
            }
        }

        // Only insert the value after all structural changes succeed
        self.inner.values.insert(full_path, value);

        Ok(())
    }
2517
    /// Processes the current node, returning what to do next in the leaf update process.
    ///
    /// This will add or update any nodes in the trie as necessary.
    ///
    /// # Arguments
    ///
    /// * `current` - Path of the node being examined; advanced in place as traversal descends.
    /// * `path` - Full path of the leaf being inserted.
    ///
    /// # Returns
    ///
    /// A `LeafUpdateStep` containing the next node to process (if any) and
    /// the paths of nodes that were inserted during this step. Returns an error if traversal
    /// reaches a blinded branch child.
    fn update_next_node(
        &mut self,
        current: &mut Nibbles,
        path: &Nibbles,
    ) -> SparseTrieResult<LeafUpdateStep> {
        debug_assert!(path.starts_with(&self.path));
        debug_assert!(current.starts_with(&self.path));
        debug_assert!(path.starts_with(current));
        let Some(node) = self.nodes.get_mut(current) else {
            return Ok(LeafUpdateStep::NodeNotFound);
        };

        match node {
            SparseNode::Empty => {
                // We need to insert the node with a different path and key depending on the path of
                // the subtrie.
                let path = path.slice(self.path.len()..);
                *node = SparseNode::new_leaf(path);
                Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
            }
            SparseNode::Leaf { key: current_key, .. } => {
                current.extend(current_key);

                // this leaf is being updated
                debug_assert!(current != path, "we already checked leaf presence in the beginning");

                // find the common prefix
                let common = current.common_prefix_length(path);

                // update existing node: the old leaf becomes an extension covering the
                // shared prefix
                let new_ext_key = current.slice(current.len() - current_key.len()..common);
                *node = SparseNode::new_ext(new_ext_key);

                // create a branch node and corresponding leaves: the branch splits at the
                // first nibble where the two paths diverge
                self.nodes.reserve(3);
                let branch_path = current.slice(..common);
                let new_leaf_path = path.slice(..=common);
                let existing_leaf_path = current.slice(..=common);

                self.nodes.insert(
                    branch_path,
                    SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    ),
                );
                self.nodes.insert(new_leaf_path, SparseNode::new_leaf(path.slice(common + 1..)));
                self.nodes
                    .insert(existing_leaf_path, SparseNode::new_leaf(current.slice(common + 1..)));

                Ok(LeafUpdateStep::complete_with_insertions(vec![
                    branch_path,
                    new_leaf_path,
                    existing_leaf_path,
                ]))
            }
            SparseNode::Extension { key, .. } => {
                current.extend(key);

                if !path.starts_with(current) {
                    // find the common prefix
                    let common = current.common_prefix_length(path);
                    // shorten the extension to cover only the shared prefix
                    *key = current.slice(current.len() - key.len()..common);

                    // create state mask for new branch node
                    // NOTE: this might overwrite the current extension node
                    self.nodes.reserve(3);
                    let branch_path = current.slice(..common);
                    let new_leaf_path = path.slice(..=common);
                    let branch = SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    );

                    self.nodes.insert(branch_path, branch);

                    // create new leaf
                    let new_leaf = SparseNode::new_leaf(path.slice(common + 1..));
                    self.nodes.insert(new_leaf_path, new_leaf);

                    let mut inserted_nodes = vec![branch_path, new_leaf_path];

                    // recreate extension to previous child if needed
                    let key = current.slice(common + 1..);
                    if !key.is_empty() {
                        let ext_path = current.slice(..=common);
                        self.nodes.insert(ext_path, SparseNode::new_ext(key));
                        inserted_nodes.push(ext_path);
                    }

                    return Ok(LeafUpdateStep::complete_with_insertions(inserted_nodes))
                }

                // The leaf path continues through this extension; keep descending.
                Ok(LeafUpdateStep::Continue)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                let nibble = path.get_unchecked(current.len());
                current.push_unchecked(nibble);

                // No child at this nibble yet: attach the new leaf directly to the branch.
                if !state_mask.is_bit_set(nibble) {
                    state_mask.set_bit(nibble);
                    let new_leaf = SparseNode::new_leaf(path.slice(current.len()..));
                    self.nodes.insert(*current, new_leaf);
                    return Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
                }

                // The child exists but is blinded (known only by hash); cannot descend.
                if blinded_mask.is_bit_set(nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(*current).into());
                }

                // If the nibble is set, we can continue traversing the branch.
                Ok(LeafUpdateStep::Continue)
            }
        }
    }
2639
2640    /// Reveals a branch node at the given path.
2641    fn reveal_branch(
2642        &mut self,
2643        path: Nibbles,
2644        state_mask: TrieMask,
2645        children: &[RlpNode],
2646        masks: Option<BranchNodeMasks>,
2647        rlp_node: Option<RlpNode>,
2648    ) -> SparseTrieResult<()> {
2649        match self.nodes.entry(path) {
2650            Entry::Occupied(_) => {
2651                // Branch already revealed, do nothing
2652                return Ok(());
2653            }
2654            Entry::Vacant(entry) => {
2655                let state =
2656                    match rlp_node.as_ref() {
2657                        Some(rlp_node) => SparseNodeState::Cached {
2658                            rlp_node: rlp_node.clone(),
2659                            store_in_db_trie: Some(masks.is_some_and(|m| {
2660                                !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
2661                            })),
2662                        },
2663                        None => SparseNodeState::Dirty,
2664                    };
2665
2666                let mut blinded_mask = TrieMask::default();
2667                let mut blinded_hashes = Box::new([B256::ZERO; 16]);
2668
2669                for (stack_ptr, idx) in state_mask.iter().enumerate() {
2670                    let mut child_path = path;
2671                    child_path.push_unchecked(idx);
2672                    let child = &children[stack_ptr];
2673
2674                    if let Some(hash) = child.as_hash() {
2675                        blinded_mask.set_bit(idx);
2676                        blinded_hashes[idx as usize] = hash;
2677                    }
2678                }
2679
2680                entry.insert(SparseNode::Branch {
2681                    state_mask,
2682                    state,
2683                    blinded_mask,
2684                    blinded_hashes,
2685                });
2686            }
2687        }
2688
2689        // For a branch node, iterate over all children. This must happen second so leaf
2690        // children can check connectivity with parent branch.
2691        for (stack_ptr, idx) in state_mask.iter().enumerate() {
2692            let mut child_path = path;
2693            child_path.push_unchecked(idx);
2694            let child = &children[stack_ptr];
2695            if !child.is_hash() && Self::is_child_same_level(&path, &child_path) {
2696                // Reveal each child node or hash it has, but only if the child is on
2697                // the same level as the parent.
2698                self.reveal_node(
2699                    child_path,
2700                    &TrieNodeV2::decode(&mut child.as_ref())?,
2701                    None,
2702                    None,
2703                )?;
2704            }
2705        }
2706
2707        Ok(())
2708    }
2709
    /// Internal implementation of the method of the same name on `ParallelSparseTrie`.
    ///
    /// This accepts `hash_from_upper` to handle cases when boundary nodes revealed in lower subtrie
    /// but its blinded hash is known from the upper subtrie.
    ///
    /// # Parameters
    ///
    /// - `path`: Path of the node to reveal; must start with this subtrie's root path.
    /// - `node`: The decoded trie node being revealed.
    /// - `masks`: Tree/hash masks of the node, if known.
    /// - `hash_from_upper`: Hash of the node as recorded by the upper subtrie, if any.
    ///
    /// # Returns
    ///
    /// `Ok(true)` if the node was revealed, `Ok(false)` if it was skipped (already revealed, not
    /// reachable from its parent branch, or its leaf value is already present).
    fn reveal_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        hash_from_upper: Option<B256>,
    ) -> SparseTrieResult<bool> {
        debug_assert!(path.starts_with(&self.path));

        // If the node is already revealed, do nothing.
        if self.nodes.contains_key(&path) {
            return Ok(false);
        }

        // If the hash is provided from the upper subtrie, use it. Otherwise, find the parent branch
        // node, unset its blinded bit and use the hash.
        let hash = if let Some(hash) = hash_from_upper {
            Some(hash)
        } else if path.len() != UPPER_TRIE_MAX_DEPTH && !path.is_empty() {
            // The parent must be a branch in this subtrie with a child bit set at this nibble;
            // otherwise the node is unreachable and revealing is skipped.
            let Some(SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. }) =
                self.nodes.get_mut(&path.slice(0..path.len() - 1))
            else {
                return Ok(false);
            };
            let nibble = path.last().unwrap();
            if !state_mask.is_bit_set(nibble) {
                return Ok(false);
            }

            // Consume the parent's blinded hash for this child: clear the blinded bit and take
            // the stored hash.
            blinded_mask.is_bit_set(nibble).then(|| {
                blinded_mask.unset_bit(nibble);
                blinded_hashes[nibble as usize]
            })
        } else {
            None
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            ?masks,
            "Revealing node",
        );

        match node {
            TrieNodeV2::EmptyRoot => {
                // For an empty root, ensure that we are at the root path, and at the upper subtrie.
                debug_assert!(path.is_empty());
                debug_assert!(self.path.is_empty());
                self.nodes.insert(path, SparseNode::Empty);
            }
            TrieNodeV2::Branch(branch) => {
                // An empty key means this is a plain branch node; reveal it directly at `path`.
                if branch.key.is_empty() {
                    self.reveal_branch(
                        path,
                        branch.state_mask,
                        &branch.stack,
                        masks,
                        hash.as_ref().map(RlpNode::word_rlp),
                    )?;
                    return Ok(true);
                }

                // A non-empty key encodes an extension node followed by the branch itself:
                // insert the extension at `path`, then reveal the branch at the extended path.
                self.nodes.insert(
                    path,
                    SparseNode::Extension {
                        key: branch.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                // Inherit `store_in_db_trie` from the child branch
                                // node masks so that the memoized hash can be used
                                // without needing to fetch the child branch.
                                store_in_db_trie: Some(masks.is_some_and(|m| {
                                    !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
                                })),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );

                let mut branch_path = path;
                branch_path.extend(&branch.key);

                // Exit early if the actual branch node does not belong to this subtrie.
                if !Self::is_child_same_level(&path, &branch_path) {
                    return Ok(true);
                }

                // Reveal the actual branch node.
                self.reveal_branch(
                    branch_path,
                    branch.state_mask,
                    &branch.stack,
                    masks,
                    branch.branch_rlp_node.clone(),
                )?;
            }
            // Extensions are carried on `TrieNodeV2::Branch` via a non-empty key (handled above),
            // so a standalone extension variant is never expected here.
            TrieNodeV2::Extension(_) => unreachable!(),
            TrieNodeV2::Leaf(leaf) => {
                // Skip the reachability check when path.len() == UPPER_TRIE_MAX_DEPTH because
                // at that boundary the leaf is in the lower subtrie but its parent branch is in
                // the upper subtrie. The subtrie cannot check connectivity across the upper/lower
                // boundary, so that check happens in `reveal_nodes` instead.
                if path.len() != UPPER_TRIE_MAX_DEPTH && !self.is_leaf_reachable_from_parent(&path)
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        "Leaf not reachable from parent branch, skipping",
                    );
                    return Ok(false)
                }

                let mut full_key = path;
                full_key.extend(&leaf.key);

                // Leaf values live in the subtrie-wide values map, keyed by the full key
                // (node path + leaf key), not on the leaf node itself.
                match self.inner.values.entry(full_key) {
                    Entry::Occupied(_) => {
                        trace!(
                            target: "trie::parallel_sparse",
                            ?path,
                            ?full_key,
                            "Leaf full key value already present, skipping",
                        );
                        return Ok(false)
                    }
                    Entry::Vacant(entry) => {
                        entry.insert(leaf.value.clone());
                    }
                }

                self.nodes.insert(
                    path,
                    SparseNode::Leaf {
                        key: leaf.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                store_in_db_trie: Some(false),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );
            }
        }

        Ok(true)
    }
2866
2867    /// Recalculates and updates the RLP hashes for the changed nodes in this subtrie.
2868    ///
2869    /// The function starts from the subtrie root, traverses down to leaves, and then calculates
2870    /// the hashes from leaves back up to the root. It uses a stack from [`SparseSubtrieBuffers`] to
2871    /// track the traversal and accumulate RLP encodings.
2872    ///
2873    /// # Parameters
2874    ///
2875    /// - `prefix_set`: The set of trie paths whose nodes have changed.
2876    /// - `update_actions`: A buffer which `SparseTrieUpdatesAction`s will be written to in the
2877    ///   event that any changes to the top-level updates are required. If None then update
2878    ///   retention is disabled.
2879    /// - `branch_node_masks`: The tree and hash masks for branch nodes.
2880    ///
2881    /// # Returns
2882    ///
2883    /// A tuple containing the root node of the updated subtrie.
2884    ///
2885    /// # Panics
2886    ///
2887    /// If the node at the root path does not exist.
2888    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(root = ?self.path), ret)]
2889    fn update_hashes(
2890        &mut self,
2891        prefix_set: &mut PrefixSet,
2892        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
2893        branch_node_masks: &BranchNodeMasksMap,
2894    ) -> RlpNode {
2895        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");
2896
2897        debug_assert!(prefix_set.iter().all(|path| path.starts_with(&self.path)));
2898
2899        debug_assert!(self.inner.buffers.path_stack.is_empty());
2900        self.inner
2901            .buffers
2902            .path_stack
2903            .push(RlpNodePathStackItem { path: self.path, is_in_prefix_set: None });
2904
2905        while let Some(stack_item) = self.inner.buffers.path_stack.pop() {
2906            let path = stack_item.path;
2907            let node = self
2908                .nodes
2909                .get_mut(&path)
2910                .unwrap_or_else(|| panic!("node at path {path:?} does not exist"));
2911
2912            self.inner.rlp_node(prefix_set, update_actions, stack_item, node, branch_node_masks);
2913        }
2914
2915        debug_assert_eq!(self.inner.buffers.rlp_node_stack.len(), 1);
2916        self.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
2917    }
2918
2919    /// Removes all nodes and values from the subtrie, resetting it to a blank state
2920    /// with only an empty root node. This is used when a storage root is deleted.
2921    fn wipe(&mut self) {
2922        self.nodes.clear();
2923        self.nodes.insert(Nibbles::default(), SparseNode::Empty);
2924        self.inner.clear();
2925    }
2926
2927    /// Clears the subtrie, keeping the data structures allocated.
2928    pub(crate) fn clear(&mut self) {
2929        self.nodes.clear();
2930        self.inner.clear();
2931    }
2932
    /// Shrinks the capacity of the subtrie's node storage.
    ///
    /// Frees excess capacity while keeping room for at least `size` entries (and for all entries
    /// currently present).
    pub(crate) fn shrink_nodes_to(&mut self, size: usize) {
        self.nodes.shrink_to(size);
    }
2937
    /// Shrinks the capacity of the subtrie's value storage.
    ///
    /// Frees excess capacity while keeping room for at least `size` entries (and for all entries
    /// currently present).
    pub(crate) fn shrink_values_to(&mut self, size: usize) {
        self.inner.values.shrink_to(size);
    }
2942
2943    /// Returns a heuristic for the in-memory size of this subtrie in bytes.
2944    pub(crate) fn memory_size(&self) -> usize {
2945        let mut size = core::mem::size_of::<Self>();
2946
2947        // Nodes map: key (Nibbles) + value (SparseNode)
2948        for (path, node) in &self.nodes {
2949            size += core::mem::size_of::<Nibbles>();
2950            size += path.len(); // Nibbles heap allocation
2951            size += node.memory_size();
2952        }
2953
2954        // Values map: key (Nibbles) + value (Vec<u8>)
2955        for (path, value) in &self.inner.values {
2956            size += core::mem::size_of::<Nibbles>();
2957            size += path.len(); // Nibbles heap allocation
2958            size += core::mem::size_of::<Vec<u8>>() + value.capacity();
2959        }
2960
2961        // Buffers
2962        size += self.inner.buffers.memory_size();
2963
2964        size
2965    }
2966}
2967
/// Helper type for [`SparseSubtrie`] to mutably access only a subset of fields from the original
/// struct.
///
/// Keeping these fields separate from the nodes map allows borrowing them mutably while a node
/// from the nodes map is simultaneously borrowed (see `SparseSubtrie::update_hashes`).
#[derive(Clone, PartialEq, Eq, Debug, Default)]
struct SparseSubtrieInner {
    /// Map from leaf key paths to their values.
    /// All values are stored here instead of directly in leaf nodes.
    values: HashMap<Nibbles, Vec<u8>>,
    /// Reusable buffers for [`SparseSubtrie::update_hashes`].
    buffers: SparseSubtrieBuffers,
}
2978
impl SparseSubtrieInner {
    /// Computes the RLP encoding and its hash for a single [`SparseNode`] trie node.
    ///
    /// # Deferred Processing
    ///
    /// When an extension or a branch node depends on child nodes that haven't been computed yet,
    /// the function pushes the current node back onto the path stack along with its children,
    /// then returns early. This allows the iterative algorithm to process children first before
    /// retrying the parent.
    ///
    /// # Parameters
    ///
    /// - `prefix_set`: Set of prefixes (key paths) that have been marked as updated
    /// - `update_actions`: A buffer which `SparseTrieUpdatesAction`s will be written to in the
    ///   event that any changes to the top-level updates are required. If None then update
    ///   retention is disabled.
    /// - `stack_item`: The stack item to process
    /// - `node`: The sparse node to process (will be mutated to update hash)
    /// - `branch_node_masks`: The tree and hash masks for branch nodes.
    ///
    /// # Side Effects
    ///
    /// - Updates the node's hash field after computing RLP
    /// - Pushes nodes to [`SparseSubtrieBuffers::path_stack`] to manage traversal
    /// - May push items onto the path stack for deferred processing
    ///
    /// # Exit condition
    ///
    /// Once all nodes have been processed and all RLPs and hashes calculated, pushes the root node
    /// onto the [`SparseSubtrieBuffers::rlp_node_stack`] and exits.
    fn rlp_node(
        &mut self,
        prefix_set: &mut PrefixSet,
        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
        mut stack_item: RlpNodePathStackItem,
        node: &mut SparseNode,
        branch_node_masks: &BranchNodeMasksMap,
    ) {
        let path = stack_item.path;
        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            "Calculating node RLP"
        );

        // Check if the path is in the prefix set.
        // First, check the cached value. If it's `None`, then check the prefix set, and update
        // the cached value. This way the prefix set is consulted at most once per stack item.
        let mut prefix_set_contains = |path: &Nibbles| {
            *stack_item.is_in_prefix_set.get_or_insert_with(|| prefix_set.contains(path))
        };

        let (rlp_node, node_type) = match node {
            SparseNode::Empty => (RlpNode::word_rlp(&EMPTY_ROOT_HASH), SparseNodeType::Empty),
            SparseNode::Leaf { key, state } => {
                // Leaf values are keyed by the full path (node path + leaf key).
                let mut path = path;
                path.extend(key);
                let value = self.values.get(&path);

                // Check if we should use cached RLP:
                // - If there's a cached RLP and the path is not in prefix_set, use cached
                // - If the value is not in this subtrie's values (e.g., lower subtrie leaf being
                //   processed via upper subtrie), we must use cached RLP
                let cached_rlp_node = state.cached_rlp_node();
                let use_cached =
                    cached_rlp_node.is_some() && (!prefix_set_contains(&path) || value.is_none());

                if let Some(rlp_node) = use_cached.then(|| cached_rlp_node.unwrap()) {
                    // Return the cached RLP
                    (rlp_node.clone(), SparseNodeType::Leaf)
                } else {
                    // Encode the leaf node and update its RlpNode
                    let value = value.expect("leaf value must exist in subtrie");
                    self.buffers.rlp_buf.clear();
                    let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.buffers.rlp_buf);
                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: Some(false),
                    };
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?key,
                        value = %alloy_primitives::hex::encode(value),
                        ?rlp_node,
                        "Calculated leaf RLP node",
                    );
                    (rlp_node, SparseNodeType::Leaf)
                }
            }
            SparseNode::Extension { key, state } => {
                let mut child_path = path;
                child_path.extend(key);
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    // If the node is already computed, and the node path is not in
                    // the prefix set, return the pre-computed node
                    (
                        rlp_node.clone(),
                        SparseNodeType::Extension { store_in_db_trie: Some(store_in_db_trie) },
                    )
                } else if self.buffers.rlp_node_stack.last().is_some_and(|e| e.path == child_path) {
                    // Top of the stack has the child node, we can encode the extension node and
                    // update its hash
                    let RlpNodeStackItem { path: _, rlp_node: child, node_type: child_node_type } =
                        self.buffers.rlp_node_stack.pop().unwrap();
                    self.buffers.rlp_buf.clear();
                    let rlp_node =
                        ExtensionNodeRef::new(key, &child).rlp(&mut self.buffers.rlp_buf);

                    let store_in_db_trie_value = child_node_type.store_in_db_trie();

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?child_path,
                        ?child_node_type,
                        "Extension node"
                    );

                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: store_in_db_trie_value,
                    };

                    (
                        rlp_node,
                        SparseNodeType::Extension {
                            // Inherit the `store_in_db_trie` flag from the child node, which is
                            // always the branch node
                            store_in_db_trie: store_in_db_trie_value,
                        },
                    )
                } else {
                    // Need to defer processing until child is computed, on the next
                    // invocation update the node's hash. The cached prefix-set answer is
                    // carried along so it is not recomputed on the retry.
                    self.buffers.path_stack.extend([
                        RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        },
                        RlpNodePathStackItem { path: child_path, is_in_prefix_set: None },
                    ]);
                    return
                }
            }
            SparseNode::Branch { state_mask, state, blinded_mask, blinded_hashes } => {
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    let node_type =
                        SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie) };

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?node_type,
                        ?rlp_node,
                        "Adding node to RLP node stack (cached branch)"
                    );

                    // If the node hash is already computed, and the node path is not in
                    // the prefix set, return the pre-computed hash
                    self.buffers.rlp_node_stack.push(RlpNodeStackItem {
                        path,
                        rlp_node: rlp_node.clone(),
                        node_type,
                    });
                    return
                }

                // Only track DB update actions when retention is enabled and this branch changed.
                let retain_updates = update_actions.is_some() && prefix_set_contains(&path);

                self.buffers.branch_child_buf.clear();
                // Walk children in a reverse order from `f` to `0`, so we pop the `0` first
                // from the stack and keep walking in the sorted order.
                for bit in state_mask.iter().rev() {
                    let mut child = path;
                    child.push_unchecked(bit);

                    // Blinded children have no revealed node to traverse; only queue revealed
                    // ones for processing.
                    if !blinded_mask.is_bit_set(bit) {
                        self.buffers.branch_child_buf.push(child);
                    }
                }

                self.buffers.branch_value_stack_buf.resize(state_mask.len(), Default::default());

                let mut tree_mask = TrieMask::default();
                let mut hash_mask = TrieMask::default();
                let mut hashes = Vec::new();

                // Lazy lookup for branch node masks - shared across loop iterations
                let mut path_masks_storage = None;
                let mut path_masks =
                    || *path_masks_storage.get_or_insert_with(|| branch_node_masks.get(&path));

                for (i, child_nibble) in state_mask.iter().enumerate().rev() {
                    let mut child_path = path;
                    child_path.push_unchecked(child_nibble);

                    // A blinded child contributes its stored hash directly; a revealed child's
                    // RLP node must already be on top of the stack, otherwise processing is
                    // deferred.
                    let (child, child_node_type) = if blinded_mask.is_bit_set(child_nibble) {
                        (
                            RlpNode::word_rlp(&blinded_hashes[child_nibble as usize]),
                            SparseNodeType::Hash,
                        )
                    } else if self
                        .buffers
                        .rlp_node_stack
                        .last()
                        .is_some_and(|e| e.path == child_path)
                    {
                        let RlpNodeStackItem { path: _, rlp_node, node_type } =
                            self.buffers.rlp_node_stack.pop().unwrap();

                        (rlp_node, node_type)
                    } else {
                        // Need to defer processing until children are computed, on the next
                        // invocation update the node's hash.
                        self.buffers.path_stack.push(RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        });
                        self.buffers.path_stack.extend(
                            self.buffers
                                .branch_child_buf
                                .drain(..)
                                .map(|path| RlpNodePathStackItem { path, is_in_prefix_set: None }),
                        );
                        return
                    };

                    // Update the masks only if we need to retain trie updates
                    if retain_updates {
                        // Determine whether we need to set trie mask bit.
                        let should_set_tree_mask_bit =
                            if let Some(store_in_db_trie) = child_node_type.store_in_db_trie() {
                                // A branch or an extension node explicitly set the
                                // `store_in_db_trie` flag
                                store_in_db_trie
                            } else {
                                // A blinded node has the tree mask bit set
                                child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.tree_mask.is_bit_set(child_nibble)
                                    })
                            };
                        if should_set_tree_mask_bit {
                            tree_mask.set_bit(child_nibble);
                        }
                        // Set the hash mask. If a child node is a revealed branch node OR
                        // is a blinded node that has its hash mask bit set according to the
                        // database, set the hash mask bit and save the hash.
                        let hash = child.as_hash().filter(|_| {
                            child_node_type.is_branch() ||
                                (child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.hash_mask.is_bit_set(child_nibble)
                                    }))
                        });
                        if let Some(hash) = hash {
                            hash_mask.set_bit(child_nibble);
                            hashes.push(hash);
                        }
                    }

                    // Insert children in the resulting buffer in a normal order,
                    // because initially we iterated in reverse.
                    // `i` indexes `branch_value_stack_buf`, which was resized above to
                    // `state_mask.len()`, so the write is always in bounds.
                    self.buffers.branch_value_stack_buf[i] = child;
                }

                trace!(
                    target: "trie::parallel_sparse",
                    ?path,
                    ?tree_mask,
                    ?hash_mask,
                    "Branch node masks"
                );

                // Top of the stack has all children node, we can encode the branch node and
                // update its hash
                self.buffers.rlp_buf.clear();
                let branch_node_ref =
                    BranchNodeRef::new(&self.buffers.branch_value_stack_buf, *state_mask);
                let rlp_node = branch_node_ref.rlp(&mut self.buffers.rlp_buf);

                // Save a branch node update only if it's not a root node, and we need to
                // persist updates.
                let store_in_db_trie_value = if let Some(update_actions) =
                    update_actions.as_mut().filter(|_| retain_updates && !path.is_empty())
                {
                    let store_in_db_trie = !tree_mask.is_empty() || !hash_mask.is_empty();
                    if store_in_db_trie {
                        // Store in DB trie if there are either any children that are stored in
                        // the DB trie, or any children represent hashed values
                        hashes.reverse();
                        let branch_node =
                            BranchNodeCompact::new(*state_mask, tree_mask, hash_mask, hashes, None);
                        update_actions
                            .push(SparseTrieUpdatesAction::InsertUpdated(path, branch_node));
                    } else {
                        // New tree and hash masks are empty - check previous state
                        let prev_had_masks = path_masks()
                            .is_some_and(|m| !m.tree_mask.is_empty() || !m.hash_mask.is_empty());
                        if prev_had_masks {
                            // Previously had masks, now empty - mark as removed
                            update_actions.push(SparseTrieUpdatesAction::InsertRemoved(path));
                        } else {
                            // Previously empty too - just remove the update
                            update_actions.push(SparseTrieUpdatesAction::RemoveUpdated(path));
                        }
                    }

                    store_in_db_trie
                } else {
                    false
                };

                *state = SparseNodeState::Cached {
                    rlp_node: rlp_node.clone(),
                    store_in_db_trie: Some(store_in_db_trie_value),
                };

                (
                    rlp_node,
                    SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie_value) },
                )
            }
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node_type,
            ?rlp_node,
            "Adding node to RLP node stack"
        );
        self.buffers.rlp_node_stack.push(RlpNodeStackItem { path, rlp_node, node_type });
    }

    /// Clears the subtrie's values and buffers, keeping the data structures allocated.
    fn clear(&mut self) {
        self.values.clear();
        self.buffers.clear();
    }
}
3331
/// Represents the outcome of processing a node during leaf insertion
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub enum LeafUpdateStep {
    /// Continue traversing to the next node
    Continue,
    /// Update is complete with nodes inserted
    Complete {
        /// The node paths that were inserted during this step
        inserted_nodes: Vec<Nibbles>,
    },
    /// The node was not found
    ///
    /// This is also the `Default` value of the enum.
    #[default]
    NodeNotFound,
}
3346
impl LeafUpdateStep {
    /// Creates a step indicating completion with inserted nodes.
    ///
    /// Convenience constructor for [`LeafUpdateStep::Complete`].
    pub const fn complete_with_insertions(inserted_nodes: Vec<Nibbles>) -> Self {
        Self::Complete { inserted_nodes }
    }
}
3353
/// Sparse Subtrie Type.
///
/// Used to determine the type of subtrie a certain path belongs to:
/// - Paths in the range `0x..=0xf` belong to the upper subtrie.
/// - Paths in the range `0x00..` belong to one of the lower subtries. The index of the lower
///   subtrie is determined by the first [`UPPER_TRIE_MAX_DEPTH`] nibbles of the path.
///
/// There can be at most [`NUM_LOWER_SUBTRIES`] lower subtries.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SparseSubtrieType {
    /// Upper subtrie with paths in the range `0x..=0xf`
    Upper,
    /// Lower subtrie with paths in the range `0x00..`. Includes the index of the subtrie,
    /// according to the path prefix.
    Lower(usize),
}
3370
3371impl SparseSubtrieType {
3372    /// Returns true if a node at a path of the given length would be placed in the upper subtrie.
3373    ///
3374    /// Nodes with paths shorter than [`UPPER_TRIE_MAX_DEPTH`] nibbles belong to the upper subtrie,
3375    /// while longer paths belong to the lower subtries.
3376    pub const fn path_len_is_upper(len: usize) -> bool {
3377        len < UPPER_TRIE_MAX_DEPTH
3378    }
3379
3380    /// Returns the type of subtrie based on the given path.
3381    pub fn from_path(path: &Nibbles) -> Self {
3382        if Self::path_len_is_upper(path.len()) {
3383            Self::Upper
3384        } else {
3385            Self::Lower(path_subtrie_index_unchecked(path))
3386        }
3387    }
3388
3389    /// Returns the index of the lower subtrie, if it exists.
3390    pub const fn lower_index(&self) -> Option<usize> {
3391        match self {
3392            Self::Upper => None,
3393            Self::Lower(index) => Some(*index),
3394        }
3395    }
3396}
3397
3398impl Ord for SparseSubtrieType {
3399    /// Orders two [`SparseSubtrieType`]s such that `Upper` is less than `Lower(_)`, and `Lower`s
3400    /// are ordered by their index.
3401    fn cmp(&self, other: &Self) -> Ordering {
3402        match (self, other) {
3403            (Self::Upper, Self::Upper) => Ordering::Equal,
3404            (Self::Upper, Self::Lower(_)) => Ordering::Less,
3405            (Self::Lower(_), Self::Upper) => Ordering::Greater,
3406            (Self::Lower(idx_a), Self::Lower(idx_b)) if idx_a == idx_b => Ordering::Equal,
3407            (Self::Lower(idx_a), Self::Lower(idx_b)) => idx_a.cmp(idx_b),
3408        }
3409    }
3410}
3411
impl PartialOrd for SparseSubtrieType {
    /// Delegates to [`Ord::cmp`] so the partial order is always consistent with the total order.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
3417
/// Collection of reusable buffers for calculating subtrie hashes.
///
/// These buffers reduce allocations when computing RLP representations during trie updates.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrieBuffers {
    /// Stack of RLP node paths still awaiting processing
    path_stack: Vec<RlpNodePathStackItem>,
    /// Stack of computed RLP nodes
    rlp_node_stack: Vec<RlpNodeStackItem>,
    /// Reusable branch child path
    branch_child_buf: Vec<Nibbles>,
    /// Reusable branch value stack
    branch_value_stack_buf: Vec<RlpNode>,
    /// Reusable RLP encoding buffer
    rlp_buf: Vec<u8>,
}
3434
3435impl SparseSubtrieBuffers {
3436    /// Clears all buffers.
3437    fn clear(&mut self) {
3438        self.path_stack.clear();
3439        self.rlp_node_stack.clear();
3440        self.branch_child_buf.clear();
3441        self.branch_value_stack_buf.clear();
3442        self.rlp_buf.clear();
3443    }
3444
3445    /// Returns a heuristic for the in-memory size of these buffers in bytes.
3446    const fn memory_size(&self) -> usize {
3447        let mut size = core::mem::size_of::<Self>();
3448
3449        size += self.path_stack.capacity() * core::mem::size_of::<RlpNodePathStackItem>();
3450        size += self.rlp_node_stack.capacity() * core::mem::size_of::<RlpNodeStackItem>();
3451        size += self.branch_child_buf.capacity() * core::mem::size_of::<Nibbles>();
3452        size += self.branch_value_stack_buf.capacity() * core::mem::size_of::<RlpNode>();
3453        size += self.rlp_buf.capacity();
3454
3455        size
3456    }
3457}
3458
/// RLP node path stack item.
///
/// Entry pushed onto [`SparseSubtrieBuffers::path_stack`] during hash computation.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RlpNodePathStackItem {
    /// Path to the node.
    pub path: Nibbles,
    /// Whether the path is in the prefix set. If [`None`], then unknown yet.
    pub is_in_prefix_set: Option<bool>,
}
3467
/// Changed subtrie.
///
/// Pairs a lower subtrie taken out of the trie with the slice of the prefix set that targets
/// it, so it can be hashed independently (e.g. on another thread) and merged back later.
#[derive(Debug)]
struct ChangedSubtrie {
    /// Lower subtrie index in the range [0, [`NUM_LOWER_SUBTRIES`]).
    index: usize,
    /// Changed subtrie
    subtrie: Box<SparseSubtrie>,
    /// Prefix set of keys that belong to the subtrie.
    prefix_set: PrefixSet,
    /// Reusable buffer for collecting [`SparseTrieUpdatesAction`]s during computations. Will be
    /// None if update retention is disabled.
    update_actions_buf: Option<Vec<SparseTrieUpdatesAction>>,
}
3481
/// Convert first [`UPPER_TRIE_MAX_DEPTH`] nibbles of the path into a lower subtrie index in the
/// range [0, [`NUM_LOWER_SUBTRIES`]).
///
/// # Panics
///
/// If the path is shorter than [`UPPER_TRIE_MAX_DEPTH`] nibbles.
fn path_subtrie_index_unchecked(path: &Nibbles) -> usize {
    // This implementation reads exactly one byte (= two nibbles); guard against the depth
    // constant changing without this function being updated to match.
    debug_assert_eq!(UPPER_TRIE_MAX_DEPTH, 2);
    let idx = path.get_byte_unchecked(0) as usize;
    // SAFETY: always true — `idx` comes from a `u8`, so it is < 256 == `NUM_LOWER_SUBTRIES`.
    unsafe { core::hint::assert_unchecked(idx < NUM_LOWER_SUBTRIES) };
    idx
}
3495
3496/// Checks if `path` is a strict descendant of any root in a sorted slice.
3497///
3498/// Uses binary search to find the candidate root that could be an ancestor.
3499/// Returns `true` if `path` starts with a root and is longer (strict descendant).
3500fn is_strict_descendant_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3501    if roots.is_empty() {
3502        return false;
3503    }
3504    debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3505    let idx = roots.partition_point(|root| root <= path);
3506    if idx > 0 {
3507        let candidate = &roots[idx - 1];
3508        if path.starts_with(candidate) && path.len() > candidate.len() {
3509            return true;
3510        }
3511    }
3512    false
3513}
3514
3515/// Checks if `path` starts with any root in a sorted slice (inclusive).
3516///
3517/// Uses binary search to find the candidate root that could be a prefix.
3518/// Returns `true` if `path` starts with a root (including exact match).
3519fn starts_with_pruned_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3520    if roots.is_empty() {
3521        return false;
3522    }
3523    debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3524    let idx = roots.partition_point(|root| root <= path);
3525    if idx > 0 {
3526        let candidate = &roots[idx - 1];
3527        if path.starts_with(candidate) {
3528            return true;
3529        }
3530    }
3531    false
3532}
3533
/// Used by lower subtries to communicate updates to the top-level [`SparseTrieUpdates`] set.
///
/// Actions are buffered per-subtrie (see [`ChangedSubtrie::update_actions_buf`]) and applied
/// to the shared update set after parallel hash computation finishes.
#[derive(Clone, Debug, Eq, PartialEq)]
enum SparseTrieUpdatesAction {
    /// Remove the path from the `updated_nodes`, if it was present, and add it to `removed_nodes`.
    InsertRemoved(Nibbles),
    /// Remove the path from the `updated_nodes`, if it was present, leaving `removed_nodes`
    /// unaffected.
    RemoveUpdated(Nibbles),
    /// Insert the branch node into `updated_nodes`.
    InsertUpdated(Nibbles, BranchNodeCompact),
}
3545
3546#[cfg(test)]
3547mod tests {
3548    use super::{
3549        path_subtrie_index_unchecked, LowerSparseSubtrie, ParallelSparseTrie, SparseSubtrie,
3550        SparseSubtrieType,
3551    };
3552    use crate::{
3553        parallel::ChangedSubtrie,
3554        provider::{DefaultTrieNodeProvider, NoRevealProvider},
3555        trie::SparseNodeState,
3556        LeafLookup, LeafLookupError, SparseNode, SparseTrie, SparseTrieUpdates,
3557    };
3558    use alloy_primitives::{
3559        b256, hex,
3560        map::{B256Set, HashMap},
3561        B256, U256,
3562    };
3563    use alloy_rlp::{Decodable, Encodable};
3564    use alloy_trie::{proof::AddedRemovedKeys, BranchNodeCompact, Nibbles};
3565    use assert_matches::assert_matches;
3566    use itertools::Itertools;
3567    use proptest::{prelude::*, sample::SizeRange};
3568    use proptest_arbitrary_interop::arb;
3569    use reth_execution_errors::SparseTrieErrorKind;
3570    use reth_primitives_traits::Account;
3571    use reth_provider::{
3572        test_utils::create_test_provider_factory, StorageSettingsCache, TrieWriter,
3573    };
3574    use reth_trie::{
3575        hashed_cursor::{noop::NoopHashedCursor, HashedPostStateCursor},
3576        node_iter::{TrieElement, TrieNodeIter},
3577        trie_cursor::{noop::NoopAccountTrieCursor, TrieCursor, TrieCursorFactory},
3578        walker::TrieWalker,
3579        HashedPostState,
3580    };
3581    use reth_trie_common::{
3582        prefix_set::PrefixSetMut,
3583        proof::{ProofNodes, ProofRetainer},
3584        updates::TrieUpdates,
3585        BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, BranchNodeV2, ExtensionNode,
3586        HashBuilder, LeafNode, ProofTrieNodeV2, RlpNode, TrieMask, TrieNode, TrieNodeV2,
3587        EMPTY_ROOT_HASH,
3588    };
3589    use reth_trie_db::DatabaseTrieCursorFactory;
3590    use std::collections::{BTreeMap, BTreeSet};
3591
3592    /// Pad nibbles to the length of a B256 hash with zeros on the right.
3593    fn pad_nibbles_right(mut nibbles: Nibbles) -> Nibbles {
3594        nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![
3595            0;
3596            B256::len_bytes() * 2 - nibbles.len()
3597        ]));
3598        nibbles
3599    }
3600
3601    /// Create a leaf key (suffix) for a leaf at a given position depth.
3602    /// `suffix` contains the non-zero nibbles, padded with zeros to reach `total_len`.
3603    fn leaf_key(suffix: impl AsRef<[u8]>, total_len: usize) -> Nibbles {
3604        let suffix = suffix.as_ref();
3605        let mut nibbles = Nibbles::from_nibbles(suffix);
3606        nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![0; total_len - suffix.len()]));
3607        nibbles
3608    }
3609
3610    fn create_account(nonce: u64) -> Account {
3611        Account { nonce, ..Default::default() }
3612    }
3613
3614    fn large_account_value() -> Vec<u8> {
3615        let account = Account {
3616            nonce: 0x123456789abcdef,
3617            balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
3618            ..Default::default()
3619        };
3620        let mut buf = Vec::new();
3621        account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
3622        buf
3623    }
3624
3625    fn encode_account_value(nonce: u64) -> Vec<u8> {
3626        let account = Account { nonce, ..Default::default() };
3627        let trie_account = account.into_trie_account(EMPTY_ROOT_HASH);
3628        let mut buf = Vec::new();
3629        trie_account.encode(&mut buf);
3630        buf
3631    }
3632
    /// Test context that provides helper methods for trie testing
    ///
    /// Stateless; it exists purely to group builder/assertion helpers used by the tests below.
    #[derive(Default)]
    struct ParallelSparseTrieTestContext;

    impl ParallelSparseTrieTestContext {
        /// Assert that a lower subtrie exists at the given path
        fn assert_subtrie_exists(&self, trie: &ParallelSparseTrie, path: &Nibbles) {
            let idx = path_subtrie_index_unchecked(path);
            assert!(
                trie.lower_subtries[idx].as_revealed_ref().is_some(),
                "Expected lower subtrie at path {path:?} to exist",
            );
        }

        /// Get a lower subtrie, panicking if it doesn't exist
        fn get_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: &Nibbles,
        ) -> &'a SparseSubtrie {
            let idx = path_subtrie_index_unchecked(path);
            trie.lower_subtries[idx]
                .as_revealed_ref()
                .unwrap_or_else(|| panic!("Lower subtrie at path {path:?} should exist"))
        }

        /// Assert that a lower subtrie has a specific path field value
        fn assert_subtrie_path(
            &self,
            trie: &ParallelSparseTrie,
            subtrie_prefix: impl AsRef<[u8]>,
            expected_path: impl AsRef<[u8]>,
        ) {
            let subtrie_prefix = Nibbles::from_nibbles(subtrie_prefix);
            let expected_path = Nibbles::from_nibbles(expected_path);
            let idx = path_subtrie_index_unchecked(&subtrie_prefix);

            let subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap_or_else(|| {
                panic!("Lower subtrie at prefix {subtrie_prefix:?} should exist")
            });

            assert_eq!(
                subtrie.path, expected_path,
                "Subtrie at prefix {subtrie_prefix:?} should have path {expected_path:?}, but has {:?}",
                subtrie.path
            );
        }

        /// Create test leaves with consecutive account values
        ///
        /// Nonces start at 1 so no generated account is all-default.
        fn create_test_leaves(&self, paths: &[&[u8]]) -> Vec<(Nibbles, Vec<u8>)> {
            paths
                .iter()
                .enumerate()
                .map(|(i, path)| {
                    (
                        pad_nibbles_right(Nibbles::from_nibbles(path)),
                        encode_account_value(i as u64 + 1),
                    )
                })
                .collect()
        }

        /// Create a single test leaf with the given path and value nonce
        fn create_test_leaf(&self, path: impl AsRef<[u8]>, value_nonce: u64) -> (Nibbles, Vec<u8>) {
            (pad_nibbles_right(Nibbles::from_nibbles(path)), encode_account_value(value_nonce))
        }

        /// Update multiple leaves in the trie
        ///
        /// Panics on the first failed update.
        fn update_leaves(
            &self,
            trie: &mut ParallelSparseTrie,
            leaves: impl IntoIterator<Item = (Nibbles, Vec<u8>)>,
        ) {
            for (path, value) in leaves {
                trie.update_leaf(path, value, DefaultTrieNodeProvider).unwrap();
            }
        }

        /// Create an assertion builder for a subtrie
        ///
        /// Panics if the lower subtrie at `path` has not been revealed.
        fn assert_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: Nibbles,
        ) -> SubtrieAssertion<'a> {
            self.assert_subtrie_exists(trie, &path);
            let subtrie = self.get_subtrie(trie, &path);
            SubtrieAssertion::new(subtrie)
        }

        /// Create an assertion builder for the upper subtrie
        fn assert_upper_subtrie<'a>(&self, trie: &'a ParallelSparseTrie) -> SubtrieAssertion<'a> {
            SubtrieAssertion::new(&trie.upper_subtrie)
        }

        /// Assert the root, trie updates, and nodes against the hash builder output.
        fn assert_with_hash_builder(
            &self,
            trie: &mut ParallelSparseTrie,
            hash_builder_root: B256,
            hash_builder_updates: TrieUpdates,
            hash_builder_proof_nodes: ProofNodes,
        ) {
            assert_eq!(trie.root(), hash_builder_root);
            pretty_assertions::assert_eq!(
                BTreeMap::from_iter(trie.updates_ref().updated_nodes.clone()),
                BTreeMap::from_iter(hash_builder_updates.account_nodes)
            );
            assert_eq_parallel_sparse_trie_proof_nodes(trie, hash_builder_proof_nodes);
        }
    }
3743
    /// Assertion builder for subtrie structure
    ///
    /// Each `has_*` method consumes and returns `self` so checks can be chained fluently.
    struct SubtrieAssertion<'a> {
        subtrie: &'a SparseSubtrie,
    }

    impl<'a> SubtrieAssertion<'a> {
        fn new(subtrie: &'a SparseSubtrie) -> Self {
            Self { subtrie }
        }

        /// Asserts a branch node exists at `path` with at least the given state-mask bits set.
        fn has_branch(self, path: &Nibbles, expected_mask_bits: &[u8]) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Branch { state_mask, .. }) => {
                    for bit in expected_mask_bits {
                        assert!(
                            state_mask.is_bit_set(*bit),
                            "Expected branch at {path:?} to have bit {bit} set, instead mask is: {state_mask:?}",
                        );
                    }
                }
                node => panic!("Expected branch node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts a leaf node exists at `path` with exactly the expected key (suffix).
        fn has_leaf(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Leaf { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected leaf at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected leaf node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts an extension node exists at `path` with exactly the expected key.
        fn has_extension(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Extension { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected extension at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected extension node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts the subtrie stores the expected leaf value under the full key `path`.
        fn has_value(self, path: &Nibbles, expected_value: &[u8]) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert_eq!(
                actual.map(|v| v.as_slice()),
                Some(expected_value),
                "Expected value at {path:?} to be {expected_value:?}, found {actual:?}",
            );
            self
        }

        /// Asserts the subtrie stores no leaf value under the full key `path`.
        fn has_no_value(self, path: &Nibbles) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert!(actual.is_none(), "Expected no value at {path:?}, but found {actual:?}");
            self
        }
    }
3811
3812    fn create_leaf_node(key: impl AsRef<[u8]>, value_nonce: u64) -> TrieNodeV2 {
3813        TrieNodeV2::Leaf(LeafNode::new(
3814            Nibbles::from_nibbles(key),
3815            encode_account_value(value_nonce),
3816        ))
3817    }
3818
3819    fn create_branch_node(
3820        key: Nibbles,
3821        children_indices: &[u8],
3822        child_hashes: impl IntoIterator<Item = RlpNode>,
3823    ) -> TrieNodeV2 {
3824        let mut stack = Vec::new();
3825        let mut state_mask = TrieMask::default();
3826
3827        for (&idx, hash) in children_indices.iter().zip(child_hashes) {
3828            state_mask.set_bit(idx);
3829            stack.push(hash);
3830        }
3831
3832        let branch_rlp_node = if key.is_empty() {
3833            None
3834        } else {
3835            Some(RlpNode::from_rlp(&alloy_rlp::encode(BranchNodeRef::new(&stack, state_mask))))
3836        };
3837
3838        TrieNodeV2::Branch(BranchNodeV2::new(key, stack, state_mask, branch_rlp_node))
3839    }
3840
    /// Builds a branch [`TrieNodeV2`] with an empty key, delegating to [`create_branch_node`].
    fn create_branch_node_with_children(
        children_indices: &[u8],
        child_hashes: impl IntoIterator<Item = RlpNode>,
    ) -> TrieNodeV2 {
        create_branch_node(Nibbles::default(), children_indices, child_hashes)
    }
3847
    /// Calculate the state root by feeding the provided state to the hash builder and retaining the
    /// proofs for the provided targets.
    ///
    /// Returns the state root and the retained proof nodes.
    fn run_hash_builder(
        state: impl IntoIterator<Item = (Nibbles, Account)> + Clone,
        trie_cursor: impl TrieCursor,
        destroyed_accounts: B256Set,
        proof_targets: impl IntoIterator<Item = Nibbles>,
    ) -> (B256, TrieUpdates, ProofNodes, HashMap<Nibbles, TrieMask>, HashMap<Nibbles, TrieMask>)
    {
        // Reusable buffer for RLP-encoding each account leaf.
        let mut account_rlp = Vec::new();

        let mut hash_builder = HashBuilder::default()
            .with_updates(true)
            .with_proof_retainer(ProofRetainer::from_iter(proof_targets).with_added_removed_keys(
                Some(AddedRemovedKeys::default().with_assume_added(true)),
            ));

        // The prefix set collects every changed key (updated accounts plus destroyed ones) so
        // the walker knows which parts of the trie to revisit.
        let mut prefix_set = PrefixSetMut::default();
        prefix_set.extend_keys(state.clone().into_iter().map(|(nibbles, _)| nibbles));
        prefix_set.extend_keys(destroyed_accounts.iter().map(Nibbles::unpack));
        let walker = TrieWalker::<_>::state_trie(trie_cursor, prefix_set.freeze())
            .with_deletions_retained(true);
        let hashed_post_state = HashedPostState::default()
            .with_accounts(state.into_iter().map(|(nibbles, account)| {
                (nibbles.pack().into_inner().unwrap().into(), Some(account))
            }))
            .into_sorted();
        let mut node_iter = TrieNodeIter::state_trie(
            walker,
            HashedPostStateCursor::new_account(
                NoopHashedCursor::<Account>::default(),
                &hashed_post_state,
            ),
        );

        // Drive the node iterator, feeding every branch/leaf it yields into the hash builder.
        while let Some(node) = node_iter.try_next().unwrap() {
            match node {
                TrieElement::Branch(branch) => {
                    hash_builder.add_branch(branch.key, branch.value, branch.children_are_in_trie);
                }
                TrieElement::Leaf(key, account) => {
                    let account = account.into_trie_account(EMPTY_ROOT_HASH);
                    account.encode(&mut account_rlp);

                    hash_builder.add_leaf(Nibbles::unpack(key), &account_rlp);
                    account_rlp.clear();
                }
            }
        }
        let root = hash_builder.root();
        let proof_nodes = hash_builder.take_proof_nodes();
        // Extract the per-branch hash/tree masks from the branch nodes the builder updated.
        let branch_node_hash_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.hash_mask))
            .collect();
        let branch_node_tree_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.tree_mask))
            .collect();

        let mut trie_updates = TrieUpdates::default();
        let removed_keys = node_iter.walker.take_removed_keys();
        trie_updates.finalize(hash_builder, removed_keys, destroyed_accounts);

        (root, trie_updates, proof_nodes, branch_node_hash_masks, branch_node_tree_masks)
    }
3922
3923    /// Returns a `ParallelSparseTrie` pre-loaded with the given nodes, as well as leaf values
3924    /// inferred from any provided leaf nodes.
3925    fn new_test_trie<Nodes>(nodes: Nodes) -> ParallelSparseTrie
3926    where
3927        Nodes: Iterator<Item = (Nibbles, SparseNode)>,
3928    {
3929        let mut trie = ParallelSparseTrie::default().with_updates(true);
3930
3931        for (path, node) in nodes {
3932            let subtrie = trie.subtrie_for_path_mut(&path);
3933            if let SparseNode::Leaf { key, .. } = &node {
3934                let mut full_key = path;
3935                full_key.extend(key);
3936                subtrie.inner.values.insert(full_key, "LEAF VALUE".into());
3937            }
3938            subtrie.nodes.insert(path, node);
3939        }
3940        trie
3941    }
3942
3943    fn parallel_sparse_trie_nodes(
3944        sparse_trie: &ParallelSparseTrie,
3945    ) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
3946        let lower_sparse_nodes = sparse_trie
3947            .lower_subtries
3948            .iter()
3949            .filter_map(|subtrie| subtrie.as_revealed_ref())
3950            .flat_map(|subtrie| subtrie.nodes.iter());
3951
3952        let upper_sparse_nodes = sparse_trie.upper_subtrie.nodes.iter();
3953
3954        lower_sparse_nodes.chain(upper_sparse_nodes).sorted_by_key(|(path, _)| *path)
3955    }
3956
    /// Assert that the parallel sparse trie nodes and the proof nodes from the hash builder are
    /// equal.
    fn assert_eq_parallel_sparse_trie_proof_nodes(
        sparse_trie: &ParallelSparseTrie,
        proof_nodes: ProofNodes,
    ) {
        // Both sequences are ordered by path, so they can be compared pairwise via `zip`.
        let proof_nodes = proof_nodes
            .into_nodes_sorted()
            .into_iter()
            .map(|(path, node)| (path, TrieNodeV2::decode(&mut node.as_ref()).unwrap()));

        let all_sparse_nodes = parallel_sparse_trie_nodes(sparse_trie);

        for ((proof_node_path, proof_node), (sparse_node_path, sparse_node)) in
            proof_nodes.zip(all_sparse_nodes)
        {
            assert_eq!(&proof_node_path, sparse_node_path);

            // Nodes match when their variants agree and the structural field relevant to that
            // variant (state mask or key) is equal.
            let equals = match (&proof_node, &sparse_node) {
                // Both nodes are empty
                (TrieNodeV2::EmptyRoot, SparseNode::Empty) => true,
                // Both nodes are branches and have the same state mask
                (
                    TrieNodeV2::Branch(BranchNodeV2 { state_mask: proof_state_mask, .. }),
                    SparseNode::Branch { state_mask: sparse_state_mask, .. },
                ) => proof_state_mask == sparse_state_mask,
                // Both nodes are extensions and have the same key
                (
                    TrieNodeV2::Extension(ExtensionNode { key: proof_key, .. }),
                    SparseNode::Extension { key: sparse_key, .. },
                ) |
                // Both nodes are leaves and have the same key
                (
                    TrieNodeV2::Leaf(LeafNode { key: proof_key, .. }),
                    SparseNode::Leaf { key: sparse_key, .. },
                ) => proof_key == sparse_key,
                // Empty and hash nodes are specific to the sparse trie, skip them
                (_, SparseNode::Empty) => continue,
                _ => false,
            };
            assert!(
                equals,
                "path: {proof_node_path:?}\nproof node: {proof_node:?}\nsparse node: {sparse_node:?}"
            );
        }
    }
4003
4004    #[test]
4005    fn test_get_changed_subtries_empty() {
4006        let mut trie = ParallelSparseTrie::default();
4007        let mut prefix_set = PrefixSetMut::from([Nibbles::default()]).freeze();
4008
4009        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
4010        assert!(subtries.is_empty());
4011        assert_eq!(unchanged_prefix_set, PrefixSetMut::from(prefix_set.iter().copied()));
4012    }
4013
4014    #[test]
4015    fn test_get_changed_subtries() {
4016        // Create a trie with three subtries
4017        let mut trie = ParallelSparseTrie::default();
4018        let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
4019        let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
4020        let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
4021        let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
4022        let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
4023        let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
4024
4025        // Add subtries at specific positions
4026        trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
4027        trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
4028        trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3);
4029
4030        let unchanged_prefix_set = PrefixSetMut::from([
4031            Nibbles::from_nibbles([0x0]),
4032            Nibbles::from_nibbles([0x2, 0x0, 0x0]),
4033        ]);
4034        // Create a prefix set with the keys that match only the second subtrie
4035        let mut prefix_set = PrefixSetMut::from([
4036            // Match second subtrie
4037            Nibbles::from_nibbles([0x1, 0x0, 0x0]),
4038            Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0]),
4039        ]);
4040        prefix_set.extend(unchanged_prefix_set);
4041        let mut prefix_set = prefix_set.freeze();
4042
4043        // Second subtrie should be removed and returned
4044        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
4045        assert_eq!(
4046            subtries
4047                .into_iter()
4048                .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
4049                    (index, subtrie, prefix_set.iter().copied().collect::<Vec<_>>())
4050                })
4051                .collect::<Vec<_>>(),
4052            vec![(
4053                subtrie_2_index,
4054                subtrie_2,
4055                vec![
4056                    Nibbles::from_nibbles([0x1, 0x0, 0x0]),
4057                    Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0])
4058                ]
4059            )]
4060        );
4061        assert_eq!(unchanged_prefix_set, unchanged_prefix_set);
4062        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_none());
4063
4064        // First subtrie should remain unchanged
4065        assert_eq!(trie.lower_subtries[subtrie_1_index], LowerSparseSubtrie::Revealed(subtrie_1));
4066    }
4067
    #[test]
    fn test_get_changed_subtries_all() {
        // Create a trie with three subtries
        let mut trie = ParallelSparseTrie::default();
        let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
        let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
        let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
        let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
        let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
        let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);

        // Add subtries at specific positions
        trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
        trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
        trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3.clone());

        // Create a prefix set that matches any key
        let mut prefix_set = PrefixSetMut::all().freeze();

        // All subtries should be removed and returned, each with an `all` prefix set
        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
        assert_eq!(
            subtries
                .into_iter()
                .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
                    (index, subtrie, prefix_set.all())
                })
                .collect::<Vec<_>>(),
            vec![
                (subtrie_1_index, subtrie_1, true),
                (subtrie_2_index, subtrie_2, true),
                (subtrie_3_index, subtrie_3, true)
            ]
        );
        assert_eq!(unchanged_prefix_set, PrefixSetMut::all());

        // No lower subtrie should be left in a revealed state
        assert!(trie.lower_subtries.iter().all(|subtrie| subtrie.as_revealed_ref().is_none()));
    }
4106
4107    #[test]
4108    fn test_sparse_subtrie_type() {
4109        assert_eq!(SparseSubtrieType::from_path(&Nibbles::new()), SparseSubtrieType::Upper);
4110        assert_eq!(
4111            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0])),
4112            SparseSubtrieType::Upper
4113        );
4114        assert_eq!(
4115            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15])),
4116            SparseSubtrieType::Upper
4117        );
4118        assert_eq!(
4119            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0])),
4120            SparseSubtrieType::Lower(0)
4121        );
4122        assert_eq!(
4123            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0, 0])),
4124            SparseSubtrieType::Lower(0)
4125        );
4126        assert_eq!(
4127            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1])),
4128            SparseSubtrieType::Lower(1)
4129        );
4130        assert_eq!(
4131            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1, 0])),
4132            SparseSubtrieType::Lower(1)
4133        );
4134        assert_eq!(
4135            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 15])),
4136            SparseSubtrieType::Lower(15)
4137        );
4138        assert_eq!(
4139            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 0])),
4140            SparseSubtrieType::Lower(240)
4141        );
4142        assert_eq!(
4143            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 1])),
4144            SparseSubtrieType::Lower(241)
4145        );
4146        assert_eq!(
4147            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15])),
4148            SparseSubtrieType::Lower(255)
4149        );
4150        assert_eq!(
4151            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15, 15])),
4152            SparseSubtrieType::Lower(255)
4153        );
4154    }
4155
4156    #[test]
4157    fn test_reveal_node_leaves() {
4158        // Reveal leaf in the upper trie. A root branch with child 0x1 makes path [0x1]
4159        // reachable for the subsequent reveal_nodes call.
4160        let root_branch =
4161            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
4162        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
4163
4164        {
4165            let path = Nibbles::from_nibbles([0x1]);
4166            let node = create_leaf_node([0x2, 0x3], 42);
4167            let masks = None;
4168
4169            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();
4170
4171            assert_matches!(
4172                trie.upper_subtrie.nodes.get(&path),
4173                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
4174                if key == &Nibbles::from_nibbles([0x2, 0x3])
4175            );
4176
4177            let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
4178            assert_eq!(
4179                trie.upper_subtrie.inner.values.get(&full_path),
4180                Some(&encode_account_value(42))
4181            );
4182        }
4183
4184        // Reveal leaf in a lower trie. A separate trie is needed because the structure at
4185        // [0x1] conflicts: the upper trie test placed a leaf there, but reaching [0x1, 0x2]
4186        // requires a branch at [0x1]. A root branch → branch at [0x1] with child 0x2
4187        // makes path [0x1, 0x2] reachable.
4188        let root_branch =
4189            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
4190        let branch_at_1 =
4191            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
4192        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
4193        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
4194            path: Nibbles::from_nibbles([0x1]),
4195            node: branch_at_1,
4196            masks: None,
4197        }])
4198        .unwrap();
4199
4200        {
4201            let path = Nibbles::from_nibbles([0x1, 0x2]);
4202            let node = create_leaf_node([0x3, 0x4], 42);
4203            let masks = None;
4204
4205            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();
4206
4207            // Check that the lower subtrie was created
4208            let idx = path_subtrie_index_unchecked(&path);
4209            assert!(trie.lower_subtries[idx].as_revealed_ref().is_some());
4210
4211            // Check that the lower subtrie's path was correctly set
4212            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
4213            assert_eq!(lower_subtrie.path, path);
4214
4215            assert_matches!(
4216                lower_subtrie.nodes.get(&path),
4217                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
4218                if key == &Nibbles::from_nibbles([0x3, 0x4])
4219            );
4220        }
4221
4222        // Reveal leaf in a lower trie with a longer path, shouldn't result in the subtrie's root
4223        // path changing.
4224        {
4225            let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
4226            let node = create_leaf_node([0x4, 0x5], 42);
4227            let masks = None;
4228
4229            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();
4230
4231            // Check that the lower subtrie's path hasn't changed
4232            let idx = path_subtrie_index_unchecked(&path);
4233            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
4234            assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2]));
4235        }
4236    }
4237
4238    #[test]
4239    fn test_reveal_node_branch_all_upper() {
4240        let path = Nibbles::new();
4241        let child_hashes = [
4242            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
4243            RlpNode::word_rlp(&B256::repeat_byte(0x22)),
4244        ];
4245        let node = create_branch_node_with_children(&[0x0, 0x5], child_hashes.clone());
4246        let masks = None;
4247        let trie = ParallelSparseTrie::from_root(node, masks, true).unwrap();
4248
4249        // Branch node should be in upper trie
4250        assert_eq!(
4251            trie.upper_subtrie.nodes.get(&path).unwrap(),
4252            &SparseNode::new_branch(
4253                0b0000000000100001.into(),
4254                &[(0, child_hashes[0].as_hash().unwrap()), (5, child_hashes[1].as_hash().unwrap())]
4255            )
4256        );
4257
4258        // Children should not be revealed yet
4259        let child_path_0 = Nibbles::from_nibbles([0x0]);
4260        let child_path_5 = Nibbles::from_nibbles([0x5]);
4261        assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_0));
4262        assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_5));
4263    }
4264
    #[test]
    fn test_reveal_node_branch_cross_level() {
        // Reveals a branch at the upper/lower-subtrie boundary and checks that the branch
        // stays in the upper trie while its children land in lower subtries.
        //
        // Set up root branch with nibble 0x1 so path [0x1] is reachable.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        let path = Nibbles::from_nibbles([0x1]); // Exactly 1 nibble - boundary case
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x33)),
            RlpNode::word_rlp(&B256::repeat_byte(0x44)),
            RlpNode::word_rlp(&B256::repeat_byte(0x55)),
        ];
        let node = create_branch_node_with_children(&[0x0, 0x7, 0xf], child_hashes.clone());
        let masks = None;

        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

        // Branch node should be in upper trie; its hash (0xAA..AA) is memoized from the
        // Hash node that previously occupied path [0x1] via the root branch above.
        assert_eq!(
            trie.upper_subtrie.nodes.get(&path).unwrap(),
            &SparseNode::new_branch(
                0b1000000010000001.into(),
                &[
                    (0x0, child_hashes[0].as_hash().unwrap()),
                    (0x7, child_hashes[1].as_hash().unwrap()),
                    (0xf, child_hashes[2].as_hash().unwrap())
                ]
            )
            .with_state(SparseNodeState::Cached {
                rlp_node: RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                store_in_db_trie: Some(false),
            })
        );

        // All children should be in lower tries since they have paths of length 2, which
        // maps to a lower subtrie (cf. `SparseSubtrieType::from_path`).
        let child_paths = [
            Nibbles::from_nibbles([0x1, 0x0]),
            Nibbles::from_nibbles([0x1, 0x7]),
            Nibbles::from_nibbles([0x1, 0xf]),
        ];

        let mut children = child_paths
            .iter()
            .map(|path| ProofTrieNodeV2 {
                path: *path,
                node: create_leaf_node([0x0], 1),
                masks: None,
            })
            .collect::<Vec<_>>();

        trie.reveal_nodes(&mut children).unwrap();

        // Branch node should still be in upper trie but without any blinded children
        assert_matches!(
            trie.upper_subtrie.nodes.get(&path),
            Some(&SparseNode::Branch {
                state_mask,
                state: SparseNodeState::Cached { ref rlp_node, store_in_db_trie: Some(false) },
                blinded_mask,
                ..
            }) if state_mask == 0b1000000010000001.into() && blinded_mask.is_empty() && *rlp_node == RlpNode::word_rlp(&B256::repeat_byte(0xAA))
        );

        // Each revealed leaf lands in its own lower subtrie and carries the cached hash the
        // parent branch previously held for that child slot.
        for (i, child_path) in child_paths.iter().enumerate() {
            let idx = path_subtrie_index_unchecked(child_path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(&lower_subtrie.path, child_path);
            assert_eq!(
                lower_subtrie.nodes.get(child_path),
                Some(&SparseNode::Leaf {
                    key: Nibbles::from_nibbles([0x0]),
                    state: SparseNodeState::Cached {
                        rlp_node: child_hashes[i].clone(),
                        store_in_db_trie: Some(false)
                    }
                })
            );
        }
    }
4345
    #[test]
    fn test_update_subtrie_hashes_prefix_set_matching() {
        // Exercises how `update_subtrie_hashes` consumes the prefix set: keys handled by
        // revealed lower subtries are taken out, while the remaining keys (plus any
        // lower-subtrie root still missing a hash) must survive in `trie.prefix_set` for
        // the subsequent upper-subtrie hash pass.
        //
        // Create a trie with a root branch that makes paths [0x0, ...] and [0x3, ...]
        // reachable from the upper trie.
        let root_branch = create_branch_node_with_children(
            &[0x0, 0x3],
            [
                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
            ],
        );
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Create leaf paths. All three leaves hang off the [0x0] branch, each rooted in a
        // different lower subtrie: leaf_1 at 0x00..., leaf_2 at 0x01..., leaf_3 at 0x02...
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..2);
        let leaf_1_key = leaf_1_full_path.slice(2..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 1], vec![0; 62]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..2);
        let leaf_2_key = leaf_2_full_path.slice(2..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 2], vec![0; 62]].concat());
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), 1);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), 2);

        // Create branch node at [0x0] with only children 0x0 and 0x1.
        // Child 0x2 (leaf_3) will be inserted via update_leaf to create a fresh node
        // with hash: None.
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x00)),
            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
        ];
        let branch_path = Nibbles::from_nibbles([0x0]);
        let branch_node = create_branch_node_with_children(&[0x0, 0x1], child_hashes);

        // Reveal the existing nodes
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: branch_path, node: branch_node, masks: None },
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Insert leaf_3 via update_leaf. This modifies the branch at [0x0] to add child
        // 0x2 and creates a fresh leaf node with hash: None in the lower subtrie.
        let provider = NoRevealProvider;
        trie.update_leaf(leaf_3_full_path, encode_account_value(3), provider).unwrap();

        // Calculate subtrie indexes
        let subtrie_1_index = SparseSubtrieType::from_path(&leaf_1_path).lower_index().unwrap();
        let subtrie_2_index = SparseSubtrieType::from_path(&leaf_2_path).lower_index().unwrap();
        let leaf_3_path = leaf_3_full_path.slice(..2);
        let subtrie_3_index = SparseSubtrieType::from_path(&leaf_3_path).lower_index().unwrap();

        // Keys expected to remain in `trie.prefix_set` after `update_subtrie_hashes`.
        let mut unchanged_prefix_set = PrefixSetMut::from([
            Nibbles::from_nibbles([0x0]),
            leaf_2_full_path,
            Nibbles::from_nibbles([0x3, 0x0, 0x0]),
        ]);
        // Create a prefix set with the keys that match only the second subtrie
        let mut prefix_set = PrefixSetMut::from([
            // Match second subtrie
            Nibbles::from_nibbles([0x0, 0x1, 0x0]),
            Nibbles::from_nibbles([0x0, 0x1, 0x1, 0x0]),
        ]);
        prefix_set.extend(unchanged_prefix_set.clone());
        trie.prefix_set = prefix_set;

        // Update subtrie hashes
        trie.update_subtrie_hashes();

        // We expect that leaf 3 (0x02) should have been added to the prefix set, because it is
        // missing a hash and is the root node of a lower subtrie, and therefore would need to have
        // that hash calculated by `update_upper_subtrie_hashes`.
        unchanged_prefix_set.insert(leaf_3_full_path);

        // Check that the prefix set was updated
        assert_eq!(
            trie.prefix_set.clone().freeze().into_iter().collect::<Vec<_>>(),
            unchanged_prefix_set.freeze().into_iter().collect::<Vec<_>>()
        );
        // Check that subtries were returned back to the array
        assert!(trie.lower_subtries[subtrie_1_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_3_index].as_revealed_ref().is_some());
    }
4431
    #[test]
    fn test_subtrie_update_hashes() {
        // Cross-checks `SparseSubtrie::update_hashes` against the reference hash builder:
        // after revealing a small three-leaf trie, every cached node hash in the subtrie
        // must equal the hash the builder computes for the same structure.
        let mut subtrie = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));

        // Create leaf nodes with paths 0x0...0, 0x00001...0, 0x0010...0
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..5);
        let leaf_1_key = leaf_1_full_path.slice(5..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 0, 0, 0, 1], vec![0; 59]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..5);
        let leaf_2_key = leaf_2_full_path.slice(5..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 0, 1], vec![0; 61]].concat());
        let leaf_3_path = leaf_3_full_path.slice(..3);
        let leaf_3_key = leaf_3_full_path.slice(3..);

        let account_1 = create_account(1);
        let account_2 = create_account(2);
        let account_3 = create_account(3);
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);
        let leaf_3 = create_leaf_node(leaf_3_key.to_vec(), account_3.nonce);

        // Create bottom branch node
        // NOTE(review): unlike `create_branch_node_with_children`, `create_branch_node`
        // takes a leading key ([0]) — `branch_1` appears to be an extension at
        // `extension_path` wrapping a branch at `branch_1_path`; confirm against the
        // helper's definition.
        let extension_path = Nibbles::from_nibbles([0, 0, 0]);
        let branch_1_path = Nibbles::from_nibbles([0, 0, 0, 0]);
        let branch_1 = create_branch_node(
            Nibbles::from_nibbles([0]),
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // Create top branch node
        let branch_2_path = Nibbles::from_nibbles([0, 0]);
        let branch_2 = create_branch_node_with_children(
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&branch_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_3)),
            ],
        );

        // Reveal nodes, top-down
        subtrie.reveal_node(branch_2_path, &branch_2, None, None).unwrap();
        subtrie.reveal_node(extension_path, &branch_1, None, None).unwrap();
        subtrie.reveal_node(leaf_1_path, &leaf_1, None, None).unwrap();
        subtrie.reveal_node(leaf_2_path, &leaf_2, None, None).unwrap();
        subtrie.reveal_node(leaf_3_path, &leaf_3, None, None).unwrap();

        // Run hash builder for the three leaf nodes to obtain reference proof nodes
        let (_, _, proof_nodes, _, _) = run_hash_builder(
            [
                (leaf_1_full_path, account_1),
                (leaf_2_full_path, account_2),
                (leaf_3_full_path, account_3),
            ],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_2_path, leaf_1_full_path, leaf_2_full_path, leaf_3_full_path],
        );

        // Update hashes for the subtrie. The prefix set contains all three leaf paths, so
        // every node in the subtrie gets (re)hashed.
        subtrie.update_hashes(
            &mut PrefixSetMut::from([leaf_1_full_path, leaf_2_full_path, leaf_3_full_path])
                .freeze(),
            &mut None,
            &BranchNodeMasksMap::default(),
        );

        // Compare hashes between hash builder and subtrie, node by node
        let hash_builder_branch_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_1_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_1_hash =
            subtrie.nodes.get(&branch_1_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_1_hash, subtrie_branch_1_hash);

        let hash_builder_extension_hash =
            RlpNode::from_rlp(proof_nodes.get(&extension_path).unwrap().as_ref())
                .as_hash()
                .unwrap();
        let subtrie_extension_hash =
            subtrie.nodes.get(&extension_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_extension_hash, subtrie_extension_hash);

        let hash_builder_branch_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_2_hash =
            subtrie.nodes.get(&branch_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_2_hash, subtrie_branch_2_hash);

        let subtrie_leaf_1_hash = subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().unwrap();
        let hash_builder_leaf_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_1_path).unwrap().as_ref()).as_hash().unwrap();
        assert_eq!(hash_builder_leaf_1_hash, subtrie_leaf_1_hash);

        let hash_builder_leaf_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_2_hash = subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_2_hash, subtrie_leaf_2_hash);

        let hash_builder_leaf_3_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_3_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_3_hash = subtrie.nodes.get(&leaf_3_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_3_hash, subtrie_leaf_3_hash);
    }
4539
    #[test]
    fn test_remove_leaf_branch_becomes_extension() {
        //
        // 0x:      Extension (Key = 5)
        // 0x5:     └── Branch (Mask = 1001)
        // 0x50:        ├── 0 -> Extension (Key = 23)
        // 0x5023:      │        └── Branch (Mask = 0101)
        // 0x50231:     │            ├── 1 -> Leaf
        // 0x50233:     │            └── 3 -> Leaf
        // 0x53:        └── 3 -> Leaf (Key = 7)
        //
        // After removing 0x53, extension+branch+extension become a single extension
        //
        // NOTE(review): `leaf_key(prefix, n)` presumably builds a leaf key of the given
        // total nibble length starting with `prefix` — confirm against the helper.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b1001), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3])),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0101), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_leaf(leaf_key([0x7], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf with a full path of 0x537
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x7]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        let lower_subtrie_50 = trie.lower_subtries[0x50].as_revealed_ref().unwrap();

        // Check that the `SparseSubtrie` the leaf was removed from was itself removed, as it is now
        // empty.
        assert_matches!(trie.lower_subtries[0x53].as_revealed_ref(), None);

        // Check that the leaf node was removed, and that its parent/grandparent were modified
        // appropriately: the root extension now spans the full merged key 0x5023.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([])),
            Some(SparseNode::Extension{ key, ..})
            if key == &Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])
        );
        // The intermediate branch at 0x5 and extension at 0x50 were absorbed into it.
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
        assert_matches!(lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0])), None);
        // The deep branch at 0x5023 is untouched.
        assert_matches!(
            lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])),
            Some(SparseNode::Branch{ state_mask, .. })
            if *state_mask == 0b0101.into()
        );
    }
4606
4607    #[test]
4608    fn test_remove_leaf_branch_becomes_leaf() {
4609        //
4610        // 0x:      Branch (Mask = 0011)
4611        // 0x0:     ├── 0 -> Leaf (Key = 12)
4612        // 0x1:     └── 1 -> Leaf (Key = 34)
4613        //
4614        // After removing 0x012, branch becomes a leaf
4615        //
4616        let mut trie = new_test_trie(
4617            [
4618                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
4619                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4620                (Nibbles::from_nibbles([0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 63))),
4621            ]
4622            .into_iter(),
4623        );
4624
4625        // Add the branch node to updated_nodes to simulate it being modified earlier
4626        if let Some(updates) = trie.updates.as_mut() {
4627            updates
4628                .updated_nodes
4629                .insert(Nibbles::default(), BranchNodeCompact::new(0b11, 0, 0, vec![], None));
4630        }
4631
4632        let provider = NoRevealProvider;
4633
4634        // Remove the leaf with a full path of 0x012
4635        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4636        trie.remove_leaf(&leaf_full_path, provider).unwrap();
4637
4638        let upper_subtrie = &trie.upper_subtrie;
4639
4640        // Check that the leaf's value was removed
4641        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4642
4643        // Check that the branch node collapsed into a leaf node with the remaining child's key
4644        assert_matches!(
4645            upper_subtrie.nodes.get(&Nibbles::default()),
4646            Some(SparseNode::Leaf{ key, ..})
4647            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4648        );
4649
4650        // Check that the remaining child node was removed
4651        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4652        // Check that the removed child node was also removed
4653        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])), None);
4654
4655        // Check that updates were tracked correctly when branch collapsed
4656        let updates = trie.updates.as_ref().unwrap();
4657
4658        // The branch at root should be marked as removed since it collapsed
4659        assert!(updates.removed_nodes.contains(&Nibbles::default()));
4660
4661        // The branch should no longer be in updated_nodes
4662        assert!(!updates.updated_nodes.contains_key(&Nibbles::default()));
4663    }
4664
4665    #[test]
4666    fn test_remove_leaf_extension_becomes_leaf() {
4667        //
4668        // 0x:      Extension (Key = 5)
4669        // 0x5:     └── Branch (Mask = 0011)
4670        // 0x50:        ├── 0 -> Leaf (Key = 12)
4671        // 0x51:        └── 1 -> Leaf (Key = 34)
4672        //
4673        // After removing 0x5012, extension+branch becomes a leaf
4674        //
4675        let mut trie = new_test_trie(
4676            [
4677                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
4678                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
4679                (Nibbles::from_nibbles([0x5, 0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 62))),
4680                (Nibbles::from_nibbles([0x5, 0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
4681            ]
4682            .into_iter(),
4683        );
4684
4685        let provider = NoRevealProvider;
4686
4687        // Remove the leaf with a full path of 0x5012
4688        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x1, 0x2]));
4689        trie.remove_leaf(&leaf_full_path, provider).unwrap();
4690
4691        let upper_subtrie = &trie.upper_subtrie;
4692
4693        // Check that both lower subtries were removed. 0x50 should have been removed because
4694        // removing its leaf made it empty. 0x51 should have been removed after its own leaf was
4695        // collapsed into the upper trie, leaving it also empty.
4696        assert_matches!(trie.lower_subtries[0x50].as_revealed_ref(), None);
4697        assert_matches!(trie.lower_subtries[0x51].as_revealed_ref(), None);
4698
4699        // Check that the other leaf's value was moved to the upper trie
4700        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]));
4701        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));
4702
4703        // Check that the extension node collapsed into a leaf node
4704        assert_matches!(
4705            upper_subtrie.nodes.get(&Nibbles::default()),
4706            Some(SparseNode::Leaf{ key, ..})
4707            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]))
4708        );
4709
4710        // Check that intermediate nodes were removed
4711        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
4712    }
4713
    #[test]
    fn test_remove_leaf_branch_on_branch() {
        //
        // 0x:      Branch (Mask = 0101)
        // 0x0:     ├── 0 -> Leaf (Key = 12)
        // 0x2:     └── 2 -> Branch (Mask = 0011)
        // 0x20:        ├── 0 -> Leaf (Key = 34)
        // 0x21:        └── 1 -> Leaf (Key = 56)
        //
        // After removing 0x2034, the inner branch becomes a leaf
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0101), &[])),
                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
                (Nibbles::from_nibbles([0x2]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x2, 0x0]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
                (Nibbles::from_nibbles([0x2, 0x1]), SparseNode::new_leaf(leaf_key([0x5, 0x6], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf with a full path of 0x2034
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x0, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // Check that both lower subtries were removed. 0x20 should have been removed because
        // removing its leaf made it empty. 0x21 should have been removed after its own leaf was
        // collapsed into the upper trie, leaving it also empty.
        assert_matches!(trie.lower_subtries[0x20].as_revealed_ref(), None);
        assert_matches!(trie.lower_subtries[0x21].as_revealed_ref(), None);

        // Check that the other leaf's value was moved to the upper trie
        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x1, 0x5, 0x6]));
        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));

        // Check that the root branch still exists unchanged
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch{ state_mask, .. })
            if *state_mask == 0b0101.into()
        );

        // Check that the inner branch collapsed into a leaf, its key being the remaining
        // child's nibble (0x1) followed by that child's key
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x2])),
            Some(SparseNode::Leaf{ key, ..})
            if key == &leaf_key([0x1, 0x5, 0x6], 63)
        );
    }
4768
    #[test]
    fn test_remove_leaf_lower_subtrie_root_path_update() {
        //
        // 0x:        Extension (Key = 123, root of lower subtrie)
        // 0x123:     └── Branch (Mask = 0011000)
        // 0x1233:        ├── 3 -> Leaf (Key = [])
        // 0x1234:        └── 4 -> Extension (Key = 5)
        // 0x12345:           └── Branch (Mask = 0011)
        // 0x123450:              ├── 0 -> Leaf (Key = [])
        // 0x123451:              └── 1 -> Leaf (Key = [])
        //
        // After removing leaf at 0x1233, the branch at 0x123 becomes an extension to 0x12345, which
        // then gets merged with the root extension at 0x. The lower subtrie's `path` field should
        // be updated from 0x123 to 0x12345.
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0011000), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 60)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x5])),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]),
                    SparseNode::new_branch(TrieMask::new(0b0011), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Verify initial state - the lower subtrie's path should be 0x123
        let lower_subtrie_root_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
        assert_matches!(
            trie.lower_subtrie_for_path_mut(&lower_subtrie_root_path),
            Some(subtrie)
            if subtrie.path == lower_subtrie_root_path
        );

        // Remove the leaf at 0x1233
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        // After removal:
        // 1. The branch at 0x123 should become an extension to 0x12345
        // 2. That extension should merge with the root extension at 0x
        // 3. The lower subtrie's path should be updated to 0x12345
        // (0x12 is the lower-subtrie index for all paths starting with nibbles [0x1, 0x2].)
        let lower_subtrie = trie.lower_subtries[0x12].as_revealed_ref().unwrap();
        assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]));

        // Verify the root extension now points all the way to 0x12345
        assert_matches!(
            trie.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, .. })
            if key == &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])
        );

        // Verify the branch at 0x12345 hasn't been modified
        assert_matches!(
            lower_subtrie.nodes.get(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            Some(SparseNode::Branch { state_mask, .. })
            if state_mask == &TrieMask::new(0b0011)
        );
    }
4850
4851    #[test]
4852    fn test_remove_leaf_remaining_child_needs_reveal() {
4853        //
4854        // 0x:      Branch (Mask = 0011)
4855        // 0x0:     ├── 0 -> Leaf (Key = 12)
4856        // 0x1:     └── 1 -> Hash (blinded leaf)
4857        //
4858        // After removing 0x012, the hash node needs to be revealed to collapse the branch
4859        //
4860        let mut trie = new_test_trie(
4861            [
4862                (
4863                    Nibbles::default(),
4864                    SparseNode::new_branch(
4865                        TrieMask::new(0b0011),
4866                        &[(0x1, B256::repeat_byte(0xab))],
4867                    ),
4868                ),
4869                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4870            ]
4871            .into_iter(),
4872        );
4873
4874        // Create a mock provider that will reveal the blinded leaf
4875        let revealed_leaf = create_leaf_node(leaf_key([0x3, 0x4], 63).to_vec(), 42);
4876        let mut encoded = Vec::new();
4877        revealed_leaf.encode(&mut encoded);
4878
4879        // Try removing the leaf with a full path of 0x012, this should fail because the leaf is
4880        // blinded
4881        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4882        let Err(err) = trie.remove_leaf(&leaf_full_path, NoRevealProvider) else {
4883            panic!("expected error");
4884        };
4885        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if *path == Nibbles::from_nibbles([0x1]));
4886
4887        // Now reveal the leaf and try removing it again
4888        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
4889            path: Nibbles::from_nibbles([0x1]),
4890            node: revealed_leaf,
4891            masks: None,
4892        }])
4893        .unwrap();
4894        trie.remove_leaf(&leaf_full_path, NoRevealProvider).unwrap();
4895
4896        let upper_subtrie = &trie.upper_subtrie;
4897
4898        // Check that the leaf value was removed
4899        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4900
4901        // Check that the branch node collapsed into a leaf node with the revealed child's key
4902        assert_matches!(
4903            upper_subtrie.nodes.get(&Nibbles::default()),
4904            Some(SparseNode::Leaf{ key, ..})
4905            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4906        );
4907
4908        // Check that the remaining child node was removed (since it was merged)
4909        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4910    }
4911
4912    #[test]
4913    fn test_remove_leaf_root() {
4914        //
4915        // 0x:      Leaf (Key = 123)
4916        //
4917        // After removing 0x123, the trie becomes empty
4918        //
4919        let mut trie = new_test_trie(core::iter::once((
4920            Nibbles::default(),
4921            SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
4922        )));
4923
4924        let provider = NoRevealProvider;
4925
4926        // Remove the leaf with a full key of 0x123
4927        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
4928        trie.remove_leaf(&leaf_full_path, provider).unwrap();
4929
4930        let upper_subtrie = &trie.upper_subtrie;
4931
4932        // Check that the leaf value was removed
4933        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4934
4935        // Check that the root node was changed to Empty
4936        assert_matches!(upper_subtrie.nodes.get(&Nibbles::default()), Some(SparseNode::Empty));
4937    }
4938
    #[test]
    fn test_remove_leaf_unsets_hash_along_path() {
        //
        // Creates a trie structure:
        // 0x:      Branch (with hash set)
        // 0x0:     ├── Extension (with hash set)
        // 0x01:    │   └── Branch (with hash set)
        // 0x012:   │       ├── Leaf (Key = 34, with hash set)
        // 0x013:   │       ├── Leaf (Key = 56, with hash set)
        // 0x014:   │       └── Leaf (Key = 67, with hash set)
        // 0x1:     └── Leaf (Key = 78, with hash set)
        //
        // When removing leaf at 0x01234, all nodes along the path (root branch,
        // extension at 0x0, branch at 0x01) should have their hash field unset,
        // i.e. transition from `Cached` to `Dirty`, while nodes off the path keep
        // their cached hashes.
        //

        // Helper producing a `Cached` state so every node starts with a hash set.
        let make_revealed = |hash: B256| SparseNodeState::Cached {
            rlp_node: RlpNode::word_rlp(&hash),
            store_in_db_trie: None,
        };
        let mut trie = new_test_trie(
            [
                (
                    Nibbles::default(),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b0011),
                        state: make_revealed(B256::repeat_byte(0x10)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0]),
                    SparseNode::Extension {
                        key: Nibbles::from_nibbles([0x1]),
                        state: make_revealed(B256::repeat_byte(0x20)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1]),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b11100),
                        state: make_revealed(B256::repeat_byte(0x30)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x2]),
                    SparseNode::Leaf {
                        key: leaf_key([0x3, 0x4], 61),
                        state: make_revealed(B256::repeat_byte(0x40)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x3]),
                    SparseNode::Leaf {
                        key: leaf_key([0x5, 0x6], 61),
                        state: make_revealed(B256::repeat_byte(0x50)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x4]),
                    SparseNode::Leaf {
                        key: leaf_key([0x6, 0x7], 61),
                        state: make_revealed(B256::repeat_byte(0x60)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x1]),
                    SparseNode::Leaf {
                        key: leaf_key([0x7, 0x8], 63),
                        state: make_revealed(B256::repeat_byte(0x70)),
                    },
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove a leaf which does not exist; this should have no effect, so every
        // node must still carry its cached hash afterwards.
        trie.remove_leaf(
            &pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4, 0xF])),
            provider,
        )
        .unwrap();
        for (path, node) in trie.all_nodes() {
            assert!(node.cached_hash().is_some(), "path {path:?} should still have a hash");
        }

        // Remove the leaf at path 0x01234
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        let lower_subtrie_10 = trie.lower_subtries[0x01].as_revealed_ref().unwrap();

        // Verify that hash fields are unset for all nodes along the path to the removed leaf
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])),
            Some(SparseNode::Extension { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1])),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );

        // Verify that nodes not on the path still have their hashes
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x3])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x4])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
    }
5065
    #[test]
    fn test_parallel_sparse_trie_root() {
        // End-to-end root computation: build a small trie spanning both the upper
        // subtrie (extension + branch) and two lower subtries (the leaves), force
        // recomputation by marking all nodes dirty, and compare the resulting root
        // against the reference `HashBuilder` implementation.

        // Step 1: Create the trie structure
        // Extension node at 0x with key 0x2 (goes to upper subtrie)
        let extension_path = Nibbles::new();
        let extension_key = Nibbles::from_nibbles([0x2]);

        // Branch node at 0x2 with children 0 and 1 (goes to upper subtrie)
        let branch_path = Nibbles::from_nibbles([0x2]);

        // Leaf nodes at 0x20 and 0x21 (go to lower subtries, since their paths are
        // longer than UPPER_TRIE_MAX_DEPTH)
        let leaf_1_path = Nibbles::from_nibbles([0x2, 0x0]);
        let leaf_1_key = Nibbles::from_nibbles(vec![0; 62]); // Remaining key
        let leaf_1_full_path = Nibbles::from_nibbles([vec![0x2, 0x0], vec![0; 62]].concat());

        let leaf_2_path = Nibbles::from_nibbles([0x2, 0x1]);
        let leaf_2_key = Nibbles::from_nibbles(vec![0; 62]); // Remaining key
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0x2, 0x1], vec![0; 62]].concat());

        // Create accounts
        let account_1 = create_account(1);
        let account_2 = create_account(2);

        // Create leaf nodes
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);

        // Create branch node with children at indices 0 and 1
        let branch = create_branch_node(
            extension_key,
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // Step 2: Reveal nodes in the trie, starting from the root
        let mut trie = ParallelSparseTrie::from_root(branch, None, true).unwrap();
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Step 3: Reset hashes for all revealed nodes to test actual hash calculation
        // Reset upper subtrie node hashes
        trie.upper_subtrie
            .nodes
            .get_mut(&extension_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.upper_subtrie.nodes.get_mut(&branch_path).unwrap().set_state(SparseNodeState::Dirty);

        // Reset lower subtrie node hashes
        let leaf_1_subtrie_idx = path_subtrie_index_unchecked(&leaf_1_path);
        let leaf_2_subtrie_idx = path_subtrie_index_unchecked(&leaf_2_path);

        trie.lower_subtries[leaf_1_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_1_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.lower_subtries[leaf_2_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_2_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);

        // Step 4: Add changed leaf node paths to prefix set, so that `root` knows
        // which parts of the trie must be recomputed
        trie.prefix_set.insert(leaf_1_full_path);
        trie.prefix_set.insert(leaf_2_full_path);

        // Step 5: Calculate root using our implementation
        let root = trie.root();

        // Step 6: Calculate root using HashBuilder for comparison
        let (hash_builder_root, _, _proof_nodes, _, _) = run_hash_builder(
            [(leaf_1_full_path, account_1), (leaf_2_full_path, account_2)],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_path, leaf_1_full_path, leaf_2_full_path],
        );

        // Step 7: Verify the roots match
        assert_eq!(root, hash_builder_root);

        // Verify hashes were computed and cached again for every node that was
        // marked dirty above
        let leaf_1_subtrie = trie.lower_subtries[leaf_1_subtrie_idx].as_revealed_ref().unwrap();
        let leaf_2_subtrie = trie.lower_subtries[leaf_2_subtrie_idx].as_revealed_ref().unwrap();
        assert!(trie.upper_subtrie.nodes.get(&extension_path).unwrap().cached_hash().is_some());
        assert!(trie.upper_subtrie.nodes.get(&branch_path).unwrap().cached_hash().is_some());
        assert!(leaf_1_subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().is_some());
        assert!(leaf_2_subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().is_some());
    }
5165
5166    #[test]
5167    fn sparse_trie_empty_update_one() {
5168        let ctx = ParallelSparseTrieTestContext;
5169
5170        let key = Nibbles::unpack(B256::with_last_byte(42));
5171        let value = || Account::default();
5172        let value_encoded = || {
5173            let mut account_rlp = Vec::new();
5174            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5175            account_rlp
5176        };
5177
5178        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5179            run_hash_builder(
5180                [(key, value())],
5181                NoopAccountTrieCursor::default(),
5182                Default::default(),
5183                [key],
5184            );
5185
5186        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5187        ctx.update_leaves(&mut sparse, [(key, value_encoded())]);
5188        ctx.assert_with_hash_builder(
5189            &mut sparse,
5190            hash_builder_root,
5191            hash_builder_updates,
5192            hash_builder_proof_nodes,
5193        );
5194    }
5195
5196    #[test]
5197    fn sparse_trie_empty_update_multiple_lower_nibbles() {
5198        let ctx = ParallelSparseTrieTestContext;
5199
5200        let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::<Vec<_>>();
5201        let value = || Account::default();
5202        let value_encoded = || {
5203            let mut account_rlp = Vec::new();
5204            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5205            account_rlp
5206        };
5207
5208        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5209            run_hash_builder(
5210                paths.iter().copied().zip(core::iter::repeat_with(value)),
5211                NoopAccountTrieCursor::default(),
5212                Default::default(),
5213                paths.clone(),
5214            );
5215
5216        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5217        ctx.update_leaves(
5218            &mut sparse,
5219            paths.into_iter().zip(core::iter::repeat_with(value_encoded)),
5220        );
5221
5222        ctx.assert_with_hash_builder(
5223            &mut sparse,
5224            hash_builder_root,
5225            hash_builder_updates,
5226            hash_builder_proof_nodes,
5227        );
5228    }
5229
5230    #[test]
5231    fn sparse_trie_empty_update_multiple_upper_nibbles() {
5232        let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5233        let value = || Account::default();
5234        let value_encoded = || {
5235            let mut account_rlp = Vec::new();
5236            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5237            account_rlp
5238        };
5239
5240        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5241            run_hash_builder(
5242                paths.iter().copied().zip(core::iter::repeat_with(value)),
5243                NoopAccountTrieCursor::default(),
5244                Default::default(),
5245                paths.clone(),
5246            );
5247
5248        let provider = DefaultTrieNodeProvider;
5249        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5250        for path in &paths {
5251            sparse.update_leaf(*path, value_encoded(), &provider).unwrap();
5252        }
5253        let sparse_root = sparse.root();
5254        let sparse_updates = sparse.take_updates();
5255
5256        assert_eq!(sparse_root, hash_builder_root);
5257        assert_eq!(sparse_updates.updated_nodes, hash_builder_updates.account_nodes);
5258        assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
5259    }
5260
5261    #[test]
5262    fn sparse_trie_empty_update_multiple() {
5263        let ctx = ParallelSparseTrieTestContext;
5264
5265        let paths = (0..=255)
5266            .map(|b| {
5267                Nibbles::unpack(if b % 2 == 0 {
5268                    B256::repeat_byte(b)
5269                } else {
5270                    B256::with_last_byte(b)
5271                })
5272            })
5273            .collect::<Vec<_>>();
5274        let value = || Account::default();
5275        let value_encoded = || {
5276            let mut account_rlp = Vec::new();
5277            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5278            account_rlp
5279        };
5280
5281        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5282            run_hash_builder(
5283                paths.iter().sorted_unstable().copied().zip(core::iter::repeat_with(value)),
5284                NoopAccountTrieCursor::default(),
5285                Default::default(),
5286                paths.clone(),
5287            );
5288
5289        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5290        ctx.update_leaves(
5291            &mut sparse,
5292            paths.iter().copied().zip(core::iter::repeat_with(value_encoded)),
5293        );
5294        ctx.assert_with_hash_builder(
5295            &mut sparse,
5296            hash_builder_root,
5297            hash_builder_updates,
5298            hash_builder_proof_nodes,
5299        );
5300    }
5301
5302    #[test]
5303    fn sparse_trie_empty_update_repeated() {
5304        let ctx = ParallelSparseTrieTestContext;
5305
5306        let paths = (0..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5307        let old_value = Account { nonce: 1, ..Default::default() };
5308        let old_value_encoded = {
5309            let mut account_rlp = Vec::new();
5310            old_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5311            account_rlp
5312        };
5313        let new_value = Account { nonce: 2, ..Default::default() };
5314        let new_value_encoded = {
5315            let mut account_rlp = Vec::new();
5316            new_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5317            account_rlp
5318        };
5319
5320        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5321            run_hash_builder(
5322                paths.iter().copied().zip(core::iter::repeat_with(|| old_value)),
5323                NoopAccountTrieCursor::default(),
5324                Default::default(),
5325                paths.clone(),
5326            );
5327
5328        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5329        ctx.update_leaves(
5330            &mut sparse,
5331            paths.iter().copied().zip(core::iter::repeat(old_value_encoded)),
5332        );
5333        ctx.assert_with_hash_builder(
5334            &mut sparse,
5335            hash_builder_root,
5336            hash_builder_updates,
5337            hash_builder_proof_nodes,
5338        );
5339
5340        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5341            run_hash_builder(
5342                paths.iter().copied().zip(core::iter::repeat(new_value)),
5343                NoopAccountTrieCursor::default(),
5344                Default::default(),
5345                paths.clone(),
5346            );
5347
5348        ctx.update_leaves(
5349            &mut sparse,
5350            paths.iter().copied().zip(core::iter::repeat(new_value_encoded)),
5351        );
5352        ctx.assert_with_hash_builder(
5353            &mut sparse,
5354            hash_builder_root,
5355            hash_builder_updates,
5356            hash_builder_proof_nodes,
5357        );
5358    }
5359
5360    #[test]
5361    fn sparse_trie_remove_leaf() {
5362        let ctx = ParallelSparseTrieTestContext;
5363        let provider = DefaultTrieNodeProvider;
5364        let mut sparse = ParallelSparseTrie::default();
5365
5366        let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec();
5367
5368        ctx.update_leaves(
5369            &mut sparse,
5370            [
5371                (
5372                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
5373                    value.clone(),
5374                ),
5375                (
5376                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
5377                    value.clone(),
5378                ),
5379                (
5380                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
5381                    value.clone(),
5382                ),
5383                (
5384                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
5385                    value.clone(),
5386                ),
5387                (
5388                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
5389                    value.clone(),
5390                ),
5391                (pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])), value),
5392            ],
5393        );
5394
5395        // Extension (Key = 5)
5396        // └── Branch (Mask = 1011)
5397        //     ├── 0 -> Extension (Key = 23)
5398        //     │        └── Branch (Mask = 0101)
5399        //     │              ├── 1 -> Leaf (Key = 1, Path = 50231)
5400        //     │              └── 3 -> Leaf (Key = 3, Path = 50233)
5401        //     ├── 2 -> Leaf (Key = 013, Path = 52013)
5402        //     └── 3 -> Branch (Mask = 0101)
5403        //                ├── 1 -> Leaf (Key = 3102, Path = 53102)
5404        //                └── 3 -> Branch (Mask = 1010)
5405        //                       ├── 0 -> Leaf (Key = 3302, Path = 53302)
5406        //                       └── 2 -> Leaf (Key = 3320, Path = 53320)
5407        pretty_assertions::assert_eq!(
5408            parallel_sparse_trie_nodes(&sparse)
5409                .into_iter()
5410                .map(|(k, v)| (*k, v.clone()))
5411                .collect::<BTreeMap<_, _>>(),
5412            BTreeMap::from_iter([
5413                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5414                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into(), &[])),
5415                (
5416                    Nibbles::from_nibbles([0x5, 0x0]),
5417                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
5418                ),
5419                (
5420                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
5421                    SparseNode::new_branch(0b1010.into(), &[])
5422                ),
5423                (
5424                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
5425                    SparseNode::new_leaf(leaf_key([], 59))
5426                ),
5427                (
5428                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
5429                    SparseNode::new_leaf(leaf_key([], 59))
5430                ),
5431                (
5432                    Nibbles::from_nibbles([0x5, 0x2]),
5433                    SparseNode::new_leaf(leaf_key([0x0, 0x1, 0x3], 62))
5434                ),
5435                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
5436                (
5437                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
5438                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
5439                ),
5440                (
5441                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5442                    SparseNode::new_branch(0b0101.into(), &[])
5443                ),
5444                (
5445                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5446                    SparseNode::new_leaf(leaf_key([0x2], 60))
5447                ),
5448                (
5449                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5450                    SparseNode::new_leaf(leaf_key([0x0], 60))
5451                )
5452            ])
5453        );
5454
5455        sparse
5456            .remove_leaf(
5457                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
5458                &provider,
5459            )
5460            .unwrap();
5461
5462        // Extension (Key = 5)
5463        // └── Branch (Mask = 1001)
5464        //     ├── 0 -> Extension (Key = 23)
5465        //     │        └── Branch (Mask = 0101)
5466        //     │              ├── 1 -> Leaf (Path = 50231...)
5467        //     │              └── 3 -> Leaf (Path = 50233...)
5468        //     └── 3 -> Branch (Mask = 0101)
5469        //                ├── 1 -> Leaf (Path = 53102...)
5470        //                └── 3 -> Branch (Mask = 1010)
5471        //                       ├── 0 -> Leaf (Path = 53302...)
5472        //                       └── 2 -> Leaf (Path = 53320...)
5473        pretty_assertions::assert_eq!(
5474            parallel_sparse_trie_nodes(&sparse)
5475                .into_iter()
5476                .map(|(k, v)| (*k, v.clone()))
5477                .collect::<BTreeMap<_, _>>(),
5478            BTreeMap::from_iter([
5479                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5480                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5481                (
5482                    Nibbles::from_nibbles([0x5, 0x0]),
5483                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
5484                ),
5485                (
5486                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
5487                    SparseNode::new_branch(0b1010.into(), &[])
5488                ),
5489                (
5490                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
5491                    SparseNode::new_leaf(leaf_key([], 59))
5492                ),
5493                (
5494                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
5495                    SparseNode::new_leaf(leaf_key([], 59))
5496                ),
5497                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
5498                (
5499                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
5500                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
5501                ),
5502                (
5503                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5504                    SparseNode::new_branch(0b0101.into(), &[])
5505                ),
5506                (
5507                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5508                    SparseNode::new_leaf(leaf_key([0x2], 60))
5509                ),
5510                (
5511                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5512                    SparseNode::new_leaf(leaf_key([0x0], 60))
5513                )
5514            ])
5515        );
5516
5517        sparse
5518            .remove_leaf(
5519                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
5520                &provider,
5521            )
5522            .unwrap();
5523
5524        // Extension (Key = 5)
5525        // └── Branch (Mask = 1001)
5526        //     ├── 0 -> Leaf (Path = 50233...)
5527        //     └── 3 -> Branch (Mask = 0101)
5528        //                ├── 1 -> Leaf (Path = 53102...)
5529        //                └── 3 -> Branch (Mask = 1010)
5530        //                       ├── 0 -> Leaf (Path = 53302...)
5531        //                       └── 2 -> Leaf (Path = 53320...)
5532        pretty_assertions::assert_eq!(
5533            parallel_sparse_trie_nodes(&sparse)
5534                .into_iter()
5535                .map(|(k, v)| (*k, v.clone()))
5536                .collect::<BTreeMap<_, _>>(),
5537            BTreeMap::from_iter([
5538                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5539                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5540                (
5541                    Nibbles::from_nibbles([0x5, 0x0]),
5542                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
5543                ),
5544                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
5545                (
5546                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
5547                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
5548                ),
5549                (
5550                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5551                    SparseNode::new_branch(0b0101.into(), &[])
5552                ),
5553                (
5554                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5555                    SparseNode::new_leaf(leaf_key([0x2], 60))
5556                ),
5557                (
5558                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5559                    SparseNode::new_leaf(leaf_key([0x0], 60))
5560                )
5561            ])
5562        );
5563
5564        sparse
5565            .remove_leaf(
5566                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
5567                &provider,
5568            )
5569            .unwrap();
5570
5571        // Extension (Key = 5)
5572        // └── Branch (Mask = 1001)
5573        //     ├── 0 -> Leaf (Path = 50233...)
5574        //     └── 3 -> Branch (Mask = 1010)
5575        //                ├── 0 -> Leaf (Path = 53302...)
5576        //                └── 2 -> Leaf (Path = 53320...)
5577        pretty_assertions::assert_eq!(
5578            parallel_sparse_trie_nodes(&sparse)
5579                .into_iter()
5580                .map(|(k, v)| (*k, v.clone()))
5581                .collect::<BTreeMap<_, _>>(),
5582            BTreeMap::from_iter([
5583                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5584                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5585                (
5586                    Nibbles::from_nibbles([0x5, 0x0]),
5587                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
5588                ),
5589                (
5590                    Nibbles::from_nibbles([0x5, 0x3]),
5591                    SparseNode::new_ext(Nibbles::from_nibbles([0x3]))
5592                ),
5593                (
5594                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5595                    SparseNode::new_branch(0b0101.into(), &[])
5596                ),
5597                (
5598                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5599                    SparseNode::new_leaf(leaf_key([0x2], 60))
5600                ),
5601                (
5602                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5603                    SparseNode::new_leaf(leaf_key([0x0], 60))
5604                )
5605            ])
5606        );
5607
5608        sparse
5609            .remove_leaf(
5610                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])),
5611                &provider,
5612            )
5613            .unwrap();
5614
5615        // Extension (Key = 5)
5616        // └── Branch (Mask = 1001)
5617        //     ├── 0 -> Leaf (Path = 50233...)
5618        //     └── 3 -> Leaf (Path = 53302...)
5619        pretty_assertions::assert_eq!(
5620            parallel_sparse_trie_nodes(&sparse)
5621                .into_iter()
5622                .map(|(k, v)| (*k, v.clone()))
5623                .collect::<BTreeMap<_, _>>(),
5624            BTreeMap::from_iter([
5625                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5626                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5627                (
5628                    Nibbles::from_nibbles([0x5, 0x0]),
5629                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
5630                ),
5631                (
5632                    Nibbles::from_nibbles([0x5, 0x3]),
5633                    SparseNode::new_leaf(leaf_key([0x3, 0x0, 0x2], 62))
5634                ),
5635            ])
5636        );
5637
5638        sparse
5639            .remove_leaf(
5640                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
5641                &provider,
5642            )
5643            .unwrap();
5644
5645        // Leaf (Path = 53302...)
5646        pretty_assertions::assert_eq!(
5647            parallel_sparse_trie_nodes(&sparse)
5648                .into_iter()
5649                .map(|(k, v)| (*k, v.clone()))
5650                .collect::<BTreeMap<_, _>>(),
5651            BTreeMap::from_iter([(
5652                Nibbles::default(),
5653                SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([
5654                    0x5, 0x3, 0x3, 0x0, 0x2
5655                ])))
5656            ),])
5657        );
5658
5659        sparse
5660            .remove_leaf(
5661                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
5662                &provider,
5663            )
5664            .unwrap();
5665
5666        // Empty
5667        pretty_assertions::assert_eq!(
5668            parallel_sparse_trie_nodes(&sparse)
5669                .into_iter()
5670                .map(|(k, v)| (*k, v.clone()))
5671                .collect::<BTreeMap<_, _>>(),
5672            BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty)])
5673        );
5674    }
5675
5676    #[test]
5677    fn sparse_trie_remove_leaf_blinded() {
5678        let leaf = LeafNode::new(
5679            Nibbles::default(),
5680            alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5681        );
5682        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5683            Nibbles::default(),
5684            vec![
5685                RlpNode::word_rlp(&B256::repeat_byte(1)),
5686                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5687            ],
5688            TrieMask::new(0b11),
5689            None,
5690        ));
5691
5692        let provider = DefaultTrieNodeProvider;
5693        let mut sparse = ParallelSparseTrie::from_root(
5694            branch.clone(),
5695            Some(BranchNodeMasks {
5696                hash_mask: TrieMask::new(0b01),
5697                tree_mask: TrieMask::default(),
5698            }),
5699            false,
5700        )
5701        .unwrap();
5702
5703        // Reveal a branch node and one of its children
5704        //
5705        // Branch (Mask = 11)
5706        // ├── 0 -> Hash (Path = 0)
5707        // └── 1 -> Leaf (Path = 1)
5708        sparse
5709            .reveal_nodes(&mut [
5710                ProofTrieNodeV2 {
5711                    path: Nibbles::default(),
5712                    node: branch,
5713                    masks: Some(BranchNodeMasks {
5714                        hash_mask: TrieMask::default(),
5715                        tree_mask: TrieMask::new(0b01),
5716                    }),
5717                },
5718                ProofTrieNodeV2 {
5719                    path: Nibbles::from_nibbles([0x1]),
5720                    node: TrieNodeV2::Leaf(leaf),
5721                    masks: None,
5722                },
5723            ])
5724            .unwrap();
5725
5726        // Removing a blinded leaf should result in an error
5727        assert_matches!(
5728            sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x0])), &provider).map_err(|e| e.into_kind()),
5729            Err(SparseTrieErrorKind::BlindedNode(path)) if path == Nibbles::from_nibbles([0x0])
5730        );
5731    }
5732
5733    #[test]
5734    fn sparse_trie_remove_leaf_non_existent() {
5735        let leaf = LeafNode::new(
5736            Nibbles::default(),
5737            alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5738        );
5739        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5740            Nibbles::default(),
5741            vec![
5742                RlpNode::word_rlp(&B256::repeat_byte(1)),
5743                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5744            ],
5745            TrieMask::new(0b11),
5746            None,
5747        ));
5748
5749        let provider = DefaultTrieNodeProvider;
5750        let mut sparse = ParallelSparseTrie::from_root(
5751            branch.clone(),
5752            Some(BranchNodeMasks {
5753                hash_mask: TrieMask::new(0b01),
5754                tree_mask: TrieMask::default(),
5755            }),
5756            false,
5757        )
5758        .unwrap();
5759
5760        // Reveal a branch node and one of its children
5761        //
5762        // Branch (Mask = 11)
5763        // ├── 0 -> Hash (Path = 0)
5764        // └── 1 -> Leaf (Path = 1)
5765        sparse
5766            .reveal_nodes(&mut [
5767                ProofTrieNodeV2 {
5768                    path: Nibbles::default(),
5769                    node: branch,
5770                    masks: Some(BranchNodeMasks {
5771                        hash_mask: TrieMask::default(),
5772                        tree_mask: TrieMask::new(0b01),
5773                    }),
5774                },
5775                ProofTrieNodeV2 {
5776                    path: Nibbles::from_nibbles([0x1]),
5777                    node: TrieNodeV2::Leaf(leaf),
5778                    masks: None,
5779                },
5780            ])
5781            .unwrap();
5782
5783        // Removing a non-existent leaf should be a noop
5784        let sparse_old = sparse.clone();
5785        assert_matches!(
5786            sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x2])), &provider),
5787            Ok(())
5788        );
5789        assert_eq!(sparse, sparse_old);
5790    }
5791
    /// Differential fuzz test: applies random batches of account insertions and
    /// deletions to a `ParallelSparseTrie` and checks the root, the branch-node
    /// updates, and the proof nodes against the hash builder used as a reference
    /// implementation.
    #[test]
    fn sparse_trie_fuzz() {
        // Having only the first 3 nibbles set, we narrow down the range of keys
        // to 4096 different hashes. It allows us to generate collisions more likely
        // to test the sparse trie updates.
        const KEY_NIBBLES_LEN: usize = 3;

        // Runs one generated scenario: for each (insertions, deletions) batch, apply
        // it to both the sparse trie and the hash builder and assert they agree after
        // the insertions and again after the deletions.
        fn test(updates: Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)>) {
            {
                // `state` mirrors the cumulative key/value contents fed to the hash
                // builder across batches.
                let mut state = BTreeMap::default();
                let default_provider = DefaultTrieNodeProvider;
                let provider_factory = create_test_provider_factory();
                let mut sparse = ParallelSparseTrie::default().with_updates(true);

                for (update, keys_to_delete) in updates {
                    // Insert state updates into the sparse trie and calculate the root
                    for (key, account) in update.clone() {
                        let account = account.into_trie_account(EMPTY_ROOT_HASH);
                        let mut account_rlp = Vec::new();
                        account.encode(&mut account_rlp);
                        sparse.update_leaf(key, account_rlp, &default_provider).unwrap();
                    }
                    // We need to clone the sparse trie, so that all updated branch nodes are
                    // preserved, and not only those that were changed after the last call to
                    // `root()`.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    // Insert state updates into the hash builder and calculate the root
                    state.extend(update);
                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                Default::default(),
                                state.keys().copied(),
                            )
                        });

                    // Extract account nodes before moving hash_builder_updates
                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Write trie updates to the database
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    // Assert that the sparse trie root matches the hash builder root
                    assert_eq!(sparse_root, hash_builder_root);
                    // Assert that the sparse trie updates match the hash builder updates
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    // Assert that the sparse trie nodes match the hash builder proof nodes
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );

                    // Delete some keys from both the hash builder and the sparse trie and check
                    // that the sparse trie root still matches the hash builder root
                    for key in &keys_to_delete {
                        // `unwrap` asserts the key was actually present in `state`,
                        // which `transform_updates` guarantees.
                        state.remove(key).unwrap();
                        sparse.remove_leaf(key, &default_provider).unwrap();
                    }

                    // We need to clone the sparse trie, so that all updated branch nodes are
                    // preserved, and not only those that were changed after the last call to
                    // `root()`.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                keys_to_delete
                                    .iter()
                                    .map(|nibbles| B256::from_slice(&nibbles.pack()))
                                    .collect(),
                                state.keys().copied(),
                            )
                        });

                    // Extract account nodes before moving hash_builder_updates
                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Write trie updates to the database
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    // Assert that the sparse trie root matches the hash builder root
                    assert_eq!(sparse_root, hash_builder_root);
                    // Assert that the sparse trie updates match the hash builder updates
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    // Assert that the sparse trie nodes match the hash builder proof nodes
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );
                }
            }
        }

        // Pairs each generated update batch with a random set of previously-inserted
        // keys to delete (half the batch size), so deletions always target keys that
        // exist in the trie at that point.
        fn transform_updates(
            updates: Vec<BTreeMap<Nibbles, Account>>,
            mut rng: impl rand::Rng,
        ) -> Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)> {
            // Pool of all keys inserted so far across batches.
            let mut keys = BTreeSet::new();
            updates
                .into_iter()
                .map(|update| {
                    keys.extend(update.keys().copied());

                    let keys_to_delete_len = update.len() / 2;
                    let keys_to_delete = (0..keys_to_delete_len)
                        .map(|_| {
                            let key =
                                *rand::seq::IteratorRandom::choose(keys.iter(), &mut rng).unwrap();
                            // `take` removes the chosen key from the pool so each key
                            // is deleted at most once.
                            keys.take(&key).unwrap()
                        })
                        .collect();

                    (update, keys_to_delete)
                })
                .collect::<Vec<_>>()
        }

        proptest!(ProptestConfig::with_cases(10), |(
            updates in proptest::collection::vec(
                proptest::collection::btree_map(
                    any_with::<Nibbles>(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles_right),
                    arb::<Account>(),
                    1..50,
                ),
                1..50,
            ).prop_perturb(transform_updates)
        )| {
            test(updates)
        });
    }
5948
5949    #[test]
5950    fn sparse_trie_two_leaves_at_lower_roots() {
5951        let provider = DefaultTrieNodeProvider;
5952        let mut trie = ParallelSparseTrie::default().with_updates(true);
5953        let key_50 = Nibbles::unpack(hex!(
5954            "0x5000000000000000000000000000000000000000000000000000000000000000"
5955        ));
5956        let key_51 = Nibbles::unpack(hex!(
5957            "0x5100000000000000000000000000000000000000000000000000000000000000"
5958        ));
5959
5960        let account = Account::default().into_trie_account(EMPTY_ROOT_HASH);
5961        let mut account_rlp = Vec::new();
5962        account.encode(&mut account_rlp);
5963
5964        // Add a leaf and calculate the root.
5965        trie.update_leaf(key_50, account_rlp.clone(), &provider).unwrap();
5966        trie.root();
5967
5968        // Add a second leaf and assert that the root is the expected value.
5969        trie.update_leaf(key_51, account_rlp.clone(), &provider).unwrap();
5970
5971        let expected_root =
5972            hex!("0xdaf0ef9f91a2f179bb74501209effdb5301db1697bcab041eca2234b126e25de");
5973        let root = trie.root();
5974        assert_eq!(root, expected_root);
5975        assert_eq!(SparseTrieUpdates::default(), trie.take_updates());
5976    }
5977
5978    /// We have three leaves that share the same prefix: 0x00, 0x01 and 0x02. Hash builder trie has
5979    /// only nodes 0x00 and 0x01, and we have proofs for them. Node B is new and inserted in the
5980    /// sparse trie first.
5981    ///
5982    /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie.
5983    /// 2. Insert leaf 0x01 into the sparse trie.
5984    /// 3. Reveal the hash builder proof to leaf 0x02 in the sparse trie.
5985    ///
5986    /// The hash builder proof to the leaf 0x02 didn't have the leaf 0x01 at the corresponding
5987    /// nibble of the branch node, so we need to adjust the branch node instead of fully
5988    /// replacing it.
5989    #[test]
5990    fn sparse_trie_reveal_node_1() {
5991        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00]));
5992        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01]));
5993        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x02]));
5994        let value = || Account::default();
5995        let value_encoded = || {
5996            let mut account_rlp = Vec::new();
5997            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5998            account_rlp
5999        };
6000
6001        // Generate the proof for the root node and initialize the sparse trie with it
6002        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6003            run_hash_builder(
6004                [(key1(), value()), (key3(), value())],
6005                NoopAccountTrieCursor::default(),
6006                Default::default(),
6007                [Nibbles::default()],
6008            );
6009
6010        let provider = DefaultTrieNodeProvider;
6011        let masks = match (
6012            branch_node_hash_masks.get(&Nibbles::default()).copied(),
6013            branch_node_tree_masks.get(&Nibbles::default()).copied(),
6014        ) {
6015            (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
6016            (Some(h), None) => {
6017                Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
6018            }
6019            (None, Some(t)) => {
6020                Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
6021            }
6022            (None, None) => None,
6023        };
6024        let mut sparse = ParallelSparseTrie::from_root(
6025            TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
6026            masks,
6027            false,
6028        )
6029        .unwrap();
6030
6031        // Generate the proof for the first key and reveal it in the sparse trie
6032        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6033            run_hash_builder(
6034                [(key1(), value()), (key3(), value())],
6035                NoopAccountTrieCursor::default(),
6036                Default::default(),
6037                [key1()],
6038            );
6039        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6040            .nodes_sorted()
6041            .into_iter()
6042            .map(|(path, node)| {
6043                let hash_mask = branch_node_hash_masks.get(&path).copied();
6044                let tree_mask = branch_node_tree_masks.get(&path).copied();
6045                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6046                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6047            })
6048            .collect();
6049        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6050
6051        // Check that the branch node exists with only two nibbles set
6052        assert_matches!(
6053            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6054            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b101)
6055        );
6056
6057        // Insert the leaf for the second key
6058        sparse.update_leaf(key2(), value_encoded(), &provider).unwrap();
6059
6060        // Check that the branch node was updated and another nibble was set
6061        assert_matches!(
6062            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6063            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6064        );
6065
6066        // Generate the proof for the third key and reveal it in the sparse trie
6067        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6068            run_hash_builder(
6069                [(key1(), value()), (key3(), value())],
6070                NoopAccountTrieCursor::default(),
6071                Default::default(),
6072                [key3()],
6073            );
6074        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6075            .nodes_sorted()
6076            .into_iter()
6077            .map(|(path, node)| {
6078                let hash_mask = branch_node_hash_masks.get(&path).copied();
6079                let tree_mask = branch_node_tree_masks.get(&path).copied();
6080                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6081                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6082            })
6083            .collect();
6084        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6085
6086        // Check that nothing changed in the branch node
6087        assert_matches!(
6088            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6089            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6090        );
6091
6092        // Generate the nodes for the full trie with all three key using the hash builder, and
6093        // compare them to the sparse trie
6094        let (_, _, hash_builder_proof_nodes, _, _) = run_hash_builder(
6095            [(key1(), value()), (key2(), value()), (key3(), value())],
6096            NoopAccountTrieCursor::default(),
6097            Default::default(),
6098            [key1(), key2(), key3()],
6099        );
6100
6101        assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
6102    }
6103
6104    /// We have three leaves: 0x0000, 0x0101, and 0x0102. Hash builder trie has all nodes, and we
6105    /// have proofs for them.
6106    ///
6107    /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie.
6108    /// 2. Remove leaf 0x00 from the sparse trie (that will remove the branch node and create an
6109    ///    extension node with the key 0x0000).
6110    /// 3. Reveal the hash builder proof to leaf 0x0101 in the sparse trie.
6111    ///
6112    /// The hash builder proof to the leaf 0x0101 had a branch node in the path, but we turned it
6113    /// into an extension node, so it should ignore this node.
6114    #[test]
6115    fn sparse_trie_reveal_node_2() {
6116        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x00]));
6117        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x01]));
6118        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x02]));
6119        let value = || Account::default();
6120
6121        // Generate the proof for the root node and initialize the sparse trie with it
6122        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6123            run_hash_builder(
6124                [(key1(), value()), (key2(), value()), (key3(), value())],
6125                NoopAccountTrieCursor::default(),
6126                Default::default(),
6127                [Nibbles::default()],
6128            );
6129
6130        let provider = DefaultTrieNodeProvider;
6131        let masks = match (
6132            branch_node_hash_masks.get(&Nibbles::default()).copied(),
6133            branch_node_tree_masks.get(&Nibbles::default()).copied(),
6134        ) {
6135            (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
6136            (Some(h), None) => {
6137                Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
6138            }
6139            (None, Some(t)) => {
6140                Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
6141            }
6142            (None, None) => None,
6143        };
6144        let mut sparse = ParallelSparseTrie::from_root(
6145            TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
6146            masks,
6147            false,
6148        )
6149        .unwrap();
6150
6151        // Generate the proof for the children of the root branch node and reveal it in the sparse
6152        // trie
6153        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6154            run_hash_builder(
6155                [(key1(), value()), (key2(), value()), (key3(), value())],
6156                NoopAccountTrieCursor::default(),
6157                Default::default(),
6158                [key1(), Nibbles::from_nibbles_unchecked([0x01])],
6159            );
6160        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6161            .nodes_sorted()
6162            .into_iter()
6163            .map(|(path, node)| {
6164                let hash_mask = branch_node_hash_masks.get(&path).copied();
6165                let tree_mask = branch_node_tree_masks.get(&path).copied();
6166                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6167                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6168            })
6169            .collect();
6170        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6171
6172        // Check that the branch node exists
6173        assert_matches!(
6174            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6175            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b11)
6176        );
6177
6178        // Remove the leaf for the first key
6179        sparse.remove_leaf(&key1(), &provider).unwrap();
6180
6181        // Check that the branch node was turned into an extension node
6182        assert_eq!(
6183            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6184            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6185        );
6186
6187        // Generate the proof for the third key and reveal it in the sparse trie
6188        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6189            run_hash_builder(
6190                [(key1(), value()), (key2(), value()), (key3(), value())],
6191                NoopAccountTrieCursor::default(),
6192                Default::default(),
6193                [key2()],
6194            );
6195        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6196            .nodes_sorted()
6197            .into_iter()
6198            .map(|(path, node)| {
6199                let hash_mask = branch_node_hash_masks.get(&path).copied();
6200                let tree_mask = branch_node_tree_masks.get(&path).copied();
6201                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6202                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6203            })
6204            .collect();
6205        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6206
6207        // Check that nothing changed in the extension node
6208        assert_eq!(
6209            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6210            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6211        );
6212    }
6213
6214    /// We have two leaves that share the same prefix: 0x0001 and 0x0002, and a leaf with a
6215    /// different prefix: 0x0100. Hash builder trie has only the first two leaves, and we have
6216    /// proofs for them.
6217    ///
6218    /// 1. Insert the leaf 0x0100 into the sparse trie, and check that the root extension node was
6219    ///    turned into a branch node.
6220    /// 2. Reveal the leaf 0x0001 in the sparse trie, and check that the root branch node wasn't
6221    ///    overwritten with the extension node from the proof.
    #[test]
    fn sparse_trie_reveal_node_3() {
        // Two leaves sharing the 0x00 prefix, plus one diverging leaf under 0x01.
        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x01]));
        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x02]));
        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x00]));
        let value = || Account::default();
        // RLP encoding of the default account, as stored in leaf nodes.
        let value_encoded = || {
            let mut account_rlp = Vec::new();
            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
            account_rlp
        };

        // Generate the proof for the root node and initialize the sparse trie with it
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [Nibbles::default()],
            );

        let mut nodes = Vec::new();

        // Pair each proof node with its branch masks (None for non-branch nodes).
        for (path, node) in hash_builder_proof_nodes.nodes_sorted() {
            let hash_mask = branch_node_hash_masks.get(&path).copied();
            let tree_mask = branch_node_tree_masks.get(&path).copied();
            let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
            nodes.push((path, TrieNode::decode(&mut &node[..]).unwrap(), masks));
        }

        // `from_sorted_trie_nodes` below expects depth-first ordering.
        nodes.sort_unstable_by(|a, b| reth_trie_common::depth_first_cmp(&a.0, &b.0));

        let nodes = ProofTrieNodeV2::from_sorted_trie_nodes(nodes);

        let provider = DefaultTrieNodeProvider;
        // After depth-first sorting, the first node is the one with the shortest
        // path, i.e. the root.
        let mut sparse =
            ParallelSparseTrie::from_root(nodes[0].node.clone(), nodes[0].masks, false).unwrap();

        // Check that the root extension node exists
        assert_matches!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, state: SparseNodeState::Dirty }) if *key == Nibbles::from_nibbles([0x00])
        );

        // Insert the leaf with a different prefix
        sparse.update_leaf(key3(), value_encoded(), &provider).unwrap();

        // Check that the extension node was turned into a branch node
        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );

        // Generate the proof for the first key and reveal it in the sparse trie
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [key1()],
            );
        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
            .nodes_sorted()
            .into_iter()
            .map(|(path, node)| {
                let hash_mask = branch_node_hash_masks.get(&path).copied();
                let tree_mask = branch_node_tree_masks.get(&path).copied();
                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
            })
            .collect();
        sparse.reveal_nodes(&mut revealed_nodes).unwrap();

        // Check that the branch node wasn't overwritten by the extension node in the proof
        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );
    }
6301
    #[test]
    fn test_update_leaf_cross_level() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Test adding leaves that demonstrate the cross-level behavior
        // Based on the example: leaves 0x1234, 0x1245, 0x1334, 0x1345
        //
        // Final trie structure:
        // Upper trie:
        //   0x: Extension { key: 0x1 }
        //   └── 0x1: Branch { state_mask: 0x4 | 0x8 } (children 0x2 and 0x3)
        //       └── Subtrie (0x12): pointer to lower subtrie
        //       └── Subtrie (0x13): pointer to lower subtrie
        //
        // Lower subtrie (0x12):
        //   0x12: Branch { state_mask: 0x8 | 0x10 }
        //   ├── 0x123: Leaf { key: 0x4 }
        //   └── 0x124: Leaf { key: 0x5 }
        //
        // Lower subtrie (0x13):
        //   0x13: Branch { state_mask: 0x8 | 0x10 }
        //   ├── 0x133: Leaf { key: 0x4 }
        //   └── 0x134: Leaf { key: 0x5 }

        // First add leaf 0x1345 - this should create a leaf in upper trie at 0x
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x3, 0x4, 0x5], 1);
        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify upper trie has a leaf at the root with key 1345
        ctx.assert_upper_subtrie(&trie)
            .has_leaf(
                &Nibbles::default(),
                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4, 0x5])),
            )
            .has_value(&leaf1_path, &value1);

        // Add leaf 0x1234 - this should go first in the upper subtrie
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();

        // Upper trie should now have a branch at 0x1; neither leaf value stays
        // in the upper trie once the paths diverge below depth 2.
        ctx.assert_upper_subtrie(&trie)
            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
            .has_no_value(&leaf1_path)
            .has_no_value(&leaf2_path);

        // Add leaf 0x1245 - this should cause a branch and create the 0x12 subtrie
        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x4, 0x5], 3);
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify lower subtrie at 0x12 exists with correct structure
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // Add leaf 0x1334 - this should create another lower subtrie
        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x3, 0x3, 0x4], 4);
        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify lower subtrie at 0x13 exists with correct values
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x3]))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf4_path, &value4);

        // Verify the 0x12 subtrie still has its values
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // Upper trie has no values
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1]))
            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
            .has_no_value(&leaf1_path)
            .has_no_value(&leaf2_path)
            .has_no_value(&leaf3_path)
            .has_no_value(&leaf4_path);
    }
6384
6385    #[test]
6386    fn test_update_leaf_split_at_level_boundary() {
6387        let ctx = ParallelSparseTrieTestContext;
6388        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6389
6390        // This test demonstrates what happens when we insert leaves that cause
6391        // splitting exactly at the upper/lower trie boundary (2 nibbles).
6392        //
6393        // Final trie structure:
6394        // Upper trie:
6395        //   0x: Extension { key: 0x12 }
6396        //       └── Subtrie (0x12): pointer to lower subtrie
6397        //
6398        // Lower subtrie (0x12):
6399        //   0x12: Branch { state_mask: 0x4 | 0x8 }
6400        //   ├── 0x122: Leaf { key: 0x4 }
6401        //   └── 0x123: Leaf { key: 0x4 }
6402
6403        // First insert a leaf that ends exactly at the boundary (2 nibbles)
6404        let (first_leaf_path, first_value) = ctx.create_test_leaf([0x1, 0x2, 0x2, 0x4], 1);
6405
6406        trie.update_leaf(first_leaf_path, first_value.clone(), DefaultTrieNodeProvider).unwrap();
6407
6408        // In an empty trie, the first leaf becomes the root, regardless of path length
6409        ctx.assert_upper_subtrie(&trie)
6410            .has_leaf(
6411                &Nibbles::default(),
6412                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x2, 0x4])),
6413            )
6414            .has_value(&first_leaf_path, &first_value);
6415
6416        // Now insert another leaf that shares the same 2-nibble prefix
6417        let (second_leaf_path, second_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
6418
6419        trie.update_leaf(second_leaf_path, second_value.clone(), DefaultTrieNodeProvider).unwrap();
6420
6421        // Now both leaves should be in a lower subtrie at index [0x1, 0x2]
6422        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6423            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x2, 0x3])
6424            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x2]), &leaf_key([0x4], 61))
6425            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6426            .has_value(&first_leaf_path, &first_value)
6427            .has_value(&second_leaf_path, &second_value);
6428
6429        // Upper subtrie should no longer have these values
6430        ctx.assert_upper_subtrie(&trie)
6431            .has_no_value(&first_leaf_path)
6432            .has_no_value(&second_leaf_path);
6433    }
6434
    #[test]
    fn test_update_subtrie_with_multiple_leaves() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // First, add multiple leaves that will create a subtrie structure
        // All leaves share the prefix [0x1, 0x2] to ensure they create a subtrie
        //
        // This should result in a trie with the following structure:
        // 0x: Extension { key: 0x12 }
        //  └── Subtrie (0x12):
        //      0x12: Branch { state_mask: 0x8 | 0x10 } (children 0x3 and 0x4)
        //      ├── 0x123: Branch { state_mask: 0x10 | 0x20 } (children 0x4 and 0x5)
        //      │   ├── 0x1234: Leaf { key: 0x }
        //      │   └── 0x1235: Leaf { key: 0x }
        //      └── 0x124: Branch { state_mask: 0x40 | 0x80 } (children 0x6 and 0x7)
        //          ├── 0x1246: Leaf { key: 0x }
        //          └── 0x1247: Leaf { key: 0x }
        let leaves = ctx.create_test_leaves(&[
            &[0x1, 0x2, 0x3, 0x4],
            &[0x1, 0x2, 0x3, 0x5],
            &[0x1, 0x2, 0x4, 0x6],
            &[0x1, 0x2, 0x4, 0x7],
        ]);

        // Insert all leaves
        ctx.update_leaves(&mut trie, leaves.clone());

        // Verify the upper subtrie has an extension node at the root with key 0x12
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // Verify the subtrie structure using fluent assertions
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &[0x6, 0x7])
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // Now update one of the leaves with a new value
        let updated_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
        let (_, updated_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 100);

        trie.update_leaf(updated_path, updated_value.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify the subtrie structure is maintained and value is updated
        // The branch structure should remain the same and all values should be present
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_value(&updated_path, &updated_value)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // Add a new leaf that extends an existing branch
        let (new_leaf_path, new_leaf_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 200);

        trie.update_leaf(new_leaf_path, new_leaf_value.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify the branch at [0x1, 0x2, 0x3] now has an additional child
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6])
            .has_value(&new_leaf_path, &new_leaf_value);
    }
6502
    #[test]
    fn test_update_subtrie_extension_node_subtrie() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // All leaves share the prefix [0x1, 0x2, 0x3] to ensure they create a subtrie
        //
        // This should result in a trie with the following structure
        // 0x: Extension { key: 0x123 }
        //  └── Subtrie (0x12):
        //      0x123: Branch { state_mask: 0x10 | 0x20 } (children 0x4 and 0x5)
        //      ├── 0x1234: Leaf { key: 0x }
        //      └── 0x1235: Leaf { key: 0x }
        let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x3, 0x5]]);

        // Insert all leaves
        ctx.update_leaves(&mut trie, leaves.clone());

        // Verify the upper subtrie has an extension node at the root with key 0x123
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));

        // Verify the lower subtrie structure
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1);
    }
6531
    #[test]
    fn update_subtrie_extension_node_cross_level() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // First, add multiple leaves that will create a subtrie structure
        // All leaves share the prefix [0x1, 0x2] to ensure they create a branch node and subtrie
        //
        // This should result in a trie with the following structure
        // 0x: Extension { key: 0x12 }
        //  └── Subtrie (0x12):
        //      0x12: Branch { state_mask: 0x8 | 0x10 } (children 0x3 and 0x4)
        //      ├── 0x123: Leaf { key: 0x4 }
        //      └── 0x124: Leaf { key: 0x5 }
        let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x4, 0x5]]);

        // Insert all leaves
        ctx.update_leaves(&mut trie, leaves.clone());

        // Verify the upper subtrie has an extension node at the root with key 0x12
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // Verify the lower subtrie structure
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1);
    }
6563
6564    #[test]
6565    fn test_update_single_nibble_paths() {
6566        let ctx = ParallelSparseTrieTestContext;
6567        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6568
6569        // Test edge case: single nibble paths that create branches in upper trie
6570        //
6571        // Final trie structure:
6572        // Upper trie:
6573        //   0x: Branch { state_mask: 0x1 | 0x2 | 0x4 | 0x8 }
6574        //   ├── 0x0: Leaf { key: 0x }
6575        //   ├── 0x1: Leaf { key: 0x }
6576        //   ├── 0x2: Leaf { key: 0x }
6577        //   └── 0x3: Leaf { key: 0x }
6578
6579        // Insert leaves with single nibble paths
6580        let (leaf1_path, value1) = ctx.create_test_leaf([0x0], 1);
6581        let (leaf2_path, value2) = ctx.create_test_leaf([0x1], 2);
6582        let (leaf3_path, value3) = ctx.create_test_leaf([0x2], 3);
6583        let (leaf4_path, value4) = ctx.create_test_leaf([0x3], 4);
6584
6585        ctx.update_leaves(
6586            &mut trie,
6587            [
6588                (leaf1_path, value1.clone()),
6589                (leaf2_path, value2.clone()),
6590                (leaf3_path, value3.clone()),
6591                (leaf4_path, value4.clone()),
6592            ],
6593        );
6594
6595        // Verify upper trie has a branch at root with 4 children
6596        ctx.assert_upper_subtrie(&trie)
6597            .has_branch(&Nibbles::default(), &[0x0, 0x1, 0x2, 0x3])
6598            .has_leaf(&Nibbles::from_nibbles([0x0]), &leaf_key([], 63))
6599            .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([], 63))
6600            .has_leaf(&Nibbles::from_nibbles([0x2]), &leaf_key([], 63))
6601            .has_leaf(&Nibbles::from_nibbles([0x3]), &leaf_key([], 63))
6602            .has_value(&leaf1_path, &value1)
6603            .has_value(&leaf2_path, &value2)
6604            .has_value(&leaf3_path, &value3)
6605            .has_value(&leaf4_path, &value4);
6606    }
6607
6608    #[test]
6609    fn test_update_deep_extension_chain() {
6610        let ctx = ParallelSparseTrieTestContext;
6611        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6612
6613        // Test edge case: deep extension chains that span multiple levels
6614        //
6615        // Final trie structure:
6616        // Upper trie:
6617        //   0x: Extension { key: 0x111111 }
6618        //       └── Subtrie (0x11): pointer to lower subtrie
6619        //
6620        // Lower subtrie (0x11):
6621        //   0x111111: Branch { state_mask: 0x1 | 0x2 }
6622        //   ├── 0x1111110: Leaf { key: 0x }
6623        //   └── 0x1111111: Leaf { key: 0x }
6624
6625        // Create leaves with a long common prefix
6626        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0], 1);
6627        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1], 2);
6628
6629        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);
6630
6631        // Verify upper trie has extension with the full common prefix
6632        ctx.assert_upper_subtrie(&trie).has_extension(
6633            &Nibbles::default(),
6634            &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6635        );
6636
6637        // Verify lower subtrie has branch structure
6638        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x1]))
6639            .has_branch(&Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]), &[0x0, 0x1])
6640            .has_leaf(
6641                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0]),
6642                &leaf_key([], 57),
6643            )
6644            .has_leaf(
6645                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6646                &leaf_key([], 57),
6647            )
6648            .has_value(&leaf1_path, &value1)
6649            .has_value(&leaf2_path, &value2);
6650    }
6651
6652    #[test]
6653    fn test_update_branch_with_all_nibbles() {
6654        let ctx = ParallelSparseTrieTestContext;
6655        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6656
6657        // Test edge case: branch node with all 16 possible nibble children
6658        //
6659        // Final trie structure:
6660        // Upper trie:
6661        //   0x: Extension { key: 0xA }
6662        //       └── Subtrie (0xA0): pointer to lower subtrie
6663        //
6664        // Lower subtrie (0xA0):
6665        //   0xA0: Branch { state_mask: 0xFFFF } (all 16 children)
6666        //   ├── 0xA00: Leaf { key: 0x }
6667        //   ├── 0xA01: Leaf { key: 0x }
6668        //   ├── 0xA02: Leaf { key: 0x }
6669        //   ... (all nibbles 0x0 through 0xF)
6670        //   └── 0xA0F: Leaf { key: 0x }
6671
6672        // Create leaves for all 16 possible nibbles
6673        let mut leaves = Vec::new();
6674        for nibble in 0x0..=0xF {
6675            let (path, value) = ctx.create_test_leaf([0xA, 0x0, nibble], nibble as u64 + 1);
6676            leaves.push((path, value));
6677        }
6678
6679        // Insert all leaves
6680        ctx.update_leaves(&mut trie, leaves.iter().cloned());
6681
6682        // Verify upper trie structure
6683        ctx.assert_upper_subtrie(&trie)
6684            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0x0]));
6685
6686        // Verify lower subtrie has branch with all 16 children
6687        let mut subtrie_assert =
6688            ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0x0])).has_branch(
6689                &Nibbles::from_nibbles([0xA, 0x0]),
6690                &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF],
6691            );
6692
6693        // Verify all leaves exist
6694        for (i, (path, value)) in leaves.iter().enumerate() {
6695            subtrie_assert = subtrie_assert
6696                .has_leaf(&Nibbles::from_nibbles([0xA, 0x0, i as u8]), &leaf_key([], 61))
6697                .has_value(path, value);
6698        }
6699    }
6700
6701    #[test]
6702    fn test_update_creates_multiple_subtries() {
6703        let ctx = ParallelSparseTrieTestContext;
6704        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6705
6706        // Test edge case: updates that create multiple subtries at once
6707        //
6708        // Final trie structure:
6709        // Upper trie:
6710        //   0x: Extension { key: 0x0 }
6711        //       └── 0x0: Branch { state_mask: 0xF }
6712        //           ├── Subtrie (0x00): pointer
6713        //           ├── Subtrie (0x01): pointer
6714        //           ├── Subtrie (0x02): pointer
6715        //           └── Subtrie (0x03): pointer
6716        //
6717        // Each lower subtrie has leaves:
6718        //   0xXY: Leaf { key: 0xZ... }
6719
6720        // Create leaves that will force multiple subtries
6721        let leaves = [
6722            ctx.create_test_leaf([0x0, 0x0, 0x1, 0x2], 1),
6723            ctx.create_test_leaf([0x0, 0x1, 0x3, 0x4], 2),
6724            ctx.create_test_leaf([0x0, 0x2, 0x5, 0x6], 3),
6725            ctx.create_test_leaf([0x0, 0x3, 0x7, 0x8], 4),
6726        ];
6727
6728        // Insert all leaves
6729        ctx.update_leaves(&mut trie, leaves.iter().cloned());
6730
6731        // Verify upper trie has extension then branch
6732        ctx.assert_upper_subtrie(&trie)
6733            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x0]))
6734            .has_branch(&Nibbles::from_nibbles([0x0]), &[0x0, 0x1, 0x2, 0x3]);
6735
6736        // Verify each subtrie exists and contains its leaf
6737        for (i, (leaf_path, leaf_value)) in leaves.iter().enumerate() {
6738            let subtrie_path = Nibbles::from_nibbles([0x0, i as u8]);
6739            let full_path: [u8; 4] = match i {
6740                0 => [0x0, 0x0, 0x1, 0x2],
6741                1 => [0x0, 0x1, 0x3, 0x4],
6742                2 => [0x0, 0x2, 0x5, 0x6],
6743                3 => [0x0, 0x3, 0x7, 0x8],
6744                _ => unreachable!(),
6745            };
6746            ctx.assert_subtrie(&trie, subtrie_path)
6747                .has_leaf(&subtrie_path, &leaf_key(&full_path[2..], 62))
6748                .has_value(leaf_path, leaf_value);
6749        }
6750    }
6751
6752    #[test]
6753    fn test_update_extension_to_branch_transformation() {
6754        let ctx = ParallelSparseTrieTestContext;
6755        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6756
6757        // Test edge case: extension node transforms to branch when split
6758        //
6759        // Initial state after first two leaves:
6760        // Upper trie:
6761        //   0x: Extension { key: 0xFF0 }
6762        //       └── Subtrie (0xFF): pointer
6763        //
6764        // After third leaf (0xF0...):
6765        // Upper trie:
6766        //   0x: Extension { key: 0xF }
6767        //       └── 0xF: Branch { state_mask: 0x10 | 0x8000 }
6768        //           ├── Subtrie (0xF0): pointer
6769        //           └── Subtrie (0xFF): pointer
6770
6771        // First two leaves share prefix 0xFF0
6772        let (leaf1_path, value1) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x1], 1);
6773        let (leaf2_path, value2) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x2], 2);
6774        let (leaf3_path, value3) = ctx.create_test_leaf([0xF, 0x0, 0x0, 0x3], 3);
6775
6776        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);
6777
6778        // Verify initial extension structure
6779        ctx.assert_upper_subtrie(&trie)
6780            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF, 0xF, 0x0]));
6781
6782        // Add leaf that splits the extension
6783        ctx.update_leaves(&mut trie, [(leaf3_path, value3.clone())]);
6784
6785        // Verify transformed structure
6786        ctx.assert_upper_subtrie(&trie)
6787            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF]))
6788            .has_branch(&Nibbles::from_nibbles([0xF]), &[0x0, 0xF]);
6789
6790        // Verify subtries
6791        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
6792            .has_branch(&Nibbles::from_nibbles([0xF, 0xF, 0x0]), &[0x1, 0x2])
6793            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x1]), &leaf_key([], 60))
6794            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x2]), &leaf_key([], 60))
6795            .has_value(&leaf1_path, &value1)
6796            .has_value(&leaf2_path, &value2);
6797
6798        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0x0]))
6799            .has_leaf(&Nibbles::from_nibbles([0xF, 0x0]), &leaf_key([0x0, 0x3], 62))
6800            .has_value(&leaf3_path, &value3);
6801    }
6802
6803    #[test]
6804    fn test_update_long_shared_prefix_at_boundary() {
6805        let ctx = ParallelSparseTrieTestContext;
6806        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6807
6808        // Test edge case: leaves with long shared prefix that ends exactly at 2-nibble boundary
6809        //
6810        // Final trie structure:
6811        // Upper trie:
6812        //   0x: Extension { key: 0xAB }
6813        //       └── Subtrie (0xAB): pointer to lower subtrie
6814        //
6815        // Lower subtrie (0xAB):
6816        //   0xAB: Branch { state_mask: 0x1000 | 0x2000 }
6817        //   ├── 0xABC: Leaf { key: 0xDEF }
6818        //   └── 0xABD: Leaf { key: 0xEF0 }
6819
6820        // Create leaves that share exactly 2 nibbles
6821        let (leaf1_path, value1) = ctx.create_test_leaf([0xA, 0xB, 0xC, 0xD, 0xE, 0xF], 1);
6822        let (leaf2_path, value2) = ctx.create_test_leaf([0xA, 0xB, 0xD, 0xE, 0xF, 0x0], 2);
6823
6824        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
6825        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6826
6827        // Verify upper trie structure
6828        ctx.assert_upper_subtrie(&trie)
6829            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0xB]));
6830
6831        // Verify lower subtrie structure
6832        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0xB]))
6833            .has_branch(&Nibbles::from_nibbles([0xA, 0xB]), &[0xC, 0xD])
6834            .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xC]), &leaf_key([0xD, 0xE, 0xF], 61))
6835            .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xD]), &leaf_key([0xE, 0xF, 0x0], 61))
6836            .has_value(&leaf1_path, &value1)
6837            .has_value(&leaf2_path, &value2);
6838    }
6839
    #[test]
    fn test_update_branch_to_extension_collapse() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Test creating a trie with leaves that share a long common prefix
        //
        // Initial state with 3 leaves (0x1234, 0x2345, 0x2356):
        // Upper trie:
        //   0x: Branch { state_mask: 0x6 }
        //       ├── 0x1: Leaf { key: 0x234 }
        //       └── 0x2: Extension { key: 0x3 }
        //           └── Subtrie (0x23): pointer
        // Lower subtrie (0x23):
        //   0x23: Branch { state_mask: 0x30 }
        //       ├── 0x234: Leaf { key: 0x5 }
        //       └── 0x235: Leaf { key: 0x6 }
        //
        // Then we create a new trie with leaves (0x1234, 0x1235, 0x1236):
        // Expected structure:
        // Upper trie:
        //   0x: Extension { key: 0x123 }
        //       └── Subtrie (0x12): pointer
        // Lower subtrie (0x12):
        //   0x123: Branch { state_mask: 0x70 } // bits 4, 5, 6 set
        //       ├── 0x1234: Leaf { key: 0x }
        //       ├── 0x1235: Leaf { key: 0x }
        //       └── 0x1236: Leaf { key: 0x }

        // Create initial leaves. Values are moved (not cloned) because they are
        // never asserted against after insertion.
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
        let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
        let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);

        trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
        trie.update_leaf(leaf2_path, value2, DefaultTrieNodeProvider).unwrap();
        trie.update_leaf(leaf3_path, value3, DefaultTrieNodeProvider).unwrap();

        // Verify initial structure has branch at root
        ctx.assert_upper_subtrie(&trie).has_branch(&Nibbles::default(), &[0x1, 0x2]);

        // Now update to create a pattern where extension is more efficient
        // Replace leaves to all share prefix 0x123
        let (new_leaf1_path, new_value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 10);
        let (new_leaf2_path, new_value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 11);
        let (new_leaf3_path, new_value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 12);

        // Clear and add new leaves: a fresh trie is built from scratch by
        // shadowing the old one, which is dropped here.
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
        trie.update_leaf(new_leaf1_path, new_value1.clone(), DefaultTrieNodeProvider).unwrap();
        trie.update_leaf(new_leaf2_path, new_value2.clone(), DefaultTrieNodeProvider).unwrap();
        trie.update_leaf(new_leaf3_path, new_value3.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify new structure has extension
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));

        // Verify lower subtrie path was correctly updated to 0x123
        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);

        // Verify lower subtrie - all three leaves should be properly inserted
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6]) // All three children
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &leaf_key([], 60))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x6]), &leaf_key([], 60))
            .has_value(&new_leaf1_path, &new_value1)
            .has_value(&new_leaf2_path, &new_value2)
            .has_value(&new_leaf3_path, &new_value3);
    }
6910
6911    #[test]
6912    fn test_update_shared_prefix_patterns() {
6913        let ctx = ParallelSparseTrieTestContext;
6914        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6915
6916        // Test edge case: different patterns of shared prefixes
6917        //
6918        // Final trie structure:
6919        // Upper trie:
6920        //   0x: Branch { state_mask: 0x6 }
6921        //       ├── 0x1: Leaf { key: 0x234 }
6922        //       └── 0x2: Extension { key: 0x3 }
6923        //           └── Subtrie (0x23): pointer
6924        //
6925        // Lower subtrie (0x23):
6926        //   0x23: Branch { state_mask: 0x10 | 0x20 }
6927        //   ├── 0x234: Leaf { key: 0x5 }
6928        //   └── 0x235: Leaf { key: 0x6 }
6929
6930        // Create leaves with different shared prefix patterns
6931        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6932        let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6933        let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6934
6935        trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
6936        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6937        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();
6938
6939        // Verify upper trie structure
6940        ctx.assert_upper_subtrie(&trie)
6941            .has_branch(&Nibbles::default(), &[0x1, 0x2])
6942            .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([0x2, 0x3, 0x4], 63))
6943            .has_extension(&Nibbles::from_nibbles([0x2]), &Nibbles::from_nibbles([0x3]));
6944
6945        // Verify lower subtrie structure
6946        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x2, 0x3]))
6947            .has_branch(&Nibbles::from_nibbles([0x2, 0x3]), &[0x4, 0x5])
6948            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x4]), &leaf_key([0x5], 61))
6949            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x5]), &leaf_key([0x6], 61))
6950            .has_value(&leaf2_path, &value2)
6951            .has_value(&leaf3_path, &value3);
6952    }
6953
    #[test]
    fn test_progressive_branch_creation() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Test starting with a single leaf and progressively adding leaves
        // that create branch nodes at shorter and shorter paths
        //
        // Step 1: Add leaf at 0x12345
        // Upper trie:
        //   0x: Leaf { key: 0x12345 }
        //
        // Step 2: Add leaf at 0x12346
        // Upper trie:
        //   0x: Extension { key: 0x1234 }
        //       └── Subtrie (0x12): pointer
        // Lower subtrie (0x12):
        //   0x1234: Branch { state_mask: 0x60 }  // bits 5 and 6 set
        //       ├── 0x12345: Leaf { key: 0x }
        //       └── 0x12346: Leaf { key: 0x }
        //
        // Step 3: Add leaf at 0x1235
        // Lower subtrie (0x12) updates to:
        //   0x123: Branch { state_mask: 0x30 }  // bits 4 and 5 set
        //       ├── 0x1234: Branch { state_mask: 0x60 }
        //       │   ├── 0x12345: Leaf { key: 0x }
        //       │   └── 0x12346: Leaf { key: 0x }
        //       └── 0x1235: Leaf { key: 0x }
        //
        // Step 4: Add leaf at 0x124
        // Lower subtrie (0x12) updates to:
        //   0x12: Branch { state_mask: 0x18 }  // bits 3 and 4 set
        //       ├── 0x123: Branch { state_mask: 0x30 }
        //       │   ├── 0x1234: Branch { state_mask: 0x60 }
        //       │   │   ├── 0x12345: Leaf { key: 0x }
        //       │   │   └── 0x12346: Leaf { key: 0x }
        //       │   └── 0x1235: Leaf { key: 0x }
        //       └── 0x124: Leaf { key: 0x }

        // Step 1: Add first leaf - initially stored as leaf in upper trie
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x5], 1);
        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify leaf node in upper trie (optimized single-leaf case)
        ctx.assert_upper_subtrie(&trie)
            .has_leaf(
                &Nibbles::default(),
                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            )
            .has_value(&leaf1_path, &value1);

        // Step 2: Add leaf at 0x12346 - creates branch at 0x1234
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x6], 2);
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify extension now goes to 0x1234
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));

        // Verify subtrie path updated to 0x1234
        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3, 0x4]);

        // NOTE(review): throughout this test, leaf_key's second argument looks
        // like the remaining key length, i.e. 64 total nibbles minus the node
        // path length (59 = 64 - 5 here) — confirm against the helper.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]), &leaf_key([], 59))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x6]), &leaf_key([], 59))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);

        // Step 3: Add leaf at 0x1235 - creates branch at 0x123
        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 3);
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify extension now goes to 0x123
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));

        // Verify subtrie path updated to 0x123
        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // Step 4: Add leaf at 0x124 - creates branch at 0x12 (subtrie root)
        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x2, 0x4], 4);
        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify extension now goes to 0x12
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // Verify subtrie path updated to 0x12
        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2]);

        // Verify final structure: three nested branches plus the new leaf
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([], 61))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3)
            .has_value(&leaf4_path, &value4);
    }
7064
7065    #[test]
7066    fn test_update_max_depth_paths() {
7067        let ctx = ParallelSparseTrieTestContext;
7068        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
7069
7070        // Test edge case: very long paths (64 nibbles - max for addresses/storage)
7071        //
7072        // Final trie structure:
7073        // Upper trie:
7074        //   0x: Extension { key: 0xFF }
7075        //       └── Subtrie (0xFF): pointer
7076        //
7077        // Lower subtrie (0xFF):
7078        //   Has very long paths with slight differences at the end
7079
7080        // Create two 64-nibble paths that differ only in the last nibble
7081        let mut path1_nibbles = vec![0xF; 63];
7082        path1_nibbles.push(0x0);
7083        let mut path2_nibbles = vec![0xF; 63];
7084        path2_nibbles.push(0x1);
7085
7086        let (leaf1_path, value1) = ctx.create_test_leaf(&path1_nibbles, 1);
7087        let (leaf2_path, value2) = ctx.create_test_leaf(&path2_nibbles, 2);
7088
7089        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
7090        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
7091
7092        // The common prefix of 63 F's will create a very long extension
7093        let extension_key = vec![0xF; 63];
7094        ctx.assert_upper_subtrie(&trie)
7095            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles(&extension_key));
7096
7097        // Verify the subtrie has the branch at the end
7098        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
7099            .has_branch(&Nibbles::from_nibbles(&path1_nibbles[..63]), &[0x0, 0x1])
7100            .has_value(&leaf1_path, &value1)
7101            .has_value(&leaf2_path, &value2);
7102    }
7103
    #[test]
    fn test_hoodie_block_1_data() {
        // Regression test over captured proof data (per the test name, from
        // Hoodie testnet block 1 — TODO confirm provenance): reveal a root
        // branch, a child branch at 0x3 and a leaf at 0x37, update the leaf,
        // then check the computed state root against a known-good value.

        // Reveal node at path Nibbles(0x) - root branch node.
        // Each entry is the raw RLP of one child (0xa0 prefix + 32-byte hash).
        let root_branch_stack = vec![
            hex!("a0550b6aba4dd4582a2434d2cbdad8d3007d09f622d7a6e6eaa7a49385823c2fa2"),
            hex!("a04788a4975a9e1efd29b834fd80fdfe8a57cc1b1c5ace6d30ce5a36a15e0092b3"),
            hex!("a093aeccf87da304e6f7d09edc5d7bd3a552808866d2149dd0940507a8f9bfa910"),
            hex!("a08b5b423ba68d0dec2eca1f408076f9170678505eb4a5db2abbbd83bb37666949"),
            hex!("a08592f62216af4218098a78acad7cf472a727fb55e6c27d3cfdf2774d4518eb83"),
            hex!("a0ef02aeee845cb64c11f85edc1a3094227c26445952554b8a9248915d80c746c3"),
            hex!("a0df2529ee3a1ce4df5a758cf17e6a86d0fb5ea22ab7071cf60af6412e9b0a428a"),
            hex!("a0acaa1092db69cd5a63676685827b3484c4b80dc1d3361f6073bbb9240101e144"),
            hex!("a09c3f2bb2a729d71f246a833353ade65667716bb330e0127a3299a42d11200f93"),
            hex!("a0ce978470f4c0b1f8069570563a14d2b79d709add2db4bf22dd9b6aed3271c566"),
            hex!("a095f783cd1d464a60e3c8adcadc28c6eb9fec7306664df39553be41dccc909606"),
            hex!("a0a9083f5fb914b255e1feb5d951a4dfddacf3c8003ef1d1ec6a13bb6ba5b2ac62"),
            hex!("a0fec113d537d8577cd361e0cabf5e95ef58f1cc34318292fdecce9fae57c3e094"),
            hex!("a08b7465f5fe8b3e3c0d087cb7521310d4065ef2a0ee43bf73f68dee8a5742b3dd"),
            hex!("a0c589aa1ae3d5fd87d8640957f7d5184a4ac06f393b453a8e8ed7e8fba0d385c8"),
            hex!("a0b516d6f3352f87beab4ed6e7322f191fc7a147686500ef4de7dd290ad784ef51"),
        ];

        // Decode the raw bytes into RlpNode values for the branch stack.
        let root_branch_rlp_stack: Vec<RlpNode> = root_branch_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        let root_branch_node = BranchNodeV2::new(
            Default::default(),
            root_branch_rlp_stack,
            TrieMask::new(0b1111111111111111), // state_mask: all 16 children present
            None,
        );

        // All children are hashed and stored in the database.
        let root_branch_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b1111111111111111),
            tree_mask: TrieMask::new(0b1111111111111111),
        });

        let mut trie = ParallelSparseTrie::from_root(
            TrieNodeV2::Branch(root_branch_node),
            root_branch_masks,
            true,
        )
        .unwrap();

        // Reveal node at path Nibbles(0x3) - branch node
        let branch_0x3_stack = vec![
            hex!("a09da7d9755fe0c558b3c3de9fdcdf9f28ae641f38c9787b05b73ab22ae53af3e2"),
            hex!("a0d9990bf0b810d1145ecb2b011fd68c63cc85564e6724166fd4a9520180706e5f"),
            hex!("a0f60eb4b12132a40df05d9bbdb88bbde0185a3f097f3c76bf4200c23eda26cf86"),
            hex!("a0ca976997ddaf06f18992f6207e4f6a05979d07acead96568058789017cc6d06b"),
            hex!("a04d78166b48044fdc28ed22d2fd39c8df6f8aaa04cb71d3a17286856f6893ff83"),
            hex!("a021d4f90c34d3f1706e78463b6482bca77a3aa1cd059a3f326c42a1cfd30b9b60"),
            hex!("a0fc3b71c33e2e6b77c5e494c1db7fdbb447473f003daf378c7a63ba9bf3f0049d"),
            hex!("a0e33ed2be194a3d93d343e85642447c93a9d0cfc47a016c2c23d14c083be32a7c"),
            hex!("a07b8e7a21c1178d28074f157b50fca85ee25c12568ff8e9706dcbcdacb77bf854"),
            hex!("a0973274526811393ea0bf4811ca9077531db00d06b86237a2ecd683f55ba4bcb0"),
            hex!("a03a93d726d7487874e51b52d8d534c63aa2a689df18e3b307c0d6cb0a388b00f3"),
            hex!("a06aa67101d011d1c22fe739ef83b04b5214a3e2f8e1a2625d8bfdb116b447e86f"),
            hex!("a02dd545b33c62d33a183e127a08a4767fba891d9f3b94fc20a2ca02600d6d1fff"),
            hex!("a0fe6db87d00f06d53bff8169fa497571ff5af1addfb715b649b4d79dd3e394b04"),
            hex!("a0d9240a9d2d5851d05a97ff3305334dfdb0101e1e321fc279d2bb3cad6afa8fc8"),
            hex!("a01b69c6ab5173de8a8ec53a6ebba965713a4cc7feb86cb3e230def37c230ca2b2"),
        ];

        let branch_0x3_rlp_stack: Vec<RlpNode> = branch_0x3_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        let branch_0x3_node = BranchNodeV2::new(
            Default::default(),
            branch_0x3_rlp_stack,
            TrieMask::new(0b1111111111111111), // state_mask: all 16 children present
            None,
        );

        let branch_0x3_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b0100010000010101),
            tree_mask: TrieMask::new(0b0100000000000000),
        });

        // Reveal node at path Nibbles(0x37) - leaf node
        let leaf_path = Nibbles::from_nibbles([0x3, 0x7]);
        // The leaf key holds the remaining 62 nibbles of the hashed account key.
        let leaf_key = Nibbles::unpack(
            &hex!("d65eaa92c6bc4c13a5ec45527f0c18ea8932588728769ec7aecfe6d9f32e42")[..],
        );
        let leaf_value = hex!("f8440180a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0f57acd40259872606d76197ef052f3d35588dadf919ee1f0e3cb9b62d3f4b02c").to_vec();

        let leaf_node = LeafNode::new(leaf_key, leaf_value);
        let leaf_masks = None;

        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles([0x3]),
                node: TrieNodeV2::Branch(branch_0x3_node),
                masks: branch_0x3_masks,
            },
            ProofTrieNodeV2 {
                path: leaf_path,
                node: TrieNodeV2::Leaf(leaf_node),
                masks: leaf_masks,
            },
        ])
        .unwrap();

        // Update leaf with its new value. The full leaf path is the node path
        // concatenated with the leaf key.
        let mut leaf_full_path = leaf_path;
        leaf_full_path.extend(&leaf_key);

        let leaf_new_value = vec![
            248, 68, 1, 128, 160, 224, 163, 152, 169, 122, 160, 155, 102, 53, 41, 0, 47, 28, 205,
            190, 199, 5, 215, 108, 202, 22, 138, 70, 196, 178, 193, 208, 18, 96, 95, 63, 238, 160,
            245, 122, 205, 64, 37, 152, 114, 96, 109, 118, 25, 126, 240, 82, 243, 211, 85, 136,
            218, 223, 145, 158, 225, 240, 227, 203, 155, 98, 211, 244, 176, 44,
        ];

        trie.update_leaf(leaf_full_path, leaf_new_value.clone(), DefaultTrieNodeProvider).unwrap();

        // Sanity checks before calculating the root: the value must live in the
        // lower subtrie for the leaf path, not in the upper subtrie.
        assert_eq!(
            Some(&leaf_new_value),
            trie.lower_subtrie_for_path(&leaf_path).unwrap().inner.values.get(&leaf_full_path)
        );
        assert!(trie.upper_subtrie.inner.values.is_empty());

        // Assert the root hash matches the expected value
        let expected_root =
            b256!("0x29b07de8376e9ce7b3a69e9b102199869514d3f42590b5abc6f7d48ec9b8665c");
        assert_eq!(trie.root(), expected_root);
    }
7236
7237    #[test]
7238    fn find_leaf_existing_leaf() {
7239        // Create a simple trie with one leaf
7240        let provider = DefaultTrieNodeProvider;
7241        let mut sparse = ParallelSparseTrie::default();
7242        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7243        let value = b"test_value".to_vec();
7244
7245        sparse.update_leaf(path, value.clone(), &provider).unwrap();
7246
7247        // Check that the leaf exists
7248        let result = sparse.find_leaf(&path, None);
7249        assert_matches!(result, Ok(LeafLookup::Exists));
7250
7251        // Check with expected value matching
7252        let result = sparse.find_leaf(&path, Some(&value));
7253        assert_matches!(result, Ok(LeafLookup::Exists));
7254    }
7255
7256    #[test]
7257    fn find_leaf_value_mismatch() {
7258        // Create a simple trie with one leaf
7259        let provider = DefaultTrieNodeProvider;
7260        let mut sparse = ParallelSparseTrie::default();
7261        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7262        let value = b"test_value".to_vec();
7263        let wrong_value = b"wrong_value".to_vec();
7264
7265        sparse.update_leaf(path, value, &provider).unwrap();
7266
7267        // Check with wrong expected value
7268        let result = sparse.find_leaf(&path, Some(&wrong_value));
7269        assert_matches!(
7270            result,
7271            Err(LeafLookupError::ValueMismatch { path: p, expected: Some(e), actual: _a }) if p == path && e == wrong_value
7272        );
7273    }
7274
7275    #[test]
7276    fn find_leaf_not_found_empty_trie() {
7277        // Empty trie
7278        let sparse = ParallelSparseTrie::default();
7279        let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
7280
7281        // Leaf should not exist
7282        let result = sparse.find_leaf(&path, None);
7283        assert_matches!(result, Ok(LeafLookup::NonExistent));
7284    }
7285
7286    #[test]
7287    fn find_leaf_empty_trie() {
7288        let sparse = ParallelSparseTrie::default();
7289        let path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7290
7291        let result = sparse.find_leaf(&path, None);
7292        assert_matches!(result, Ok(LeafLookup::NonExistent));
7293    }
7294
7295    #[test]
7296    fn find_leaf_exists_no_value_check() {
7297        let provider = DefaultTrieNodeProvider;
7298        let mut sparse = ParallelSparseTrie::default();
7299        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7300        sparse.update_leaf(path, encode_account_value(0), &provider).unwrap();
7301
7302        let result = sparse.find_leaf(&path, None);
7303        assert_matches!(result, Ok(LeafLookup::Exists));
7304    }
7305
7306    #[test]
7307    fn find_leaf_exists_with_value_check_ok() {
7308        let provider = DefaultTrieNodeProvider;
7309        let mut sparse = ParallelSparseTrie::default();
7310        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7311        let value = encode_account_value(0);
7312        sparse.update_leaf(path, value.clone(), &provider).unwrap();
7313
7314        let result = sparse.find_leaf(&path, Some(&value));
7315        assert_matches!(result, Ok(LeafLookup::Exists));
7316    }
7317
7318    #[test]
7319    fn find_leaf_exclusion_branch_divergence() {
7320        let provider = DefaultTrieNodeProvider;
7321        let mut sparse = ParallelSparseTrie::default();
7322        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); // Creates branch at 0x12
7323        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6])); // Belongs to same branch
7324        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8])); // Diverges at nibble 7
7325
7326        sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7327        sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7328
7329        let result = sparse.find_leaf(&search_path, None);
7330        assert_matches!(result, Ok(LeafLookup::NonExistent))
7331    }
7332
7333    #[test]
7334    fn find_leaf_exclusion_extension_divergence() {
7335        let provider = DefaultTrieNodeProvider;
7336        let mut sparse = ParallelSparseTrie::default();
7337        // This will create an extension node at root with key 0x12
7338        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7339        // This path diverges from the extension key
7340        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8]));
7341
7342        sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7343
7344        let result = sparse.find_leaf(&search_path, None);
7345        assert_matches!(result, Ok(LeafLookup::NonExistent))
7346    }
7347
7348    #[test]
7349    fn find_leaf_exclusion_leaf_divergence() {
7350        let provider = DefaultTrieNodeProvider;
7351        let mut sparse = ParallelSparseTrie::default();
7352        let existing_leaf_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7353        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7354
7355        sparse.update_leaf(existing_leaf_path, encode_account_value(0), &provider).unwrap();
7356
7357        let result = sparse.find_leaf(&search_path, None);
7358        assert_matches!(result, Ok(LeafLookup::NonExistent))
7359    }
7360
7361    #[test]
7362    fn find_leaf_exclusion_path_ends_at_branch() {
7363        let provider = DefaultTrieNodeProvider;
7364        let mut sparse = ParallelSparseTrie::default();
7365        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); // Creates branch at 0x12
7366        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6]));
7367        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2])); // Path of the branch itself
7368
7369        sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7370        sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7371
7372        let result = sparse.find_leaf(&search_path, None);
7373        assert_matches!(result, Ok(LeafLookup::NonExistent));
7374    }
7375
7376    #[test]
7377    fn find_leaf_error_blinded_node_at_leaf_path() {
7378        // Scenario: The node *at* the leaf path is blinded.
7379        let blinded_hash = B256::repeat_byte(0xBB);
7380        let leaf_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7381
7382        let sparse = new_test_trie(
7383            [
7384                (
7385                    // Ext 0x12
7386                    Nibbles::default(),
7387                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x1, 0x2])),
7388                ),
7389                (
7390                    // Ext 0x123
7391                    Nibbles::from_nibbles_unchecked([0x1, 0x2]),
7392                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x3])),
7393                ),
7394                (
7395                    // Branch at 0x123, child 4
7396                    Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3]),
7397                    SparseNode::new_branch(TrieMask::new(0b10000), &[(0x4, blinded_hash)]),
7398                ),
7399            ]
7400            .into_iter(),
7401        );
7402
7403        let result = sparse.find_leaf(&leaf_path, None);
7404
7405        // Should error because it hit the blinded node exactly at the leaf path
7406        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7407            if path == leaf_path && hash == blinded_hash
7408        );
7409    }
7410
7411    #[test]
7412    fn find_leaf_error_blinded_node() {
7413        let blinded_hash = B256::repeat_byte(0xAA);
7414        let path_to_blind = Nibbles::from_nibbles_unchecked([0x1]);
7415        let search_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7416
7417        let sparse = new_test_trie(
7418            [
7419                // Root is a branch with child 0x1 (blinded) and 0x5 (revealed leaf)
7420                // So we set Bit 1 and Bit 5 in the state_mask
7421                (
7422                    Nibbles::default(),
7423                    SparseNode::new_branch(TrieMask::new(0b100010), &[(0x1, blinded_hash)]),
7424                ),
7425                (
7426                    Nibbles::from_nibbles_unchecked([0x5]),
7427                    SparseNode::new_leaf(Nibbles::from_nibbles_unchecked([0x6, 0x7, 0x8])),
7428                ),
7429            ]
7430            .into_iter(),
7431        );
7432
7433        let result = sparse.find_leaf(&search_path, None);
7434
7435        // Should error because it hit the blinded node at path 0x1
7436        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7437            if path == path_to_blind && hash == blinded_hash
7438        );
7439    }
7440
7441    #[test]
7442    fn test_mainnet_block_24185431_storage_0x6ba784ee() {
7443        reth_tracing::init_test_tracing();
7444
7445        // Reveal branch at 0x3 with full state
7446        let mut branch_0x3_hashes = vec![
7447            B256::from(hex!("fc11ba8de4b220b8f19a09f0676c69b8e18bae1350788392640069e59b41733d")),
7448            B256::from(hex!("8afe085cc6685680bd8ba4bac6e65937a4babf737dc5e7413d21cdda958e8f74")),
7449            B256::from(hex!("c7b6f7c0fc601a27aece6ec178fd9be17cdee77c4884ecfbe1ee459731eb57da")),
7450            B256::from(hex!("71c1aec60db78a2deb4e10399b979a2ed5be42b4ee0c0a17c614f9ddc9f9072e")),
7451            B256::from(hex!("e9261302e7c0b77930eaf1851b585210906cd01e015ab6be0f7f3c0cc947c32a")),
7452            B256::from(hex!("38ce8f369c56bd77fabdf679b27265b1f8d0a54b09ef612c8ee8ddfc6b3fab95")),
7453            B256::from(hex!("7b507a8936a28c5776b647d1c4bda0bbbb3d0d227f16c5f5ebba58d02e31918d")),
7454            B256::from(hex!("0f456b9457a824a81e0eb555aa861461acb38674dcf36959b3b26deb24ed0af9")),
7455            B256::from(hex!("2145420289652722ad199ba932622e3003c779d694fa5a2acfb2f77b0782b38a")),
7456            B256::from(hex!("2c1a04dce1a9e2f1cfbf8806edce50a356dfa58e7e7c542c848541502613b796")),
7457            B256::from(hex!("dad7ca55186ac8f40d4450dc874166df8267b44abc07e684d9507260f5712df3")),
7458            B256::from(hex!("3a8c2a1d7d2423e92965ec29014634e7f0307ded60b1a63d28c86c3222b24236")),
7459            B256::from(hex!("4e9929e6728b3a7bf0db6a0750ab376045566b556c9c605e606ecb8ec25200d7")),
7460            B256::from(hex!("1797c36f98922f52292c161590057a1b5582d5503e3370bcfbf6fd939f3ec98b")),
7461            B256::from(hex!("9e514589a9c9210b783c19fa3f0b384bbfaefe98f10ea189a2bfc58c6bf000a1")),
7462            B256::from(hex!("85bdaabbcfa583cbd049650e41d3d19356bd833b3ed585cf225a3548557c7fa3")),
7463        ];
7464        let branch_0x3_node = create_branch_node(
7465            Nibbles::from_nibbles([0x3]),
7466            &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf],
7467            branch_0x3_hashes.iter().map(RlpNode::word_rlp),
7468        );
7469
7470        // Reveal branch at 0x31
7471        let branch_0x31_hashes = vec![B256::from(hex!(
7472            "3ca994ba59ce70b83fee1f01731c8dac4fdd0f70ade79bf9b0695c4c53531aab"
7473        ))];
7474        let branch_0x31_node = create_branch_node_with_children(
7475            &[0xc],
7476            branch_0x31_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
7477        );
7478
7479        // Reveal leaf at 0x31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81
7480        let leaf_path = hex!("31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81");
7481        let leaf_nibbles = Nibbles::unpack(leaf_path.as_slice());
7482        let leaf_value = hex!("0009ae8ce8245bff").to_vec();
7483
7484        // Reveal branch at 0x31c
7485        let branch_0x31c_hashes = vec![
7486            B256::from(hex!("1a68fdb36b77e9332b49a977faf800c22d0199e6cecf44032bb083c78943e540")),
7487            B256::from(hex!("cd4622c6df6fd7172c7fed1b284ef241e0f501b4c77b675ef10c612bd0948a7a")),
7488            B256::from(hex!("abf3603d2f991787e21f1709ee4c7375d85dfc506995c0435839fccf3fe2add4")),
7489        ];
7490        let branch_0x31c_node = create_branch_node_with_children(
7491            &[0x3, 0x7, 0xc],
7492            branch_0x31c_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
7493        );
7494
7495        // Reveal the trie structure using ProofTrieNode
7496        let mut proof_nodes = vec![ProofTrieNodeV2 {
7497            path: Nibbles::from_nibbles([0x3, 0x1]),
7498            node: branch_0x31_node,
7499            masks: Some(BranchNodeMasks {
7500                tree_mask: TrieMask::new(4096),
7501                hash_mask: TrieMask::new(4096),
7502            }),
7503        }];
7504
7505        // Create a sparse trie and reveal nodes
7506        let mut trie = ParallelSparseTrie::default()
7507            .with_root(
7508                branch_0x3_node,
7509                Some(BranchNodeMasks {
7510                    tree_mask: TrieMask::new(26099),
7511                    hash_mask: TrieMask::new(65535),
7512                }),
7513                true,
7514            )
7515            .expect("root revealed");
7516
7517        trie.reveal_nodes(&mut proof_nodes).unwrap();
7518
7519        // Update the leaf in order to reveal it in the trie
7520        trie.update_leaf(leaf_nibbles, leaf_value, NoRevealProvider).unwrap();
7521
7522        // Now try deleting the leaf
7523        let Err(err) = trie.remove_leaf(&leaf_nibbles, NoRevealProvider) else {
7524            panic!("expected blinded node error");
7525        };
7526        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if path == &Nibbles::from_nibbles([0x3, 0x1, 0xc]));
7527
7528        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
7529            path: Nibbles::from_nibbles([0x3, 0x1, 0xc]),
7530            node: branch_0x31c_node,
7531            masks: Some(BranchNodeMasks { tree_mask: 0.into(), hash_mask: 4096.into() }),
7532        }])
7533        .unwrap();
7534
7535        // Now remove the leaf again, this should succeed
7536        trie.remove_leaf(&leaf_nibbles, NoRevealProvider).unwrap();
7537
7538        // Compute the root to trigger updates
7539        let _ = trie.root();
7540
7541        // Assert the resulting branch node updates
7542        let updates = trie.updates_ref();
7543
7544        // Check that the branch at 0x3 was updated with the expected structure
7545        let branch_0x3_update = updates
7546            .updated_nodes
7547            .get(&Nibbles::from_nibbles([0x3]))
7548            .expect("Branch at 0x3 should be in updates");
7549
7550        // We no longer expect to track the hash for child 1
7551        branch_0x3_hashes.remove(1);
7552
        // Expected structure and remaining hashes of the branch node at 0x3 after the removal
7554        let expected_branch = BranchNodeCompact::new(
7555            0b1111111111111111,
7556            0b0110010111110011,
7557            0b1111111111111101,
7558            branch_0x3_hashes,
7559            None,
7560        );
7561
7562        assert_eq!(branch_0x3_update, &expected_branch);
7563    }
7564
7565    #[test]
7566    fn test_get_leaf_value_lower_subtrie() {
7567        // This test demonstrates that get_leaf_value must look in the correct subtrie,
7568        // not always in upper_subtrie.
7569
7570        // Set up a root branch pointing to nibble 0x1, and a branch at [0x1] pointing to
7571        // nibble 0x2, so that the lower subtrie at [0x1, 0x2] is reachable.
7572        let root_branch =
7573            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
7574        let branch_at_1 =
7575            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
7576        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
7577        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
7578            path: Nibbles::from_nibbles([0x1]),
7579            node: branch_at_1,
7580            masks: None,
7581        }])
7582        .unwrap();
7583
7584        // Create a leaf node with path >= 2 nibbles (will go to lower subtrie)
7585        let leaf_path = Nibbles::from_nibbles([0x1, 0x2]);
7586        let leaf_key = Nibbles::from_nibbles([0x3, 0x4]);
7587        let leaf_node = create_leaf_node(leaf_key.to_vec(), 42);
7588
7589        // Reveal the leaf node
7590        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path: leaf_path, node: leaf_node, masks: None }])
7591            .unwrap();
7592
7593        // The full path is leaf_path + leaf_key
7594        let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]);
7595
7596        // Verify the value is stored in the lower subtrie, not upper
7597        let idx = path_subtrie_index_unchecked(&leaf_path);
7598        let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
7599        assert!(
7600            lower_subtrie.inner.values.contains_key(&full_path),
7601            "value should be in lower subtrie"
7602        );
7603        assert!(
7604            !trie.upper_subtrie.inner.values.contains_key(&full_path),
7605            "value should NOT be in upper subtrie"
7606        );
7607
7608        // get_leaf_value should find the value
7609        assert!(
7610            trie.get_leaf_value(&full_path).is_some(),
7611            "get_leaf_value should find the value in lower subtrie"
7612        );
7613    }
7614
7615    /// Test that `get_leaf_value` correctly returns values stored via `update_leaf`
7616    /// when the leaf node ends up in the upper subtrie (depth < 2).
7617    ///
7618    /// This can happen when the trie is sparse and the leaf is inserted at the root level.
7619    /// Previously, `get_leaf_value` only checked the lower subtrie based on the full path,
7620    /// missing values stored in `upper_subtrie.inner.values`.
7621    #[test]
7622    fn test_get_leaf_value_upper_subtrie_via_update_leaf() {
7623        let provider = NoRevealProvider;
7624
7625        // Create an empty trie with an empty root
7626        let mut trie = ParallelSparseTrie::default()
7627            .with_root(TrieNodeV2::EmptyRoot, None, false)
7628            .expect("root revealed");
7629
7630        // Create a full 64-nibble path (like a real account hash)
7631        let full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA, 0xB, 0xC]));
7632        let value = encode_account_value(42);
7633
7634        // Insert the leaf - since the trie is empty, the leaf node will be created
7635        // at the root level (depth 0), which is in the upper subtrie
7636        trie.update_leaf(full_path, value.clone(), provider).unwrap();
7637
7638        // Verify the value is stored in upper_subtrie (where update_leaf puts it)
7639        assert!(
7640            trie.upper_subtrie.inner.values.contains_key(&full_path),
7641            "value should be in upper subtrie after update_leaf"
7642        );
7643
7644        // Verify the value can be retrieved via get_leaf_value
7645        // Before the fix, this would return None because get_leaf_value only
7646        // checked the lower subtrie based on the path length
7647        let retrieved = trie.get_leaf_value(&full_path);
7648        assert_eq!(retrieved, Some(&value));
7649    }
7650
7651    /// Test that `get_leaf_value` works for values in both upper and lower subtries.
7652    #[test]
7653    fn test_get_leaf_value_upper_and_lower_subtries() {
7654        let provider = NoRevealProvider;
7655
7656        // Create an empty trie
7657        let mut trie = ParallelSparseTrie::default()
7658            .with_root(TrieNodeV2::EmptyRoot, None, false)
7659            .expect("root revealed");
7660
7661        // Insert first leaf - will be at root level (upper subtrie)
7662        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA]));
7663        let value1 = encode_account_value(1);
7664        trie.update_leaf(path1, value1.clone(), provider).unwrap();
7665
7666        // Insert second leaf with different prefix - creates a branch
7667        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0xB]));
7668        let value2 = encode_account_value(2);
7669        trie.update_leaf(path2, value2.clone(), provider).unwrap();
7670
7671        // Both values should be retrievable
7672        assert_eq!(trie.get_leaf_value(&path1), Some(&value1));
7673        assert_eq!(trie.get_leaf_value(&path2), Some(&value2));
7674    }
7675
7676    /// Test that `get_leaf_value` works for storage tries which are often very sparse.
7677    #[test]
7678    fn test_get_leaf_value_sparse_storage_trie() {
7679        let provider = NoRevealProvider;
7680
7681        // Simulate a storage trie with a single slot
7682        let mut trie = ParallelSparseTrie::default()
7683            .with_root(TrieNodeV2::EmptyRoot, None, false)
7684            .expect("root revealed");
7685
7686        // Single storage slot - leaf will be at root (depth 0)
7687        let slot_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x9]));
7688        let slot_value = alloy_rlp::encode(U256::from(12345));
7689        trie.update_leaf(slot_path, slot_value.clone(), provider).unwrap();
7690
7691        // Value should be retrievable
7692        assert_eq!(trie.get_leaf_value(&slot_path), Some(&slot_value));
7693    }
7694
7695    #[test]
7696    fn test_prune_empty_suffix_key_regression() {
7697        // Regression test: when a leaf has an empty suffix key (full path == node path),
7698        // the value must be removed when that path becomes a pruned root.
7699        // This catches the bug where is_strict_descendant fails to remove p == pruned_root.
7700
7701        use crate::provider::DefaultTrieNodeProvider;
7702
7703        let provider = DefaultTrieNodeProvider;
7704        let mut parallel = ParallelSparseTrie::default();
7705
7706        // Large value to ensure nodes have hashes (RLP >= 32 bytes)
7707        let value = {
7708            let account = Account {
7709                nonce: 0x123456789abcdef,
7710                balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
7711                ..Default::default()
7712            };
7713            let mut buf = Vec::new();
7714            account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
7715            buf
7716        };
7717
7718        // Create a trie with multiple leaves to force a branch at root
7719        for i in 0..16u8 {
7720            parallel
7721                .update_leaf(
7722                    pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5])),
7723                    value.clone(),
7724                    &provider,
7725                )
7726                .unwrap();
7727        }
7728
7729        // Compute root to get hashes
7730        let root_before = parallel.root();
7731
7732        // Prune at depth 0: the children of root become pruned roots
7733        parallel.prune(0);
7734
7735        let root_after = parallel.root();
7736        assert_eq!(root_before, root_after, "root hash must be preserved");
7737
7738        // Key assertion: values under pruned paths must be removed
7739        // With the bug, values at pruned_root paths (not strict descendants) would remain
7740        for i in 0..16u8 {
7741            let path = pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5]));
7742            assert!(
7743                parallel.get_leaf_value(&path).is_none(),
7744                "value at {:?} should be removed after prune",
7745                path
7746            );
7747        }
7748    }
7749
7750    #[test]
7751    fn test_prune_at_various_depths() {
7752        // Test depths 0 and 1, which are in the Upper subtrie (no heat tracking).
7753        // Depth 2 is the boundary where Lower subtries start (UPPER_TRIE_MAX_DEPTH=2),
7754        // and with `depth >= max_depth` heat check, hot Lower subtries at depth 2
7755        // are protected from pruning traversal.
7756        for max_depth in [0, 1] {
7757            let provider = DefaultTrieNodeProvider;
7758            let mut trie = ParallelSparseTrie::default();
7759
7760            let value = large_account_value();
7761
7762            for i in 0..4u8 {
7763                for j in 0..4u8 {
7764                    for k in 0..4u8 {
7765                        trie.update_leaf(
7766                            pad_nibbles_right(Nibbles::from_nibbles([i, j, k, 0x1, 0x2, 0x3])),
7767                            value.clone(),
7768                            &provider,
7769                        )
7770                        .unwrap();
7771                    }
7772                }
7773            }
7774
7775            let root_before = trie.root();
7776            let nodes_before = trie.size_hint();
7777
7778            // Prune multiple times to allow heat to fully decay.
7779            // Heat starts at 1 and decays by 1 each cycle for unmodified subtries,
7780            // so we need 2 prune cycles: 1→0, then actual prune.
7781            for _ in 0..2 {
7782                trie.prune(max_depth);
7783            }
7784
7785            let root_after = trie.root();
7786            assert_eq!(root_before, root_after, "root hash should be preserved after prune");
7787
7788            let nodes_after = trie.size_hint();
7789            assert!(
7790                nodes_after < nodes_before,
7791                "node count should decrease after prune at depth {max_depth}"
7792            );
7793
7794            if max_depth == 0 {
7795                // Root with 4 blinded hashes for children at [0], [1], [2], [3]
7796                assert_eq!(nodes_after, 1, "root");
7797            }
7798        }
7799    }
7800
7801    #[test]
7802    fn test_prune_empty_trie() {
7803        let mut trie = ParallelSparseTrie::default();
7804        trie.prune(2);
7805        let root = trie.root();
7806        assert_eq!(root, EMPTY_ROOT_HASH, "empty trie should have empty root hash");
7807    }
7808
7809    #[test]
7810    fn test_prune_preserves_root_hash() {
7811        let provider = DefaultTrieNodeProvider;
7812        let mut trie = ParallelSparseTrie::default();
7813
7814        let value = large_account_value();
7815
7816        for i in 0..8u8 {
7817            for j in 0..4u8 {
7818                trie.update_leaf(
7819                    pad_nibbles_right(Nibbles::from_nibbles([i, j, 0x3, 0x4, 0x5, 0x6])),
7820                    value.clone(),
7821                    &provider,
7822                )
7823                .unwrap();
7824            }
7825        }
7826
7827        let root_before = trie.root();
7828        trie.prune(1);
7829        let root_after = trie.root();
7830        assert_eq!(root_before, root_after, "root hash must be preserved after prune");
7831    }
7832
7833    #[test]
7834    fn test_prune_single_leaf_trie() {
7835        let provider = DefaultTrieNodeProvider;
7836        let mut trie = ParallelSparseTrie::default();
7837
7838        let value = large_account_value();
7839        trie.update_leaf(
7840            pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])),
7841            value,
7842            &provider,
7843        )
7844        .unwrap();
7845
7846        let root_before = trie.root();
7847        let nodes_before = trie.size_hint();
7848
7849        trie.prune(0);
7850
7851        let root_after = trie.root();
7852        assert_eq!(root_before, root_after, "root hash should be preserved");
7853        assert_eq!(trie.size_hint(), nodes_before, "single leaf trie should not change");
7854    }
7855
7856    #[test]
7857    fn test_prune_deep_depth_no_effect() {
7858        let provider = DefaultTrieNodeProvider;
7859        let mut trie = ParallelSparseTrie::default();
7860
7861        let value = large_account_value();
7862
7863        for i in 0..4u8 {
7864            trie.update_leaf(
7865                pad_nibbles_right(Nibbles::from_nibbles([i, 0x2, 0x3, 0x4])),
7866                value.clone(),
7867                &provider,
7868            )
7869            .unwrap();
7870        }
7871
7872        trie.root();
7873        let nodes_before = trie.size_hint();
7874
7875        trie.prune(100);
7876
7877        assert_eq!(nodes_before, trie.size_hint(), "deep prune should have no effect");
7878    }
7879
7880    #[test]
7881    fn test_prune_extension_node_depth_semantics() {
7882        let provider = DefaultTrieNodeProvider;
7883        let mut trie = ParallelSparseTrie::default();
7884
7885        let value = large_account_value();
7886
7887        trie.update_leaf(
7888            pad_nibbles_right(Nibbles::from_nibbles([0, 1, 2, 3, 0, 5, 6, 7])),
7889            value.clone(),
7890            &provider,
7891        )
7892        .unwrap();
7893        trie.update_leaf(
7894            pad_nibbles_right(Nibbles::from_nibbles([0, 1, 2, 3, 1, 5, 6, 7])),
7895            value,
7896            &provider,
7897        )
7898        .unwrap();
7899
7900        let root_before = trie.root();
7901        // Prune multiple times to allow heat to fully decay.
7902        // Heat starts at 1 and decays by 1 each cycle for unmodified subtries,
7903        // so we need 2 prune cycles: 1→0, then actual prune.
7904        for _ in 0..2 {
7905            trie.prune(1);
7906        }
7907
7908        assert_eq!(root_before, trie.root(), "root hash should be preserved");
7909        // Root + branch
7910        assert_eq!(trie.size_hint(), 2, "root + extension + hash stubs after prune(1)");
7911    }
7912
7913    #[test]
7914    fn test_prune_root_hash_preserved() {
7915        let provider = DefaultTrieNodeProvider;
7916        let mut trie = ParallelSparseTrie::default();
7917
7918        // Create two 64-nibble paths that differ only in the first nibble
7919        let key1 = Nibbles::unpack(B256::repeat_byte(0x00));
7920        let key2 = Nibbles::unpack(B256::repeat_byte(0x11));
7921
7922        let large_value = large_account_value();
7923        trie.update_leaf(key1, large_value.clone(), &provider).unwrap();
7924        trie.update_leaf(key2, large_value, &provider).unwrap();
7925
7926        let root_before = trie.root();
7927
7928        trie.prune(0);
7929
7930        assert_eq!(root_before, trie.root(), "root hash must be preserved after pruning");
7931    }
7932
7933    #[test]
7934    fn test_prune_mixed_embedded_and_hashed() {
7935        let provider = DefaultTrieNodeProvider;
7936        let mut trie = ParallelSparseTrie::default();
7937
7938        let large_value = large_account_value();
7939        let small_value = vec![0x80];
7940
7941        for i in 0..8u8 {
7942            let value = if i < 4 { large_value.clone() } else { small_value.clone() };
7943            trie.update_leaf(
7944                pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3])),
7945                value,
7946                &provider,
7947            )
7948            .unwrap();
7949        }
7950
7951        let root_before = trie.root();
7952        trie.prune(0);
7953        assert_eq!(root_before, trie.root(), "root hash must be preserved");
7954    }
7955
7956    #[test]
7957    fn test_prune_many_lower_subtries() {
7958        let provider = DefaultTrieNodeProvider;
7959
7960        let large_value = large_account_value();
7961
7962        let mut keys = Vec::new();
7963        for first in 0..16u8 {
7964            for second in 0..16u8 {
7965                keys.push(pad_nibbles_right(Nibbles::from_nibbles([
7966                    first, second, 0x1, 0x2, 0x3, 0x4,
7967                ])));
7968            }
7969        }
7970
7971        let mut trie = ParallelSparseTrie::default();
7972
7973        for key in &keys {
7974            trie.update_leaf(*key, large_value.clone(), &provider).unwrap();
7975        }
7976
7977        let root_before = trie.root();
7978
7979        // Prune multiple times to allow heat to fully decay.
7980        // Heat starts at 1 and decays by 1 each cycle for unmodified subtries.
7981        let mut total_pruned = 0;
7982        for _ in 0..2 {
7983            total_pruned += trie.prune(1);
7984        }
7985
7986        assert!(total_pruned > 0, "should have pruned some nodes");
7987        assert_eq!(root_before, trie.root(), "root hash should be preserved");
7988
7989        for key in &keys {
7990            assert!(trie.get_leaf_value(key).is_none(), "value should be pruned");
7991        }
7992    }
7993
7994    #[test]
7995    fn test_prune_max_depth_overflow() {
7996        // Verify that max_depth > 255 is not truncated (was u8, now usize)
7997        let provider = DefaultTrieNodeProvider;
7998        let mut trie = ParallelSparseTrie::default();
7999
8000        let value = large_account_value();
8001
8002        for i in 0..4u8 {
8003            trie.update_leaf(
8004                pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3])),
8005                value.clone(),
8006                &provider,
8007            )
8008            .unwrap();
8009        }
8010
8011        trie.root();
8012        let nodes_before = trie.size_hint();
8013
8014        // If depth were truncated to u8, 300 would become 44 and might prune something
8015        trie.prune(300);
8016
8017        assert_eq!(
8018            nodes_before,
8019            trie.size_hint(),
8020            "prune(300) should have no effect on a shallow trie"
8021        );
8022    }
8023
8024    #[test]
8025    fn test_prune_fast_path_case2_update_after() {
8026        // Test fast-path Case 2: upper prune root is prefix of lower subtrie.
8027        // After pruning, we should be able to update leaves without panic.
8028        let provider = DefaultTrieNodeProvider;
8029        let mut trie = ParallelSparseTrie::default();
8030
8031        let value = large_account_value();
8032
8033        // Create keys that span into lower subtries (path.len() >= UPPER_TRIE_MAX_DEPTH)
8034        // UPPER_TRIE_MAX_DEPTH is typically 2, so paths of length 3+ go to lower subtries
8035        for first in 0..4u8 {
8036            for second in 0..4u8 {
8037                trie.update_leaf(
8038                    pad_nibbles_right(Nibbles::from_nibbles([
8039                        first, second, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6,
8040                    ])),
8041                    value.clone(),
8042                    &provider,
8043                )
8044                .unwrap();
8045            }
8046        }
8047
8048        let root_before = trie.root();
8049
8050        // Prune at depth 0 - upper roots become prefixes of lower subtrie paths
8051        trie.prune(0);
8052
8053        let root_after = trie.root();
8054        assert_eq!(root_before, root_after, "root hash should be preserved");
8055
8056        // Now try to update a leaf - this should not panic even though lower subtries
8057        // were replaced with Blind(None)
8058        let new_path =
8059            pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x5, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
8060        trie.update_leaf(new_path, value, &provider).unwrap();
8061
8062        // The trie should still be functional
8063        let _ = trie.root();
8064    }
8065
8066    // update_leaves tests
8067
8068    #[test]
8069    fn test_update_leaves_successful_update() {
8070        use crate::LeafUpdate;
8071        use alloy_primitives::map::B256Map;
8072        use std::cell::RefCell;
8073
8074        let provider = DefaultTrieNodeProvider;
8075        let mut trie = ParallelSparseTrie::default();
8076
8077        // Create a leaf in the trie using a full-length key
8078        let b256_key = B256::with_last_byte(42);
8079        let key = Nibbles::unpack(b256_key);
8080        let value = encode_account_value(1);
8081        trie.update_leaf(key, value, &provider).unwrap();
8082
8083        // Create update map with a new value for the same key
8084        let new_value = encode_account_value(2);
8085
8086        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8087        updates.insert(b256_key, LeafUpdate::Changed(new_value));
8088
8089        let proof_targets = RefCell::new(Vec::new());
8090        trie.update_leaves(&mut updates, |path, min_len| {
8091            proof_targets.borrow_mut().push((path, min_len));
8092        })
8093        .unwrap();
8094
8095        // Update should succeed: map empty, callback not invoked
8096        assert!(updates.is_empty(), "Update map should be empty after successful update");
8097        assert!(
8098            proof_targets.borrow().is_empty(),
8099            "Callback should not be invoked for revealed paths"
8100        );
8101    }
8102
8103    #[test]
8104    fn test_update_leaves_insert_new_leaf() {
8105        use crate::LeafUpdate;
8106        use alloy_primitives::map::B256Map;
8107        use std::cell::RefCell;
8108
8109        let mut trie = ParallelSparseTrie::default();
8110
8111        // Insert a NEW leaf (key doesn't exist yet) via update_leaves
8112        let b256_key = B256::with_last_byte(99);
8113        let new_value = encode_account_value(42);
8114
8115        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8116        updates.insert(b256_key, LeafUpdate::Changed(new_value.clone()));
8117
8118        let proof_targets = RefCell::new(Vec::new());
8119        trie.update_leaves(&mut updates, |path, min_len| {
8120            proof_targets.borrow_mut().push((path, min_len));
8121        })
8122        .unwrap();
8123
8124        // Insert should succeed: map empty, callback not invoked
8125        assert!(updates.is_empty(), "Update map should be empty after successful insert");
8126        assert!(
8127            proof_targets.borrow().is_empty(),
8128            "Callback should not be invoked for new leaf insert"
8129        );
8130
8131        // Verify the leaf was actually inserted
8132        let full_path = Nibbles::unpack(b256_key);
8133        assert_eq!(
8134            trie.get_leaf_value(&full_path),
8135            Some(&new_value),
8136            "New leaf value should be retrievable"
8137        );
8138    }
8139
    /// `update_leaves` targeting a blinded (hash-only) node must leave the update in
    /// the map, leave `prefix_set` untouched, and request a reveal via the callback
    /// with `min_len` equal to the blinded node's path length.
    #[test]
    fn test_update_leaves_blinded_node() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Create a trie with a blinded node.
        // Use a small value that fits in RLP encoding (so the leaf embeds inline).
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        // Root branch: child 0 is only a hash (blinded), child 1 is the embedded leaf.
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal only the branch and one child, leaving child 0 as a Hash node.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // The path 0x0... is blinded (Hash node).
        // Create an update targeting the blinded path using a full B256 key.
        let b256_key = B256::ZERO; // starts with 0x0...

        let new_value = encode_account_value(42);
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(new_value));

        // Record the proof targets requested by the reveal callback.
        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Update should remain in map (blinded node) so the caller can retry it
        // after revealing the requested proof nodes.
        assert!(!updates.is_empty(), "Update should remain in map when hitting blinded node");

        // prefix_set should be unchanged after failed update.
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed update on blinded node"
        );

        // Callback should be invoked.
        let targets = proof_targets.borrow();
        assert!(!targets.is_empty(), "Callback should be invoked for blinded path");

        // min_len should equal the blinded node's path length (1 nibble).
        assert_eq!(targets[0].1, 1, "min_len should equal blinded node path length");
    }
8217
8218    #[test]
8219    fn test_update_leaves_removal() {
8220        use crate::LeafUpdate;
8221        use alloy_primitives::map::B256Map;
8222        use std::cell::RefCell;
8223
8224        let provider = DefaultTrieNodeProvider;
8225        let mut trie = ParallelSparseTrie::default();
8226
8227        // Create two leaves so removal doesn't result in empty trie issues
8228        // Use full-length keys
8229        let b256_key1 = B256::with_last_byte(1);
8230        let b256_key2 = B256::with_last_byte(2);
8231        let key1 = Nibbles::unpack(b256_key1);
8232        let key2 = Nibbles::unpack(b256_key2);
8233        let value = encode_account_value(1);
8234        trie.update_leaf(key1, value.clone(), &provider).unwrap();
8235        trie.update_leaf(key2, value, &provider).unwrap();
8236
8237        // Create an update to remove key1 (empty value = removal)
8238        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8239        updates.insert(b256_key1, LeafUpdate::Changed(vec![])); // empty = removal
8240
8241        let proof_targets = RefCell::new(Vec::new());
8242        trie.update_leaves(&mut updates, |path, min_len| {
8243            proof_targets.borrow_mut().push((path, min_len));
8244        })
8245        .unwrap();
8246
8247        // Removal should succeed: map empty
8248        assert!(updates.is_empty(), "Update map should be empty after successful removal");
8249    }
8250
    /// A removal through `update_leaves` that hits a blinded node must be reverted:
    /// the update stays in the map, the previously-stored value is restored,
    /// `prefix_set` is untouched, and the reveal callback is invoked.
    #[test]
    fn test_update_leaves_removal_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Create a trie with a blinded node.
        // Use a small value that fits in RLP encoding (so the leaf embeds inline).
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        // Root branch: child 0 is only a hash (blinded), child 1 is the embedded leaf.
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal only the branch and the leaf at nibble 1, leaving nibble 0 blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Simulate having a known value behind the blinded node.
        let b256_key = B256::ZERO; // starts with 0x0...
        let full_path = Nibbles::unpack(b256_key);

        // Insert the value into the trie's values map (simulating we know about it).
        let old_value = encode_account_value(99);
        trie.upper_subtrie.inner.values.insert(full_path, old_value.clone());

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); // empty = removal

        // Record the proof targets requested by the reveal callback.
        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Callback should be invoked.
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked when removal hits blinded node"
        );

        // Update should remain in map so the caller can retry it after revealing.
        assert!(!updates.is_empty(), "Update should remain in map when removal hits blinded node");

        // Original value should be preserved (reverted).
        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&old_value),
            "Original value should be preserved after failed removal"
        );

        // prefix_set should be unchanged after failed removal.
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed removal on blinded node"
        );
    }
8336
    /// Removal that would collapse a branch whose surviving sibling is blinded must
    /// fail atomically: the update stays queued, the proof callback fires for the
    /// blinded path, and no trie state (nodes, values, prefix set) is mutated.
    #[test]
    fn test_update_leaves_removal_branch_collapse_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Create a branch node at root with two children:
        // - Child at nibble 0: a blinded Hash node
        // - Child at nibble 1: a revealed Leaf node
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(Nibbles::default(), small_value);
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at nibble 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), /* leaf at nibble 1 */
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01), // nibble 0 is hashed
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the branch and the leaf at nibble 1, leaving nibble 0 as Hash node
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Insert the leaf's value into the values map for the revealed leaf.
        // NOTE(review): `B256::with_last_byte(0x10)` is 0x00..10, so the unpacked
        // 64-nibble path starts with nibble 0 (the blinded child), not nibble 1 as
        // previously claimed here ("0x10 has first nibble = 1" described the byte,
        // not the path). The removal therefore appears to hit the blinded node
        // directly; confirm whether a key with a *leading* 0x10 byte was intended.
        let b256_key = B256::with_last_byte(0x10);
        let full_path = Nibbles::unpack(b256_key);
        let leaf_value = encode_account_value(42);
        trie.upper_subtrie.inner.values.insert(full_path, leaf_value.clone());

        // Record state before update_leaves
        let prefix_set_len_before = trie.prefix_set.len();
        // Total node count across the upper subtrie and all revealed lower subtries.
        let node_count_before = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();

        // An empty `Changed` payload encodes a removal.
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); // removal

        let proof_targets = RefCell::new(Vec::new());
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Assert: update remains in map (removal blocked by blinded sibling)
        assert!(
            !updates.is_empty(),
            "Update should remain in map when removal would collapse branch with blinded sibling"
        );

        // Assert: callback was invoked for the blinded path
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked for blinded sibling path"
        );

        // Assert: prefix_set unchanged (atomic failure)
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after atomic failure"
        );

        // Assert: node count unchanged
        let node_count_after = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();
        assert_eq!(
            node_count_before, node_count_after,
            "Node count should be unchanged after atomic failure"
        );

        // Assert: the leaf value still exists (not removed)
        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&leaf_value),
            "Leaf value should still exist after failed removal"
        );
    }
8443
8444    #[test]
8445    fn test_update_leaves_touched() {
8446        use crate::LeafUpdate;
8447        use alloy_primitives::map::B256Map;
8448        use std::cell::RefCell;
8449
8450        let provider = DefaultTrieNodeProvider;
8451        let mut trie = ParallelSparseTrie::default();
8452
8453        // Create a leaf in the trie using a full-length key
8454        let b256_key = B256::with_last_byte(42);
8455        let key = Nibbles::unpack(b256_key);
8456        let value = encode_account_value(1);
8457        trie.update_leaf(key, value, &provider).unwrap();
8458
8459        // Create a Touched update for the existing key
8460        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8461        updates.insert(b256_key, LeafUpdate::Touched);
8462
8463        let proof_targets = RefCell::new(Vec::new());
8464        let prefix_set_len_before = trie.prefix_set.len();
8465
8466        trie.update_leaves(&mut updates, |path, min_len| {
8467            proof_targets.borrow_mut().push((path, min_len));
8468        })
8469        .unwrap();
8470
8471        // Update should be removed (path is accessible)
8472        assert!(updates.is_empty(), "Touched update should be removed for accessible path");
8473
8474        // No callback
8475        assert!(
8476            proof_targets.borrow().is_empty(),
8477            "Callback should not be invoked for accessible path"
8478        );
8479
8480        // prefix_set should be unchanged since Touched is read-only
8481        assert_eq!(
8482            trie.prefix_set.len(),
8483            prefix_set_len_before,
8484            "prefix_set should be unchanged for Touched update (read-only)"
8485        );
8486    }
8487
8488    #[test]
8489    fn test_update_leaves_touched_nonexistent() {
8490        use crate::LeafUpdate;
8491        use alloy_primitives::map::B256Map;
8492        use std::cell::RefCell;
8493
8494        let mut trie = ParallelSparseTrie::default();
8495
8496        // Create a Touched update for a key that doesn't exist
8497        let b256_key = B256::with_last_byte(99);
8498        let full_path = Nibbles::unpack(b256_key);
8499
8500        let prefix_set_len_before = trie.prefix_set.len();
8501
8502        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8503        updates.insert(b256_key, LeafUpdate::Touched);
8504
8505        let proof_targets = RefCell::new(Vec::new());
8506        trie.update_leaves(&mut updates, |path, min_len| {
8507            proof_targets.borrow_mut().push((path, min_len));
8508        })
8509        .unwrap();
8510
8511        // Update should be removed (path IS accessible - it's just empty)
8512        assert!(updates.is_empty(), "Touched update should be removed for accessible (empty) path");
8513
8514        // No callback should be invoked (path is revealed, just empty)
8515        assert!(
8516            proof_targets.borrow().is_empty(),
8517            "Callback should not be invoked for accessible path"
8518        );
8519
8520        // prefix_set should NOT be modified (Touched is read-only)
8521        assert_eq!(
8522            trie.prefix_set.len(),
8523            prefix_set_len_before,
8524            "prefix_set should not be modified by Touched update"
8525        );
8526
8527        // No value should be inserted
8528        assert!(
8529            trie.get_leaf_value(&full_path).is_none(),
8530            "No value should exist for non-existent key after Touched update"
8531        );
8532    }
8533
    /// A `Touched` (read-only) update whose path descends into a blinded node must
    /// stay queued and request a proof, while leaving the prefix set untouched.
    #[test]
    fn test_update_leaves_touched_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Create a trie with a blinded node
        // Use a small value that fits in RLP encoding
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the root branch and the leaf at nibble 1, leaving the child at
        // nibble 0 as an unrevealed Hash node.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Create a Touched update targeting the blinded path using full B256 key
        let b256_key = B256::ZERO; // starts with 0x0...

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Touched);

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Callback should be invoked
        assert!(!proof_targets.borrow().is_empty(), "Callback should be invoked for blinded path");

        // Update should remain in map
        assert!(!updates.is_empty(), "Touched update should remain in map for blinded path");

        // prefix_set should be unchanged since Touched is read-only
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged for Touched update on blinded path"
        );
    }
8604
8605    #[test]
8606    fn test_update_leaves_deduplication() {
8607        use crate::LeafUpdate;
8608        use alloy_primitives::map::B256Map;
8609        use std::cell::RefCell;
8610
8611        // Create a trie with a blinded node
8612        // Use a small value that fits in RLP encoding
8613        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
8614        let leaf = LeafNode::new(
8615            Nibbles::default(), // short key for RLP encoding
8616            small_value,
8617        );
8618        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
8619            Nibbles::default(),
8620            vec![
8621                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
8622                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
8623            ],
8624            TrieMask::new(0b11),
8625            None,
8626        ));
8627
8628        let mut trie = ParallelSparseTrie::from_root(
8629            branch.clone(),
8630            Some(BranchNodeMasks {
8631                hash_mask: TrieMask::new(0b01),
8632                tree_mask: TrieMask::default(),
8633            }),
8634            false,
8635        )
8636        .unwrap();
8637
8638        trie.reveal_node(
8639            Nibbles::default(),
8640            branch,
8641            Some(BranchNodeMasks {
8642                hash_mask: TrieMask::default(),
8643                tree_mask: TrieMask::new(0b01),
8644            }),
8645        )
8646        .unwrap();
8647        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();
8648
8649        // Create multiple updates that would all hit the same blinded node at path 0x0
8650        // Use full B256 keys that all start with 0x0
8651        let b256_key1 = B256::ZERO;
8652        let b256_key2 = B256::with_last_byte(1); // still starts with 0x0
8653        let b256_key3 = B256::with_last_byte(2); // still starts with 0x0
8654
8655        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8656        let value = encode_account_value(42);
8657
8658        updates.insert(b256_key1, LeafUpdate::Changed(value.clone()));
8659        updates.insert(b256_key2, LeafUpdate::Changed(value.clone()));
8660        updates.insert(b256_key3, LeafUpdate::Changed(value));
8661
8662        let proof_targets = RefCell::new(Vec::new());
8663        trie.update_leaves(&mut updates, |path, min_len| {
8664            proof_targets.borrow_mut().push((path, min_len));
8665        })
8666        .unwrap();
8667
8668        // The callback should be invoked 3 times - once for each unique full_path
8669        // The deduplication is by (full_path, min_len), not by blinded node
8670        let targets = proof_targets.borrow();
8671        assert_eq!(targets.len(), 3, "Callback should be invoked for each unique key");
8672
8673        // All should have the same min_len (1) since they all hit blinded node at path 0x0
8674        for (_, min_len) in targets.iter() {
8675            assert_eq!(*min_len, 1, "All should have min_len 1 from blinded node at 0x0");
8676        }
8677    }
8678
8679    #[test]
8680    fn test_nibbles_to_padded_b256() {
8681        // Empty nibbles should produce all zeros
8682        let empty = Nibbles::default();
8683        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&empty), B256::ZERO);
8684
8685        // Full 64-nibble path should round-trip through B256
8686        let full_key = b256!("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef");
8687        let full_nibbles = Nibbles::unpack(full_key);
8688        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&full_nibbles), full_key);
8689
8690        // Partial nibbles should be left-aligned with zero padding on the right
8691        // 4 nibbles [0x1, 0x2, 0x3, 0x4] should pack to 0x1234...00
8692        let partial = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
8693        let expected = b256!("1234000000000000000000000000000000000000000000000000000000000000");
8694        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&partial), expected);
8695
8696        // Single nibble
8697        let single = Nibbles::from_nibbles_unchecked([0xf]);
8698        let expected_single =
8699            b256!("f000000000000000000000000000000000000000000000000000000000000000");
8700        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&single), expected_single);
8701    }
8702
8703    #[test]
8704    fn test_memory_size() {
8705        // Test that memory_size returns a reasonable value for an empty trie
8706        let trie = ParallelSparseTrie::default();
8707        let empty_size = trie.memory_size();
8708
8709        // Should at least be the size of the struct itself
8710        assert!(empty_size >= core::mem::size_of::<ParallelSparseTrie>());
8711
8712        // Create a trie with some data. Set up a root branch with children at 0x1 and
8713        // 0x5, and branches at [0x1] and [0x5] pointing to 0x2 and 0x6 respectively,
8714        // so the lower subtries at [0x1, 0x2] and [0x5, 0x6] are reachable.
8715        let root_branch = create_branch_node_with_children(
8716            &[0x1, 0x5],
8717            [
8718                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
8719                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
8720            ],
8721        );
8722        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
8723
8724        let branch_at_1 =
8725            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xCC))]);
8726        let branch_at_5 =
8727            create_branch_node_with_children(&[0x6], [RlpNode::word_rlp(&B256::repeat_byte(0xDD))]);
8728        trie.reveal_nodes(&mut [
8729            ProofTrieNodeV2 {
8730                path: Nibbles::from_nibbles_unchecked([0x1]),
8731                node: branch_at_1,
8732                masks: None,
8733            },
8734            ProofTrieNodeV2 {
8735                path: Nibbles::from_nibbles_unchecked([0x5]),
8736                node: branch_at_5,
8737                masks: None,
8738            },
8739        ])
8740        .unwrap();
8741
8742        let mut nodes = vec![
8743            ProofTrieNodeV2 {
8744                path: Nibbles::from_nibbles_unchecked([0x1, 0x2]),
8745                node: TrieNodeV2::Leaf(LeafNode {
8746                    key: Nibbles::from_nibbles_unchecked([0x3, 0x4]),
8747                    value: vec![1, 2, 3],
8748                }),
8749                masks: None,
8750            },
8751            ProofTrieNodeV2 {
8752                path: Nibbles::from_nibbles_unchecked([0x5, 0x6]),
8753                node: TrieNodeV2::Leaf(LeafNode {
8754                    key: Nibbles::from_nibbles_unchecked([0x7, 0x8]),
8755                    value: vec![4, 5, 6],
8756                }),
8757                masks: None,
8758            },
8759        ];
8760        trie.reveal_nodes(&mut nodes).unwrap();
8761
8762        let populated_size = trie.memory_size();
8763
8764        // Populated trie should use more memory than an empty one
8765        assert!(populated_size > empty_size);
8766    }
8767
    /// Reveal an extension+branch root whose children are small enough to be
    /// RLP-embedded (< 32 bytes), then verify `root()` can hash the structure
    /// without panicking.
    #[test]
    fn test_reveal_extension_branch_leaves_then_root() {
        // Test structure:
        // - 0x (root): extension node with key of 63 zeroes
        // - 0x000...000 (63 zeroes): branch node with children at 1 and 2
        // - 0x000...0001 (62 zeroes + 01): leaf with value 1
        // - 0x000...0002 (62 zeroes + 02): leaf with value 2
        //
        // The leaves and branch are small enough to be embedded (< 32 bytes),
        // so we manually RLP encode them and use those encodings in parent nodes.

        // Create the extension key (63 zero nibbles)
        let ext_key: [u8; 63] = [0; 63];

        // The branch is at the end of the extension (63 zeroes)
        let branch_path = Nibbles::from_nibbles(ext_key);

        // Leaf paths: 63 zeroes + 1, 63 zeroes + 2
        let mut leaf1_path_bytes = [0u8; 64];
        leaf1_path_bytes[63] = 1;
        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);

        let mut leaf2_path_bytes = [0u8; 64];
        leaf2_path_bytes[63] = 2;
        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);

        // Create leaves with empty keys (full path consumed by extension + branch)
        // and simple values
        let leaf1_node = LeafNode::new(Nibbles::default(), vec![0x1]);
        let leaf2_node = LeafNode::new(Nibbles::default(), vec![0x2]);

        // RLP encode the leaves to get their RlpNode representations
        let leaf1_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf1_node.clone())));
        let leaf2_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf2_node.clone())));

        // Create the branch node with children at indices 1 and 2, using the RLP-encoded leaves.
        // In V2, branch and extension are combined: the key holds the extension prefix.
        let state_mask = TrieMask::new(0b0000_0110); // bits 1 and 2 set
        let stack = vec![leaf1_rlp, leaf2_rlp];

        // First encode the bare branch (empty key) to get its RlpNode
        let bare_branch = BranchNodeV2::new(Nibbles::new(), stack.clone(), state_mask, None);
        let branch_rlp = RlpNode::from_rlp(&alloy_rlp::encode(&bare_branch));

        // Create the combined extension+branch node as the root.
        let root_node = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::from_nibbles(ext_key),
            stack.clone(),
            state_mask,
            Some(branch_rlp),
        ));

        // Initialize trie with the extension+branch as root
        let mut trie = ParallelSparseTrie::from_root(root_node, None, false).unwrap();

        // Reveal the branch and leaves
        let mut nodes = vec![
            ProofTrieNodeV2 {
                path: branch_path,
                node: TrieNodeV2::Branch(BranchNodeV2::new(
                    Nibbles::new(),
                    stack,
                    state_mask,
                    None,
                )),
                masks: None,
            },
            ProofTrieNodeV2 { path: leaf1_path, node: TrieNodeV2::Leaf(leaf1_node), masks: None },
            ProofTrieNodeV2 { path: leaf2_path, node: TrieNodeV2::Leaf(leaf2_node), masks: None },
        ];
        trie.reveal_nodes(&mut nodes).unwrap();

        // Add the leaf paths to prefix_set so that root() will update their hashes
        trie.prefix_set.insert(leaf1_path);
        trie.prefix_set.insert(leaf2_path);

        // Call root() to compute the trie root hash; the test passes if hashing
        // the embedded nodes does not panic.
        let _root = trie.root();
    }
8847
8848    #[test]
8849    fn test_update_leaf_creates_embedded_nodes_then_root() {
8850        // Similar structure to test_reveal_extension_branch_leaves_then_root, but created
8851        // via update_leaf calls on an empty trie instead of revealing pre-built nodes.
8852        //
8853        // Two leaves with paths that share a long common prefix will create:
8854        // - Extension node at root with the shared prefix
8855        // - Branch node where the paths diverge
8856        // - Two leaf nodes (embedded in the branch since they're small)
8857
8858        // Create two paths that share 63 nibbles and differ only at the 64th
8859        let mut leaf1_path_bytes = [0u8; 64];
8860        leaf1_path_bytes[63] = 1;
8861        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);
8862
8863        let mut leaf2_path_bytes = [0u8; 64];
8864        leaf2_path_bytes[63] = 2;
8865        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);
8866
8867        // Create an empty trie and update with two leaves
8868        let mut trie = ParallelSparseTrie::default();
8869        trie.update_leaf(leaf1_path, vec![0x1], DefaultTrieNodeProvider).unwrap();
8870        trie.update_leaf(leaf2_path, vec![0x2], DefaultTrieNodeProvider).unwrap();
8871
8872        // Call root() to compute the trie root hash
8873        let _root = trie.root();
8874    }
8875}