//! Parallel sparse trie implementation (`reth_trie_sparse/parallel.rs`).

1#[cfg(feature = "trie-debug")]
2use crate::debug_recorder::{LeafUpdateRecord, ProofTrieNodeRecord, RecordedOp, TrieDebugRecorder};
3use crate::{
4    lower::LowerSparseSubtrie, provider::TrieNodeProvider, LeafLookup, LeafLookupError,
5    RlpNodeStackItem, SparseNode, SparseNodeState, SparseNodeType, SparseTrie, SparseTrieUpdates,
6};
7use alloc::{borrow::Cow, boxed::Box, vec, vec::Vec};
8use alloy_primitives::{
9    map::{Entry, HashMap, HashSet},
10    B256, U256,
11};
12use alloy_rlp::Decodable;
13use alloy_trie::{BranchNodeCompact, TrieMask, EMPTY_ROOT_HASH};
14use core::cmp::{Ord, Ordering, PartialOrd};
15use reth_execution_errors::{SparseTrieError, SparseTrieErrorKind, SparseTrieResult};
16#[cfg(feature = "metrics")]
17use reth_primitives_traits::FastInstant as Instant;
18use reth_trie_common::{
19    prefix_set::{PrefixSet, PrefixSetMut},
20    BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles,
21    ProofTrieNodeV2, RlpNode, TrieNodeV2,
22};
23use smallvec::SmallVec;
24use tracing::{instrument, trace};
25
/// The maximum length of a path, in nibbles, which belongs to the upper subtrie of a
/// [`ParallelSparseTrie`]. All longer paths belong to a lower subtrie.
pub const UPPER_TRIE_MAX_DEPTH: usize = 2;

/// Number of lower subtries which are managed by the [`ParallelSparseTrie`]: one per possible
/// prefix of [`UPPER_TRIE_MAX_DEPTH`] nibbles (16 values per nibble, so 16^2 = 256).
pub const NUM_LOWER_SUBTRIES: usize = 16usize.pow(UPPER_TRIE_MAX_DEPTH as u32);
32
/// Configuration for controlling when parallelism is enabled in [`ParallelSparseTrie`] operations.
///
/// These thresholds let callers force the serial code paths for small workloads, avoiding
/// parallel scheduling overhead when there is little work to distribute.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct ParallelismThresholds {
    /// Minimum number of nodes to reveal before parallel processing is enabled.
    /// When `reveal_nodes` has fewer nodes than this threshold, they will be processed serially.
    pub min_revealed_nodes: usize,
    /// Minimum number of changed keys (prefix set length) before parallel processing is enabled
    /// for hash updates. When updating subtrie hashes with fewer changed keys than this threshold,
    /// the updates will be processed serially.
    pub min_updated_nodes: usize,
}
44
/// A revealed sparse trie with subtries that can be updated in parallel.
///
/// ## Structure
///
/// The trie is divided into two tiers for efficient parallel processing:
/// - **Upper subtrie**: Contains nodes with paths shorter than [`UPPER_TRIE_MAX_DEPTH`]
/// - **Lower subtries**: An array of [`NUM_LOWER_SUBTRIES`] subtries, each handling nodes with
///   paths of at least [`UPPER_TRIE_MAX_DEPTH`] nibbles
///
/// Node placement is determined by path depth:
/// - Paths with < [`UPPER_TRIE_MAX_DEPTH`] nibbles go to the upper subtrie
/// - Paths with >= [`UPPER_TRIE_MAX_DEPTH`] nibbles go to lower subtries, indexed by their first
///   [`UPPER_TRIE_MAX_DEPTH`] nibbles.
///
/// Each lower subtrie tracks its root via the `path` field, which represents the shortest path
/// in that subtrie. This path will have at least [`UPPER_TRIE_MAX_DEPTH`] nibbles, but may be
/// longer when an extension node in the upper trie "reaches into" the lower subtrie. For example,
/// if the upper trie has an extension from `0x1` to `0x12345`, then the lower subtrie for prefix
/// `0x12` will have its root at path `0x12345` rather than at `0x12`.
///
/// ## Node Revealing
///
/// The trie uses lazy loading to efficiently handle large state tries. Nodes can be:
/// - **Blind nodes**: Stored as hashes on [`SparseNode::Branch::blinded_hashes`]
/// - **Revealed nodes**: Fully loaded nodes (Branch, Extension, Leaf) with complete structure
///
/// Note: An empty trie contains an `EmptyRoot` node at the root path, rather than no nodes at all.
/// A trie with no nodes is blinded, its root may be `EmptyRoot` or some other node type.
///
/// Revealing is generally done using pre-loaded node data provided via `reveal_nodes`. In
/// certain cases, such as edge-cases when updating/removing leaves, nodes are revealed on-demand.
///
/// ## Leaf Operations
///
/// **Update**: When updating a leaf, the new value is stored in the appropriate subtrie's values
/// map. If the leaf is new, the trie structure is updated by walking to the leaf from the root,
/// creating necessary intermediate branch nodes.
///
/// **Removal**: Leaf removal may require parent node modifications. The algorithm walks up the
/// trie, removing nodes that become empty and converting single-child branches to extensions.
///
/// During leaf operations the overall structure of the trie may change, causing nodes to be moved
/// from the upper to lower trie or vice-versa.
///
/// The `prefix_set` is modified during both leaf updates and removals to track changed leaf paths.
///
/// ## Root Hash Calculation
///
/// Root hash computation follows a bottom-up approach:
/// 1. Update hashes for all modified lower subtries (can be done in parallel)
/// 2. Update hashes for the upper subtrie (which may reference lower subtrie hashes)
/// 3. Calculate the final root hash from the upper subtrie's root node
///
/// The `prefix_set` tracks which paths have been modified, enabling incremental updates instead of
/// recalculating the entire trie.
///
/// ## Invariants
///
/// - Each leaf entry in the `subtries` and `upper_trie` collection must have a corresponding entry
///   in `values` collection. If the root node is a leaf, it must also have an entry in `values`.
/// - All keys in `values` collection are full leaf paths.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ParallelSparseTrie {
    /// This contains the trie nodes for the upper part of the trie.
    upper_subtrie: Box<SparseSubtrie>,
    /// An array containing the subtries at the second level of the trie.
    lower_subtries: Box<[LowerSparseSubtrie; NUM_LOWER_SUBTRIES]>,
    /// Set of prefixes (key paths) that have been marked as updated.
    /// This is used to track which parts of the trie need to be recalculated.
    prefix_set: PrefixSetMut,
    /// Optional tracking of trie updates for later use.
    updates: Option<SparseTrieUpdates>,
    /// Branch node masks containing `tree_mask` and `hash_mask` for each path.
    /// - `tree_mask`: When a bit is set, the corresponding child subtree is stored in the
    ///   database.
    /// - `hash_mask`: When a bit is set, the corresponding child is stored as a hash in the
    ///   database.
    branch_node_masks: BranchNodeMasksMap,
    /// Reusable buffer pool used for collecting [`SparseTrieUpdatesAction`]s during hash
    /// computations.
    update_actions_buffers: Vec<Vec<SparseTrieUpdatesAction>>,
    /// Thresholds controlling when parallelism is enabled for different operations.
    parallelism_thresholds: ParallelismThresholds,
    /// Metrics for the parallel sparse trie.
    #[cfg(feature = "metrics")]
    metrics: crate::metrics::ParallelSparseTrieMetrics,
    /// Debug recorder for tracking mutating operations.
    #[cfg(feature = "trie-debug")]
    debug_recorder: TrieDebugRecorder,
}
135
136impl Default for ParallelSparseTrie {
137    fn default() -> Self {
138        Self {
139            upper_subtrie: Box::new(SparseSubtrie {
140                nodes: HashMap::from_iter([(Nibbles::default(), SparseNode::Empty)]),
141                ..Default::default()
142            }),
143            lower_subtries: Box::new(
144                [const { LowerSparseSubtrie::Blind(None) }; NUM_LOWER_SUBTRIES],
145            ),
146            prefix_set: PrefixSetMut::default(),
147            updates: None,
148            branch_node_masks: BranchNodeMasksMap::default(),
149            update_actions_buffers: Vec::default(),
150            parallelism_thresholds: Default::default(),
151            #[cfg(feature = "metrics")]
152            metrics: Default::default(),
153            #[cfg(feature = "trie-debug")]
154            debug_recorder: Default::default(),
155        }
156    }
157}
158
159impl SparseTrie for ParallelSparseTrie {
160    fn set_root(
161        &mut self,
162        root: TrieNodeV2,
163        masks: Option<BranchNodeMasks>,
164        retain_updates: bool,
165    ) -> SparseTrieResult<()> {
166        #[cfg(feature = "trie-debug")]
167        self.debug_recorder.record(RecordedOp::SetRoot {
168            node: ProofTrieNodeRecord::from_proof_trie_node_v2(&ProofTrieNodeV2 {
169                path: Nibbles::default(),
170                node: root.clone(),
171                masks,
172            }),
173        });
174
175        // A fresh/cleared `ParallelSparseTrie` has a `SparseNode::Empty` at its root in the upper
176        // subtrie. Delete that so we can reveal the new root node.
177        let path = Nibbles::default();
178        let _removed_root = self.upper_subtrie.nodes.remove(&path).expect("root node should exist");
179        debug_assert_eq!(_removed_root, SparseNode::Empty);
180
181        self.set_updates(retain_updates);
182
183        if let Some(masks) = masks {
184            let branch_path = if let TrieNodeV2::Branch(branch) = &root {
185                branch.key
186            } else {
187                Nibbles::default()
188            };
189
190            self.branch_node_masks.insert(branch_path, masks);
191        }
192
193        self.reveal_upper_node(Nibbles::default(), &root, masks)
194    }
195
196    fn set_updates(&mut self, retain_updates: bool) {
197        self.updates = retain_updates.then(Default::default);
198    }
199
    /// Reveals a batch of pre-loaded proof nodes, routing each to the upper subtrie or to the
    /// lower subtrie owning its path prefix. Lower-subtrie reveals run in parallel via rayon
    /// when the batch size passes the configured threshold (and `std` is enabled).
    ///
    /// Note: `nodes` is sorted in place as part of grouping; callers must not rely on its order.
    fn reveal_nodes(&mut self, nodes: &mut [ProofTrieNodeV2]) -> SparseTrieResult<()> {
        if nodes.is_empty() {
            return Ok(())
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::RevealNodes {
            nodes: nodes.iter().map(ProofTrieNodeRecord::from_proof_trie_node_v2).collect(),
        });

        // Sort nodes first by their subtrie, and secondarily by their path. This allows for
        // grouping nodes by their subtrie using `chunk_by`.
        nodes.sort_unstable_by(
            |ProofTrieNodeV2 { path: path_a, .. }, ProofTrieNodeV2 { path: path_b, .. }| {
                let subtrie_type_a = SparseSubtrieType::from_path(path_a);
                let subtrie_type_b = SparseSubtrieType::from_path(path_b);
                subtrie_type_a.cmp(&subtrie_type_b).then_with(|| path_a.cmp(path_b))
            },
        );

        // Update the top-level branch node masks. This is simple and can't be done in parallel.
        self.branch_node_masks.reserve(nodes.len());
        for ProofTrieNodeV2 { path, masks, node } in nodes.iter() {
            if let Some(branch_masks) = masks {
                // Use proper path for branch nodes by combining path and extension key.
                let path = if let TrieNodeV2::Branch(branch) = node &&
                    !branch.key.is_empty()
                {
                    let mut path = *path;
                    path.extend(&branch.key);
                    path
                } else {
                    *path
                };
                self.branch_node_masks.insert(path, *branch_masks);
            }
        }

        // Due to the sorting all upper subtrie nodes will be at the front of the slice. We split
        // them off from the rest to be handled specially by
        // `ParallelSparseTrie::reveal_upper_node`.
        let num_upper_nodes = nodes
            .iter()
            .position(|n| !SparseSubtrieType::path_len_is_upper(n.path.len()))
            .unwrap_or(nodes.len());
        let (upper_nodes, lower_nodes) = nodes.split_at(num_upper_nodes);

        // Reserve the capacity of the upper subtrie's `nodes` HashMap before iterating, so we don't
        // end up making many small capacity changes as we loop.
        self.upper_subtrie.nodes.reserve(upper_nodes.len());
        for node in upper_nodes {
            self.reveal_upper_node(node.path, &node.node, node.masks)?;
        }

        // Snapshot which lower subtries are reachable from the (now updated) upper subtrie;
        // nodes targeting unreachable subtries are skipped rather than treated as errors.
        let reachable_subtries = self.reachable_subtries();

        // Best-effort for boundary nodes: if the parent upper node exists as a branch and the
        // boundary child is still blinded, unset that blinded bit and carry the hash into
        // `reveal_node`. If the parent path is absent/non-branch (for example upper extension
        // crossing the boundary), skip without failing.
        let hashes_from_upper = nodes
            .iter()
            .filter_map(|node| {
                // Only nodes exactly on the upper/lower boundary, in reachable subtries, apply.
                if node.path.len() != UPPER_TRIE_MAX_DEPTH ||
                    !reachable_subtries.get(path_subtrie_index_unchecked(&node.path))
                {
                    return None;
                }

                let parent_path = node.path.slice(0..UPPER_TRIE_MAX_DEPTH - 1);
                let Some(SparseNode::Branch { blinded_mask, blinded_hashes, .. }) =
                    self.upper_subtrie.nodes.get_mut(&parent_path)
                else {
                    return None;
                };

                let nibble = node.path.last().unwrap();
                blinded_mask.is_bit_set(nibble).then(|| {
                    blinded_mask.unset_bit(nibble);
                    (node.path, blinded_hashes[nibble as usize])
                })
            })
            .collect::<HashMap<_, _>>();

        // Serial path: used when the batch is too small for parallelism to pay off, and
        // always in no_std builds.
        if !self.is_reveal_parallelism_enabled(lower_nodes.len()) {
            for node in lower_nodes {
                let idx = path_subtrie_index_unchecked(&node.path);
                if !reachable_subtries.get(idx) {
                    trace!(
                        target: "trie::parallel_sparse",
                        reveal_path = ?node.path,
                        "Node's lower subtrie is not reachable, skipping",
                    );
                    continue;
                }
                // For boundary leaves, check reachability from upper subtrie's parent branch
                if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                    !Self::is_boundary_leaf_reachable(
                        &self.upper_subtrie.nodes,
                        &node.path,
                        &node.node,
                    )
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        path = ?node.path,
                        "Boundary leaf not reachable from upper subtrie, skipping",
                    );
                    continue;
                }
                self.lower_subtries[idx].reveal(&node.path);
                self.lower_subtries[idx].as_revealed_mut().expect("just revealed").reveal_node(
                    node.path,
                    &node.node,
                    node.masks,
                    hashes_from_upper.get(&node.path).copied(),
                )?;
            }
            return Ok(())
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_reveal_parallelism_enabled");

        #[cfg(feature = "std")]
        // Reveal lower subtrie nodes in parallel
        {
            use rayon::iter::{IntoParallelIterator, ParallelIterator};
            use tracing::Span;

            // Capture the current span so it can be propagated to rayon worker threads
            let parent_span = Span::current();

            // Capture reference to upper subtrie nodes for boundary leaf reachability checks
            let upper_nodes = &self.upper_subtrie.nodes;

            // Group the nodes by lower subtrie.
            let results = lower_nodes
                .chunk_by(|node_a, node_b| {
                    SparseSubtrieType::from_path(&node_a.path) ==
                        SparseSubtrieType::from_path(&node_b.path)
                })
                // Filter out chunks for unreachable subtries.
                .filter_map(|nodes| {
                    let mut nodes = nodes
                        .iter()
                        .filter(|node| {
                            // For boundary leaves, check reachability from upper subtrie's parent
                            // branch.
                            if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                                !Self::is_boundary_leaf_reachable(
                                    upper_nodes,
                                    &node.path,
                                    &node.node,
                                )
                            {
                                trace!(
                                    target: "trie::parallel_sparse",
                                    path = ?node.path,
                                    "Boundary leaf not reachable from upper subtrie, skipping",
                                );
                                false
                            } else {
                                true
                            }
                        })
                        .peekable();

                    let node = nodes.peek()?;
                    let idx =
                        SparseSubtrieType::from_path(&node.path).lower_index().unwrap_or_else(
                            || panic!("upper subtrie node {node:?} found amongst lower nodes"),
                        );

                    if !reachable_subtries.get(idx) {
                        trace!(
                            target: "trie::parallel_sparse",
                            nodes = ?nodes,
                            "Lower subtrie is not reachable, skipping reveal",
                        );
                        return None;
                    }

                    // due to the nodes being sorted secondarily on their path, and chunk_by keeping
                    // the first element of each group, the `path` here will necessarily be the
                    // shortest path being revealed for each subtrie. Therefore we can reveal the
                    // subtrie itself using this path and retain correct behavior.
                    self.lower_subtries[idx].reveal(&node.path);
                    // `take_revealed` moves the subtrie out so it can be sent to a worker
                    // thread; it is put back into `lower_subtries` after the parallel phase.
                    Some((
                        idx,
                        self.lower_subtries[idx].take_revealed().expect("just revealed"),
                        nodes,
                    ))
                })
                .collect::<Vec<_>>()
                .into_par_iter()
                .map(|(subtrie_idx, mut subtrie, nodes)| {
                    // Enter the parent span to propagate context (e.g., hashed_address for storage
                    // tries) to the worker thread
                    let _guard = parent_span.enter();

                    // reserve space in the HashMap ahead of time; doing it on a node-by-node basis
                    // can cause multiple re-allocations as the hashmap grows.
                    subtrie.nodes.reserve(nodes.size_hint().1.unwrap_or(0));

                    for node in nodes {
                        // Reveal each node in the subtrie, returning early on any errors
                        let res = subtrie.reveal_node(
                            node.path,
                            &node.node,
                            node.masks,
                            hashes_from_upper.get(&node.path).copied(),
                        );
                        if res.is_err() {
                            // Return the subtrie even on error so it is restored below.
                            return (subtrie_idx, subtrie, res.map(|_| ()))
                        }
                    }
                    (subtrie_idx, subtrie, Ok(()))
                })
                .collect::<Vec<_>>();

            // Put subtries back which were processed in the rayon pool, collecting the last
            // seen error in the process and returning that.
            let mut any_err = Ok(());
            for (subtrie_idx, subtrie, res) in results {
                self.lower_subtries[subtrie_idx] = LowerSparseSubtrie::Revealed(subtrie);
                if res.is_err() {
                    any_err = res;
                }
            }

            any_err
        }
    }
434
    /// Inserts or updates the leaf at `full_path` (a full 64-nibble key) with `value`.
    ///
    /// If the leaf already exists, only its value is replaced. Otherwise the trie structure
    /// is grown by walking from the root; nodes created in the upper subtrie whose paths
    /// belong to a lower subtrie are migrated there, and traversal continues inside the
    /// lower subtrie if the walk crosses the boundary. On error, any value inserted by this
    /// call is removed again so the trie is left unchanged.
    fn update_leaf<P: TrieNodeProvider>(
        &mut self,
        full_path: Nibbles,
        value: Vec<u8>,
        _provider: P,
    ) -> SparseTrieResult<()> {
        debug_assert_eq!(
            full_path.len(),
            B256::len_bytes() * 2,
            "update_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
            full_path.len()
        );

        trace!(
            target: "trie::parallel_sparse",
            ?full_path,
            value_len = value.len(),
            "Updating leaf",
        );

        // Check if the value already exists - if so, just update it (no structural changes needed)
        if self.upper_subtrie.inner.values.contains_key(&full_path) {
            self.prefix_set.insert(full_path);
            self.upper_subtrie.inner.values.insert(full_path, value);
            return Ok(());
        }
        // Also check lower subtries for existing value
        if let Some(subtrie) = self.lower_subtrie_for_path(&full_path) &&
            subtrie.inner.values.contains_key(&full_path)
        {
            self.prefix_set.insert(full_path);
            self.lower_subtrie_for_path_mut(&full_path)
                .expect("subtrie exists")
                .inner
                .values
                .insert(full_path, value);
            return Ok(());
        }

        // Insert value into upper subtrie temporarily. We'll move it to the correct subtrie
        // during traversal, or clean it up if we error.
        self.upper_subtrie.inner.values.insert(full_path, value.clone());

        // Start at the root, traversing until we find either the node to update or a subtrie to
        // update.
        //
        // We first traverse the upper subtrie for two levels, moving any created nodes to a
        // lower subtrie if necessary.
        //
        // We use `next` to keep track of the next node that we need to traverse to, and
        // `new_nodes` to keep track of any nodes that were created during the traversal.
        let mut new_nodes = Vec::new();
        let mut next = Some(Nibbles::default());

        // Traverse the upper subtrie to find the node to update or the subtrie to update.
        //
        // We stop when the next node to traverse would be in a lower subtrie, or if there are no
        // more nodes to traverse.
        while let Some(current) =
            next.as_mut().filter(|next| SparseSubtrieType::path_len_is_upper(next.len()))
        {
            // Traverse the next node, keeping track of any changed nodes and the next step in the
            // trie. If traversal fails, clean up the value we inserted and propagate the error.
            let step_result = self.upper_subtrie.update_next_node(current, &full_path);

            if step_result.is_err() {
                self.upper_subtrie.inner.values.remove(&full_path);
                return step_result.map(|_| ());
            }

            match step_result? {
                LeafUpdateStep::Continue => {}
                LeafUpdateStep::Complete { inserted_nodes } => {
                    new_nodes.extend(inserted_nodes);
                    next = None;
                }
                LeafUpdateStep::NodeNotFound => {
                    next = None;
                }
            }
        }

        // Move nodes from upper subtrie to lower subtries
        for node_path in &new_nodes {
            // Skip nodes that belong in the upper subtrie
            if SparseSubtrieType::path_len_is_upper(node_path.len()) {
                continue
            }

            let node =
                self.upper_subtrie.nodes.remove(node_path).expect("node belongs to upper subtrie");

            // If it's a leaf node, extract its value before getting mutable reference to subtrie.
            let leaf_value = if let SparseNode::Leaf { key, .. } = &node {
                let mut leaf_full_path = *node_path;
                leaf_full_path.extend(key);
                Some((
                    leaf_full_path,
                    self.upper_subtrie
                        .inner
                        .values
                        .remove(&leaf_full_path)
                        .expect("leaf nodes have associated values entries"),
                ))
            } else {
                None
            };

            // Get or create the subtrie with the exact node path (not truncated to 2 nibbles).
            let subtrie = self.subtrie_for_path_mut(node_path);

            // Insert the leaf value if we have one, preserving the leaf/value invariant.
            if let Some((leaf_full_path, value)) = leaf_value {
                subtrie.inner.values.insert(leaf_full_path, value);
            }

            // Insert the node into the lower subtrie
            subtrie.nodes.insert(*node_path, node);
        }

        // If we reached the max depth of the upper trie, we may have had more nodes to insert.
        if let Some(next_path) = next.filter(|n| !SparseSubtrieType::path_len_is_upper(n.len())) {
            // The value was inserted into the upper subtrie's `values` at the top of this method.
            // At this point we know the value is not in the upper subtrie, and the call to
            // `update_leaf` below will insert it into the lower subtrie. So remove it from the
            // upper subtrie.
            self.upper_subtrie.inner.values.remove(&full_path);

            // Use subtrie_for_path to ensure the subtrie has the correct path.
            //
            // The next_path here represents where we need to continue traversal, which may
            // be longer than 2 nibbles if we're following an extension node.
            let subtrie = self.subtrie_for_path_mut(&next_path);

            // Create an empty root at the subtrie path if the subtrie is empty
            if subtrie.nodes.is_empty() {
                subtrie.nodes.insert(subtrie.path, SparseNode::Empty);
            }

            // If we didn't update the target leaf, we need to call update_leaf on the subtrie
            // to ensure that the leaf is updated correctly.
            if let Err(e) = subtrie.update_leaf(full_path, value) {
                // Clean up: remove the value from lower subtrie if it was inserted
                if let Some(lower) = self.lower_subtrie_for_path_mut(&full_path) {
                    lower.inner.values.remove(&full_path);
                }
                return Err(e);
            }
        }

        // Insert into prefix_set only after all operations succeed
        self.prefix_set.insert(full_path);

        Ok(())
    }
590
591    fn remove_leaf<P: TrieNodeProvider>(
592        &mut self,
593        full_path: &Nibbles,
594        _provider: P,
595    ) -> SparseTrieResult<()> {
596        debug_assert_eq!(
597            full_path.len(),
598            B256::len_bytes() * 2,
599            "remove_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
600            full_path.len()
601        );
602
603        trace!(
604            target: "trie::parallel_sparse",
605            ?full_path,
606            "Removing leaf",
607        );
608
609        // When removing a leaf node it's possibly necessary to modify its parent node, and possibly
610        // the parent's parent node. It is not ever necessary to descend further than that; once an
611        // extension node is hit it must terminate in a branch or the root, which won't need further
612        // updates. So the situation with maximum updates is:
613        //
614        // - Leaf
615        // - Branch with 2 children, one being this leaf
616        // - Extension
617        //
618        // ...which will result in just a leaf or extension, depending on what the branch's other
619        // child is.
620        //
621        // Therefore, first traverse the trie in order to find the leaf node and at most its parent
622        // and grandparent.
623
624        let leaf_path;
625        let leaf_subtrie_type;
626
627        let mut branch_parent_path: Option<Nibbles> = None;
628        let mut branch_parent_node: Option<SparseNode> = None;
629
630        let mut ext_grandparent_path: Option<Nibbles> = None;
631        let mut ext_grandparent_node: Option<SparseNode> = None;
632
633        let mut curr_path = Nibbles::new(); // start traversal from root
634        let mut curr_subtrie_type = SparseSubtrieType::Upper;
635
636        // List of node paths which need to be marked dirty
637        let mut paths_to_mark_dirty = Vec::new();
638
639        loop {
640            let curr_subtrie = match curr_subtrie_type {
641                SparseSubtrieType::Upper => &mut self.upper_subtrie,
642                SparseSubtrieType::Lower(idx) => {
643                    self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
644                }
645            };
646            let curr_node = curr_subtrie.nodes.get_mut(&curr_path).unwrap();
647
648            match Self::find_next_to_leaf(&curr_path, curr_node, full_path) {
649                FindNextToLeafOutcome::NotFound => return Ok(()), // leaf isn't in the trie
650                FindNextToLeafOutcome::BlindedNode(path) => {
651                    return Err(SparseTrieErrorKind::BlindedNode(path).into())
652                }
653                FindNextToLeafOutcome::Found => {
654                    // this node is the target leaf
655                    leaf_path = curr_path;
656                    leaf_subtrie_type = curr_subtrie_type;
657                    break;
658                }
659                FindNextToLeafOutcome::ContinueFrom(next_path) => {
660                    // Any branches/extensions along the path to the leaf will have their `hash`
661                    // field unset, as it will no longer be valid once the leaf is removed.
662                    match curr_node {
663                        SparseNode::Branch { .. } => {
664                            paths_to_mark_dirty
665                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));
666
667                            // If there is already an extension leading into a branch, then that
668                            // extension is no longer relevant.
669                            match (&branch_parent_path, &ext_grandparent_path) {
670                                (Some(branch), Some(ext)) if branch.len() > ext.len() => {
671                                    ext_grandparent_path = None;
672                                    ext_grandparent_node = None;
673                                }
674                                _ => (),
675                            };
676                            branch_parent_path = Some(curr_path);
677                            branch_parent_node = Some(curr_node.clone());
678                        }
679                        SparseNode::Extension { .. } => {
680                            paths_to_mark_dirty
681                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));
682
683                            // We can assume a new branch node will be found after the extension, so
684                            // there's no need to modify branch_parent_path/node even if it's
685                            // already set.
686                            ext_grandparent_path = Some(curr_path);
687                            ext_grandparent_node = Some(curr_node.clone());
688                        }
689                        SparseNode::Empty | SparseNode::Leaf { .. } => {
690                            unreachable!(
691                                "find_next_to_leaf only continues to a branch or extension"
692                            )
693                        }
694                    }
695
696                    curr_path = next_path;
697
698                    // Update subtrie type if we're crossing into the lower trie.
699                    let next_subtrie_type = SparseSubtrieType::from_path(&curr_path);
700                    if matches!(curr_subtrie_type, SparseSubtrieType::Upper) &&
701                        matches!(next_subtrie_type, SparseSubtrieType::Lower(_))
702                    {
703                        curr_subtrie_type = next_subtrie_type;
704                    }
705                }
706            };
707        }
708
709        // Before mutating, check if branch collapse would require revealing a blinded node.
710        // This ensures remove_leaf is atomic: if it errors, the trie is unchanged.
711        if let (Some(branch_path), Some(SparseNode::Branch { state_mask, blinded_mask, .. })) =
712            (&branch_parent_path, &branch_parent_node)
713        {
714            let mut check_mask = *state_mask;
715            let child_nibble = leaf_path.get_unchecked(branch_path.len());
716            check_mask.unset_bit(child_nibble);
717
718            if check_mask.count_bits() == 1 {
719                let remaining_nibble =
720                    check_mask.first_set_bit_index().expect("state mask is not empty");
721
722                if blinded_mask.is_bit_set(remaining_nibble) {
723                    let mut path = *branch_path;
724                    path.push_unchecked(remaining_nibble);
725                    return Err(SparseTrieErrorKind::BlindedNode(path).into());
726                }
727            }
728        }
729
730        // We've traversed to the leaf and collected its ancestors as necessary. Remove the leaf
731        // from its SparseSubtrie and reset the hashes of the nodes along the path.
732        self.prefix_set.insert(*full_path);
733        let leaf_subtrie = match leaf_subtrie_type {
734            SparseSubtrieType::Upper => &mut self.upper_subtrie,
735            SparseSubtrieType::Lower(idx) => {
736                self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
737            }
738        };
739        leaf_subtrie.inner.values.remove(full_path);
740        for (subtrie_type, path) in paths_to_mark_dirty {
741            let node = match subtrie_type {
742                SparseSubtrieType::Upper => self.upper_subtrie.nodes.get_mut(&path),
743                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx]
744                    .as_revealed_mut()
745                    .expect("lower subtrie is revealed")
746                    .nodes
747                    .get_mut(&path),
748            }
749            .expect("node exists");
750
751            match node {
752                SparseNode::Extension { state, .. } | SparseNode::Branch { state, .. } => {
753                    *state = SparseNodeState::Dirty
754                }
755                SparseNode::Empty | SparseNode::Leaf { .. } => {
756                    unreachable!(
757                        "only branch and extension nodes can be marked dirty when removing a leaf"
758                    )
759                }
760            }
761        }
762        self.remove_node(&leaf_path);
763
764        // If the leaf was at the root replace its node with the empty value. We can stop execution
765        // here, all remaining logic is related to the ancestors of the leaf.
766        if leaf_path.is_empty() {
767            self.upper_subtrie.nodes.insert(leaf_path, SparseNode::Empty);
768            return Ok(())
769        }
770
771        // If there is a parent branch node (very likely, unless the leaf is at the root) execute
772        // any required changes for that node, relative to the removed leaf.
773        if let (
774            Some(branch_path),
775            &Some(SparseNode::Branch { mut state_mask, blinded_mask, ref blinded_hashes, .. }),
776        ) = (&branch_parent_path, &branch_parent_node)
777        {
778            let child_nibble = leaf_path.get_unchecked(branch_path.len());
779            state_mask.unset_bit(child_nibble);
780
781            let new_branch_node = if state_mask.count_bits() == 1 {
782                // If only one child is left set in the branch node, we need to collapse it. Get
783                // full path of the only child node left.
784                let remaining_child_nibble =
785                    state_mask.first_set_bit_index().expect("state mask is not empty");
786                let mut remaining_child_path = *branch_path;
787                remaining_child_path.push_unchecked(remaining_child_nibble);
788
789                trace!(
790                    target: "trie::parallel_sparse",
791                    ?leaf_path,
792                    ?branch_path,
793                    ?remaining_child_path,
794                    "Branch node has only one child",
795                );
796
797                // If the remaining child node is not yet revealed then we have to reveal it here,
798                // otherwise it's not possible to know how to collapse the branch.
799                if blinded_mask.is_bit_set(remaining_child_nibble) {
800                    return Err(SparseTrieErrorKind::BlindedNode(remaining_child_path).into());
801                }
802
803                let remaining_child_node = self
804                    .subtrie_for_path_mut(&remaining_child_path)
805                    .nodes
806                    .get(&remaining_child_path)
807                    .unwrap();
808
809                let (new_branch_node, remove_child) = Self::branch_changes_on_leaf_removal(
810                    branch_path,
811                    &remaining_child_path,
812                    remaining_child_node,
813                );
814
815                if remove_child {
816                    self.move_value_on_leaf_removal(
817                        branch_path,
818                        &new_branch_node,
819                        &remaining_child_path,
820                    );
821                    self.remove_node(&remaining_child_path);
822                }
823
824                if let Some(updates) = self.updates.as_mut() {
825                    updates.updated_nodes.remove(branch_path);
826                    updates.removed_nodes.insert(*branch_path);
827                }
828
829                new_branch_node
830            } else {
831                // If more than one child is left set in the branch, we just re-insert it with the
832                // updated state_mask.
833                SparseNode::Branch {
834                    state_mask,
835                    blinded_mask,
836                    blinded_hashes: blinded_hashes.clone(),
837                    state: SparseNodeState::Dirty,
838                }
839            };
840
841            let branch_subtrie = self.subtrie_for_path_mut(branch_path);
842            branch_subtrie.nodes.insert(*branch_path, new_branch_node.clone());
843            branch_parent_node = Some(new_branch_node);
844        };
845
846        // If there is a grandparent extension node then there will necessarily be a parent branch
847        // node. Execute any required changes for the extension node, relative to the (possibly now
848        // replaced with a leaf or extension) branch node.
849        if let (Some(ext_path), Some(SparseNode::Extension { key: shortkey, .. })) =
850            (ext_grandparent_path, &ext_grandparent_node)
851        {
852            let ext_subtrie = self.subtrie_for_path_mut(&ext_path);
853            let branch_path = branch_parent_path.as_ref().unwrap();
854
855            if let Some(new_ext_node) = Self::extension_changes_on_leaf_removal(
856                &ext_path,
857                shortkey,
858                branch_path,
859                branch_parent_node.as_ref().unwrap(),
860            ) {
861                ext_subtrie.nodes.insert(ext_path, new_ext_node.clone());
862                self.move_value_on_leaf_removal(&ext_path, &new_ext_node, branch_path);
863                self.remove_node(branch_path);
864            }
865        }
866
867        Ok(())
868    }
869
870    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
871    fn root(&mut self) -> B256 {
872        trace!(target: "trie::parallel_sparse", "Calculating trie root hash");
873
874        #[cfg(feature = "trie-debug")]
875        self.debug_recorder.record(RecordedOp::Root);
876
877        if self.prefix_set.is_empty() &&
878            let Some(rlp_node) = self
879                .upper_subtrie
880                .nodes
881                .get(&Nibbles::default())
882                .and_then(|node| node.cached_rlp_node())
883        {
884            return rlp_node
885                .as_hash()
886                .expect("RLP-encoding of the root node cannot be less than 32 bytes")
887        }
888
889        // Update all lower subtrie hashes
890        self.update_subtrie_hashes();
891
892        // Update hashes for the upper subtrie using our specialized function
893        // that can access both upper and lower subtrie nodes
894        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
895        let root_rlp = self.update_upper_subtrie_hashes(&mut prefix_set);
896
897        // Return the root hash
898        root_rlp.as_hash().unwrap_or(EMPTY_ROOT_HASH)
899    }
900
901    fn is_root_cached(&self) -> bool {
902        self.prefix_set.is_empty() &&
903            self.upper_subtrie
904                .nodes
905                .get(&Nibbles::default())
906                .is_some_and(|node| node.cached_rlp_node().is_some())
907    }
908
    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
    fn update_subtrie_hashes(&mut self) {
        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateSubtrieHashes);

        // Take changed subtries according to the prefix set. The prefix set is swapped
        // out of `self` so that its frozen view can be consumed here.
        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
        let num_changed_keys = prefix_set.len();
        let (mut changed_subtries, unchanged_prefix_set) =
            self.take_changed_lower_subtries(&mut prefix_set);

        // update metrics
        #[cfg(feature = "metrics")]
        self.metrics.subtries_updated.record(changed_subtries.len() as f64);

        // Update the prefix set with the keys that didn't have matching subtries
        self.prefix_set = unchanged_prefix_set;

        // Update subtrie hashes serially if parallelism is not enabled
        if !self.is_update_parallelism_enabled(num_changed_keys) {
            for changed_subtrie in &mut changed_subtries {
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
            }

            // Re-insert the subtries taken above now that their hashes are updated.
            self.insert_changed_subtries(changed_subtries);
            return
        }

        // `is_update_parallelism_enabled` always returns false in nostd builds, so the
        // parallel branch below cannot be reached there.
        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_update_parallelism_enabled");

        #[cfg(feature = "std")]
        // Update subtrie hashes in parallel
        {
            use rayon::prelude::*;

            // Each changed subtrie is hashed independently on rayon worker threads;
            // `self.branch_node_masks` is only read, never written, during this phase.
            changed_subtries.par_iter_mut().for_each(|changed_subtrie| {
                #[cfg(feature = "metrics")]
                let start = Instant::now();
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
                // Latency is recorded per subtrie, from the worker thread that hashed it.
                #[cfg(feature = "metrics")]
                self.metrics.subtrie_hash_update_latency.record(start.elapsed());
            });

            self.insert_changed_subtries(changed_subtries);
        }
    }
966
967    fn get_leaf_value(&self, full_path: &Nibbles) -> Option<&Vec<u8>> {
968        // `subtrie_for_path` is intended for a node path, but here we are using a full key path. So
969        // we need to check if the subtrie that the key might belong to has any nodes; if not then
970        // the key's portion of the trie doesn't have enough depth to reach into the subtrie, and
971        // the key will be in the upper subtrie
972        if let Some(subtrie) = self.subtrie_for_path(full_path) &&
973            !subtrie.is_empty()
974        {
975            return subtrie.inner.values.get(full_path);
976        }
977
978        self.upper_subtrie.inner.values.get(full_path)
979    }
980
981    fn updates_ref(&self) -> Cow<'_, SparseTrieUpdates> {
982        self.updates.as_ref().map_or(Cow::Owned(SparseTrieUpdates::default()), Cow::Borrowed)
983    }
984
985    fn take_updates(&mut self) -> SparseTrieUpdates {
986        match self.updates.take() {
987            Some(updates) => {
988                // NOTE: we need to preserve Some case
989                self.updates = Some(SparseTrieUpdates::with_capacity(
990                    updates.updated_nodes.len(),
991                    updates.removed_nodes.len(),
992                ));
993                updates
994            }
995            None => SparseTrieUpdates::default(),
996        }
997    }
998
999    fn wipe(&mut self) {
1000        self.upper_subtrie.wipe();
1001        for trie in &mut *self.lower_subtries {
1002            trie.wipe();
1003        }
1004        self.prefix_set = PrefixSetMut::all();
1005        self.updates = self.updates.is_some().then(SparseTrieUpdates::wiped);
1006    }
1007
1008    fn clear(&mut self) {
1009        self.upper_subtrie.clear();
1010        self.upper_subtrie.nodes.insert(Nibbles::default(), SparseNode::Empty);
1011        for subtrie in &mut *self.lower_subtries {
1012            subtrie.clear();
1013        }
1014        self.prefix_set.clear();
1015        self.updates = None;
1016        self.branch_node_masks.clear();
1017        #[cfg(feature = "trie-debug")]
1018        self.debug_recorder.reset();
1019        // `update_actions_buffers` doesn't need to be cleared; we want to reuse the Vecs it has
1020        // buffered, and all of those are already inherently cleared when they get used.
1021    }
1022
    fn find_leaf(
        &self,
        full_path: &Nibbles,
        expected_value: Option<&Vec<u8>>,
    ) -> Result<LeafLookup, LeafLookupError> {
        // Inclusion proof
        //
        // First, do a quick check if the value exists in either the upper or lower subtrie's values
        // map. We assume that if there exists a leaf node, then its value will be in the `values`
        // map.
        if let Some(actual_value) = core::iter::once(self.upper_subtrie.as_ref())
            .chain(self.lower_subtrie_for_path(full_path))
            .filter_map(|subtrie| subtrie.inner.values.get(full_path))
            .next()
        {
            // We found the leaf, check if the value matches (if expected value was provided)
            return expected_value
                .is_none_or(|v| v == actual_value)
                .then_some(LeafLookup::Exists)
                .ok_or_else(|| LeafLookupError::ValueMismatch {
                    path: *full_path,
                    expected: expected_value.cloned(),
                    actual: actual_value.clone(),
                })
        }

        // If the value does not exist in the `values` map, then this means that the leaf either:
        // - Does not exist in the trie
        // - Is missing from the witness
        // We traverse the trie to find the location where this leaf would have been, showing
        // that it is not in the trie. Or we find a blinded node, showing that the witness is
        // not complete.
        let mut curr_path = Nibbles::new(); // start traversal from root
        let mut curr_subtrie = self.upper_subtrie.as_ref();
        let mut curr_subtrie_is_upper = true;

        loop {
            match curr_subtrie.nodes.get(&curr_path).unwrap() {
                // An empty node at this position proves the target leaf cannot exist.
                SparseNode::Empty => return Ok(LeafLookup::NonExistent),
                SparseNode::Leaf { key, .. } => {
                    // A different leaf occupying the target's position is an exclusion
                    // proof; finding the target itself here would contradict the values
                    // map check above.
                    let mut found_full_path = curr_path;
                    found_full_path.extend(key);
                    assert!(&found_full_path != full_path, "target leaf {full_path:?} found, even though value wasn't in values hashmap");
                    return Ok(LeafLookup::NonExistent)
                }
                SparseNode::Extension { key, .. } => {
                    // Either the target path terminates at this extension, or it
                    // diverges from the extension's key; both exclude the leaf.
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.extend(key);
                    if !full_path.starts_with(&curr_path) {
                        return Ok(LeafLookup::NonExistent)
                    }
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    // Follow the child edge for the next nibble of the target path.
                    let nibble = full_path.get_unchecked(curr_path.len());
                    if !state_mask.is_bit_set(nibble) {
                        // No child at that nibble: exclusion proof.
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.push_unchecked(nibble);
                    // A blinded child means the witness is incomplete along this path,
                    // so neither inclusion nor exclusion can be proven.
                    if blinded_mask.is_bit_set(nibble) {
                        return Err(LeafLookupError::BlindedNode {
                            path: curr_path,
                            hash: blinded_hashes[nibble as usize],
                        })
                    }
                }
            }

            // If we were previously looking at the upper trie, and the new path is in the
            // lower trie, we need to pull out a ref to the lower trie.
            if curr_subtrie_is_upper &&
                let Some(lower_subtrie) = self.lower_subtrie_for_path(&curr_path)
            {
                curr_subtrie = lower_subtrie;
                curr_subtrie_is_upper = false;
            }
        }
    }
1105
1106    fn shrink_nodes_to(&mut self, size: usize) {
1107        // Distribute the capacity across upper and lower subtries
1108        //
1109        // Always include upper subtrie, plus any lower subtries
1110        let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1111        let size_per_subtrie = size / total_subtries;
1112
1113        // Shrink the upper subtrie
1114        self.upper_subtrie.shrink_nodes_to(size_per_subtrie);
1115
1116        // Shrink lower subtries (works for both revealed and blind with allocation)
1117        for subtrie in &mut *self.lower_subtries {
1118            subtrie.shrink_nodes_to(size_per_subtrie);
1119        }
1120
1121        // shrink masks map
1122        self.branch_node_masks.shrink_to(size);
1123    }
1124
1125    fn shrink_values_to(&mut self, size: usize) {
1126        // Distribute the capacity across upper and lower subtries
1127        //
1128        // Always include upper subtrie, plus any lower subtries
1129        let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1130        let size_per_subtrie = size / total_subtries;
1131
1132        // Shrink the upper subtrie
1133        self.upper_subtrie.shrink_values_to(size_per_subtrie);
1134
1135        // Shrink lower subtries (works for both revealed and blind with allocation)
1136        for subtrie in &mut *self.lower_subtries {
1137            subtrie.shrink_values_to(size_per_subtrie);
1138        }
1139    }
1140
1141    /// O(1) size hint based on total node count (including hash stubs).
1142    fn size_hint(&self) -> usize {
1143        let upper_count = self.upper_subtrie.nodes.len();
1144        let lower_count: usize = self
1145            .lower_subtries
1146            .iter()
1147            .filter_map(|s| s.as_revealed_ref())
1148            .map(|s| s.nodes.len())
1149            .sum();
1150        upper_count + lower_count
1151    }
1152
    fn memory_size(&self) -> usize {
        // NOTE(review): this call resolves to the *inherent* `memory_size` method
        // (Rust prefers inherent methods over trait methods during resolution), making
        // this a plain delegation rather than infinite recursion — confirm an inherent
        // `memory_size` exists on this type.
        self.memory_size()
    }
1156
    /// Prunes the trie down to the paths needed by `retained_leaves`: each subtree with
    /// no retained descendant and a cached hash is removed, and its hash is recorded as
    /// a blinded child on the parent branch node. Returns whatever
    /// `finalize_pruned_roots` computes from the pruned subtree roots.
    fn prune(&mut self, retained_leaves: &[Nibbles]) -> usize {
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.reset();

        // Sort the retained leaves for `has_retained_descendant` below.
        // NOTE(review): presumably the helper relies on sorted input — confirm.
        let mut retained_leaves = retained_leaves.to_vec();
        retained_leaves.sort_unstable();

        // Roots of subtrees actually pruned (blinded) during this traversal.
        let mut effective_pruned_roots = Vec::<Nibbles>::new();
        // Explicit-stack depth-first traversal over node paths, starting at the root.
        let mut stack: SmallVec<[Nibbles; 32]> = SmallVec::new();
        stack.push(Nibbles::default());

        while let Some(path) = stack.pop() {
            // Skip paths whose subtrie or node is unavailable.
            let Some(node) =
                self.subtrie_for_path(&path).and_then(|subtrie| subtrie.nodes.get(&path).cloned())
            else {
                continue;
            };

            match node {
                // Nothing below an empty node or a leaf to prune.
                SparseNode::Empty | SparseNode::Leaf { .. } => {}
                SparseNode::Extension { key, state, .. } => {
                    let mut child = path;
                    child.extend(&key);

                    // Keep the extension and descend when a retained leaf lies below it.
                    if has_retained_descendant(&retained_leaves, &child) {
                        stack.push(child);
                        continue;
                    }

                    // Root extension has no parent branch edge to blind; keep it as-is.
                    if path.is_empty() {
                        continue;
                    }

                    // Without a cached hash the node cannot be blinded, so keep it.
                    let Some(hash) = state.cached_hash() else { continue };
                    self.subtrie_for_path_mut_untracked(&path)
                        .expect("node subtrie exists")
                        .nodes
                        .remove(&path);

                    let parent_path = path.slice(0..path.len() - 1);
                    // Parent can live in a different subtrie when `path` is the root of a lower
                    // subtrie, so resolve it by `parent_path` rather than reusing `path`'s subtrie.
                    let SparseNode::Branch { blinded_mask, blinded_hashes, .. } = self
                        .subtrie_for_path_mut_untracked(&parent_path)
                        .expect("parent subtrie exists")
                        .nodes
                        .get_mut(&parent_path)
                        .expect("expected parent branch node")
                    else {
                        panic!("expected branch node at path {parent_path:?}");
                    };

                    // Record the pruned extension as a blinded child edge of its parent.
                    let nibble = path.last().unwrap();
                    blinded_mask.set_bit(nibble);
                    blinded_hashes[nibble as usize] = hash;
                    effective_pruned_roots.push(path);
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    // Accumulate mask changes in local copies; they are written back to
                    // the branch node after all children are processed.
                    let mut blinded_mask = blinded_mask;
                    let mut blinded_hashes = blinded_hashes;
                    for nibble in state_mask.iter() {
                        // Already-blinded children have nothing left to prune.
                        if blinded_mask.is_bit_set(nibble) {
                            continue;
                        }

                        let mut child = path;
                        child.push_unchecked(nibble);
                        // Keep and descend into children with retained leaves below them.
                        if has_retained_descendant(&retained_leaves, &child) {
                            stack.push(child);
                            continue;
                        }

                        let Entry::Occupied(entry) =
                            self.subtrie_for_path_mut_untracked(&child).unwrap().nodes.entry(child)
                        else {
                            panic!("expected node at path {child:?}");
                        };

                        // Children without a cached hash cannot be blinded; keep them.
                        let Some(hash) = entry.get().cached_hash() else {
                            continue;
                        };
                        // Remove the child node and note it as a blinded edge instead.
                        entry.remove();
                        blinded_mask.set_bit(nibble);
                        blinded_hashes[nibble as usize] = hash;
                        effective_pruned_roots.push(child);
                    }

                    // Write the accumulated masks back into the branch node itself.
                    let SparseNode::Branch {
                        blinded_mask: old_blinded_mask,
                        blinded_hashes: old_blinded_hashes,
                        ..
                    } = self
                        .subtrie_for_path_mut_untracked(&path)
                        .unwrap()
                        .nodes
                        .get_mut(&path)
                        .unwrap()
                    else {
                        unreachable!("expected branch node at path {path:?}");
                    };
                    *old_blinded_mask = blinded_mask;
                    *old_blinded_hashes = blinded_hashes;
                }
            }
        }

        Self::finalize_pruned_roots(self, effective_pruned_roots)
    }
1266
    /// Applies a batch of leaf updates. Entries that succeed stay drained from
    /// `updates`; entries blocked on a blinded node are re-inserted and a proof is
    /// requested for them via `proof_required_fn`. Non-retriable errors abort the
    /// whole batch.
    fn update_leaves(
        &mut self,
        updates: &mut alloy_primitives::map::B256Map<crate::LeafUpdate>,
        mut proof_required_fn: impl FnMut(B256, u8),
    ) -> SparseTrieResult<()> {
        use crate::{provider::NoRevealProvider, LeafUpdate};

        // Snapshot the requested updates for the debug recorder before processing.
        #[cfg(feature = "trie-debug")]
        let recorded_updates: Vec<_> =
            updates.iter().map(|(k, v)| (*k, LeafUpdateRecord::from(v))).collect();
        #[cfg(feature = "trie-debug")]
        let mut recorded_proof_targets: Vec<(B256, u8)> = Vec::new();

        // Drain updates to avoid cloning keys while preserving the map's allocation.
        // On success, entries remain removed; on blinded node failure, they're re-inserted.
        let drained: Vec<_> = updates.drain().collect();

        for (key, update) in drained {
            let full_path = Nibbles::unpack(key);

            match update {
                LeafUpdate::Changed(value) => {
                    // An empty value denotes removal of the leaf.
                    if value.is_empty() {
                        // Removal: remove_leaf with NoRevealProvider is atomic - returns a
                        // retriable error before any mutations (via pre_validate_reveal_chain).
                        match self.remove_leaf(&full_path, NoRevealProvider) {
                            Ok(()) => {}
                            Err(e) => {
                                if let Some(path) = Self::get_retriable_path(&e) {
                                    // Blocked on a blinded node: request the proof that
                                    // would reveal it and queue this entry for retry.
                                    let (target_key, min_len) =
                                        Self::proof_target_for_path(key, &full_path, &path);
                                    proof_required_fn(target_key, min_len);
                                    #[cfg(feature = "trie-debug")]
                                    recorded_proof_targets.push((target_key, min_len));
                                    updates.insert(key, LeafUpdate::Changed(value));
                                } else {
                                    // Non-retriable: fail the whole batch.
                                    return Err(e);
                                }
                            }
                        }
                    } else {
                        // Update/insert: update_leaf is atomic - cleans up on error.
                        if let Err(e) = self.update_leaf(full_path, value.clone(), NoRevealProvider)
                        {
                            if let Some(path) = Self::get_retriable_path(&e) {
                                let (target_key, min_len) =
                                    Self::proof_target_for_path(key, &full_path, &path);
                                proof_required_fn(target_key, min_len);
                                #[cfg(feature = "trie-debug")]
                                recorded_proof_targets.push((target_key, min_len));
                                updates.insert(key, LeafUpdate::Changed(value));
                            } else {
                                return Err(e);
                            }
                        }
                    }
                }
                LeafUpdate::Touched => {
                    // Touched is read-only: check if path is accessible, request proof if blinded.
                    match self.find_leaf(&full_path, None) {
                        Err(LeafLookupError::BlindedNode { path, .. }) => {
                            let (target_key, min_len) =
                                Self::proof_target_for_path(key, &full_path, &path);
                            proof_required_fn(target_key, min_len);
                            #[cfg(feature = "trie-debug")]
                            recorded_proof_targets.push((target_key, min_len));
                            updates.insert(key, LeafUpdate::Touched);
                        }
                        // Path is fully revealed (exists or proven non-existent), no action needed.
                        Ok(_) | Err(LeafLookupError::ValueMismatch { .. }) => {}
                    }
                }
            }
        }

        // Record the processed batch along with any proof targets it produced.
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateLeaves {
            updates: recorded_updates,
            proof_targets: recorded_proof_targets,
        });

        Ok(())
    }
1350
1351    #[cfg(feature = "trie-debug")]
1352    fn take_debug_recorder(&mut self) -> TrieDebugRecorder {
1353        core::mem::take(&mut self.debug_recorder)
1354    }
1355
1356    fn commit_updates(
1357        &mut self,
1358        updated: &HashMap<Nibbles, BranchNodeCompact>,
1359        removed: &HashSet<Nibbles>,
1360    ) {
1361        // Sync branch_node_masks with what's being committed to DB.
1362        // This ensures that on subsequent root() calls, the masks reflect the actual
1363        // DB state, which is needed for correct removal detection.
1364        self.branch_node_masks.reserve(updated.len());
1365        for (path, node) in updated {
1366            self.branch_node_masks.insert(
1367                *path,
1368                BranchNodeMasks { tree_mask: node.tree_mask, hash_mask: node.hash_mask },
1369            );
1370        }
1371        for path in removed {
1372            self.branch_node_masks.remove(path);
1373        }
1374    }
1375}
1376
1377impl ParallelSparseTrie {
    /// Sets the thresholds that control when parallelism is used during operations.
    ///
    /// Builder-style: consumes and returns `self`, so this can be chained after construction.
    pub const fn with_parallelism_thresholds(mut self, thresholds: ParallelismThresholds) -> Self {
        self.parallelism_thresholds = thresholds;
        self
    }
1383
    /// Returns true if retaining updates is enabled for the overall trie.
    ///
    /// Updates are retained exactly when `self.updates` holds a set to accumulate them into.
    const fn updates_enabled(&self) -> bool {
        self.updates.is_some()
    }
1388
1389    /// Returns true if parallelism should be enabled for revealing the given number of nodes.
1390    /// Will always return false in nostd builds.
1391    const fn is_reveal_parallelism_enabled(&self, num_nodes: usize) -> bool {
1392        #[cfg(not(feature = "std"))]
1393        {
1394            let _ = num_nodes;
1395            return false;
1396        }
1397
1398        #[cfg(feature = "std")]
1399        {
1400            num_nodes >= self.parallelism_thresholds.min_revealed_nodes
1401        }
1402    }
1403
1404    /// Returns true if parallelism should be enabled for updating hashes with the given number
1405    /// of changed keys. Will always return false in nostd builds.
1406    const fn is_update_parallelism_enabled(&self, num_changed_keys: usize) -> bool {
1407        #[cfg(not(feature = "std"))]
1408        {
1409            let _ = num_changed_keys;
1410            return false;
1411        }
1412
1413        #[cfg(feature = "std")]
1414        {
1415            num_changed_keys >= self.parallelism_thresholds.min_updated_nodes
1416        }
1417    }
1418
1419    /// Checks if an error is retriable (`BlindedNode` or `NodeNotFoundInProvider`) and extracts
1420    /// the path if so.
1421    ///
1422    /// Both error types indicate that a node needs to be revealed before the operation can
1423    /// succeed. `BlindedNode` occurs when traversing to a Hash node, while `NodeNotFoundInProvider`
1424    /// occurs when `retain_updates` is enabled and an extension node's child needs revealing.
1425    const fn get_retriable_path(e: &SparseTrieError) -> Option<Nibbles> {
1426        match e.kind() {
1427            SparseTrieErrorKind::BlindedNode(path) |
1428            SparseTrieErrorKind::NodeNotFoundInProvider { path } => Some(*path),
1429            _ => None,
1430        }
1431    }
1432
1433    /// Converts a nibbles path to a B256, right-padding with zeros to 64 nibbles.
1434    fn nibbles_to_padded_b256(path: &Nibbles) -> B256 {
1435        let mut bytes = [0u8; 32];
1436        path.pack_to(&mut bytes);
1437        B256::from(bytes)
1438    }
1439
1440    /// Computes the proof target key and `min_len` for a blinded node error.
1441    ///
1442    /// Returns `(target_key, min_len)` where:
1443    /// - `target_key` is `full_key` if `path` is a prefix of `full_path`, otherwise the padded path
1444    /// - `min_len` is always based on `path.len()`
1445    fn proof_target_for_path(full_key: B256, full_path: &Nibbles, path: &Nibbles) -> (B256, u8) {
1446        let min_len = (path.len() as u8).min(64);
1447        let target_key =
1448            if full_path.starts_with(path) { full_key } else { Self::nibbles_to_padded_b256(path) };
1449        (target_key, min_len)
1450    }
1451
1452    /// Creates a new revealed sparse trie from the given root node.
1453    ///
1454    /// This function initializes the internal structures and then reveals the root.
1455    /// It is a convenient method to create a trie when you already have the root node available.
1456    ///
1457    /// # Arguments
1458    ///
1459    /// * `root` - The root node of the trie
1460    /// * `masks` - Trie masks for root branch node
1461    /// * `retain_updates` - Whether to track updates
1462    ///
1463    /// # Returns
1464    ///
1465    /// Self if successful, or an error if revealing fails.
1466    pub fn from_root(
1467        root: TrieNodeV2,
1468        masks: Option<BranchNodeMasks>,
1469        retain_updates: bool,
1470    ) -> SparseTrieResult<Self> {
1471        Self::default().with_root(root, masks, retain_updates)
1472    }
1473
    /// Applies a set of prune roots to the trie, dropping nodes that are strict descendants of a
    /// pruned root and values/masks at or below one (per `is_strict_descendant_in` /
    /// `starts_with_pruned_in` — TODO confirm helper semantics against their definitions).
    ///
    /// The given roots must be prefix-free — no root may be a prefix of another; this is checked
    /// with a `debug_assert`.
    ///
    /// Returns the number of pruned roots that were processed.
    fn finalize_pruned_roots(&mut self, mut effective_pruned_roots: Vec<Nibbles>) -> usize {
        if effective_pruned_roots.is_empty() {
            return 0;
        }

        let nodes_converted = effective_pruned_roots.len();

        // Sort roots by subtrie type (upper first), then by path for efficient partitioning.
        effective_pruned_roots.sort_unstable_by(|path_a, path_b| {
            let subtrie_type_a = SparseSubtrieType::from_path(path_a);
            let subtrie_type_b = SparseSubtrieType::from_path(path_b);
            subtrie_type_a.cmp(&subtrie_type_b).then(path_a.cmp(path_b))
        });

        // Split off upper subtrie roots (they come first due to sorting)
        let num_upper_roots = effective_pruned_roots
            .iter()
            .position(|p| !SparseSubtrieType::path_len_is_upper(p.len()))
            .unwrap_or(effective_pruned_roots.len());

        let roots_upper = &effective_pruned_roots[..num_upper_roots];
        let roots_lower = &effective_pruned_roots[num_upper_roots..];

        // With a lexicographically sorted set, checking adjacent pairs suffices: any prefix of a
        // later element is also a prefix of every element between them.
        debug_assert!(
            {
                let mut all_roots: Vec<_> = effective_pruned_roots.clone();
                all_roots.sort_unstable();
                all_roots.windows(2).all(|w| !w[1].starts_with(&w[0]))
            },
            "prune roots must be prefix-free"
        );

        // Upper prune roots that are prefixes of lower subtrie root paths cause the entire
        // subtrie to be cleared (preserving allocations for reuse).
        if !roots_upper.is_empty() {
            for subtrie in &mut *self.lower_subtries {
                let should_clear = subtrie.as_revealed_ref().is_some_and(|s| {
                    // In a sorted prefix-free set, the only candidate prefix of `s.path` is the
                    // greatest root lexicographically <= `s.path`.
                    let search_idx = roots_upper.partition_point(|root| root <= &s.path);
                    search_idx > 0 && s.path.starts_with(&roots_upper[search_idx - 1])
                });
                if should_clear {
                    subtrie.clear();
                }
            }
        }

        // Upper subtrie: prune nodes and values
        self.upper_subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_upper, p));
        // Values are keyed by full leaf paths, so they may fall under either upper or lower roots.
        self.upper_subtrie.inner.values.retain(|p, _| {
            !starts_with_pruned_in(roots_upper, p) && !starts_with_pruned_in(roots_lower, p)
        });

        // Process lower subtries using chunk_by to group roots by subtrie
        for roots_group in roots_lower.chunk_by(|path_a, path_b| {
            SparseSubtrieType::from_path(path_a) == SparseSubtrieType::from_path(path_b)
        }) {
            let subtrie_idx = path_subtrie_index_unchecked(&roots_group[0]);

            // Skip unrevealed/blinded subtries - nothing to prune.
            let should_clear = {
                let Some(subtrie) = self.lower_subtries[subtrie_idx].as_revealed_mut() else {
                    continue;
                };

                // Retain only nodes/values not descended from any pruned root.
                subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_group, p));
                subtrie.inner.values.retain(|p, _| !starts_with_pruned_in(roots_group, p));

                // If prune removed the node at `subtrie.path`, the subtrie can no longer be
                // represented as revealed and must be blinded.
                !subtrie.nodes.contains_key(&subtrie.path)
            };

            if should_clear {
                self.lower_subtries[subtrie_idx].clear();
            }
        }

        // Branch node masks pruning
        self.branch_node_masks.retain(|p, _| {
            if SparseSubtrieType::path_len_is_upper(p.len()) {
                !starts_with_pruned_in(roots_upper, p)
            } else {
                !starts_with_pruned_in(roots_lower, p) && !starts_with_pruned_in(roots_upper, p)
            }
        });

        nodes_converted
    }
1563
1564    /// Returns a reference to the lower `SparseSubtrie` for the given path, or None if the
1565    /// path belongs to the upper trie, or if the lower subtrie for the path doesn't exist or is
1566    /// blinded.
1567    fn lower_subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1568        match SparseSubtrieType::from_path(path) {
1569            SparseSubtrieType::Upper => None,
1570            SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_ref(),
1571        }
1572    }
1573
1574    /// Returns a mutable reference to the lower `SparseSubtrie` for the given path, or None if the
1575    /// path belongs to the upper trie.
1576    ///
1577    /// This method will create/reveal a new lower subtrie for the given path if one isn't already.
1578    /// If one does exist, but its path field is longer than the given path, then the field will be
1579    /// set to the given path.
1580    fn lower_subtrie_for_path_mut(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1581        match SparseSubtrieType::from_path(path) {
1582            SparseSubtrieType::Upper => None,
1583            SparseSubtrieType::Lower(idx) => {
1584                self.lower_subtries[idx].reveal(path);
1585                Some(self.lower_subtries[idx].as_revealed_mut().expect("just revealed"))
1586            }
1587        }
1588    }
1589
1590    /// Returns a reference to either the lower or upper `SparseSubtrie` for the given path,
1591    /// depending on the path's length.
1592    ///
1593    /// Returns `None` if a lower subtrie does not exist for the given path.
1594    fn subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1595        if SparseSubtrieType::path_len_is_upper(path.len()) {
1596            Some(&self.upper_subtrie)
1597        } else {
1598            self.lower_subtrie_for_path(path)
1599        }
1600    }
1601
1602    /// Returns a mutable reference to either the lower or upper `SparseSubtrie` for the given path,
1603    /// depending on the path's length.
1604    ///
1605    /// This method will create/reveal a new lower subtrie for the given path if one isn't already.
1606    /// If one does exist, but its path field is longer than the given path, then the field will be
1607    /// set to the given path.
1608    fn subtrie_for_path_mut(&mut self, path: &Nibbles) -> &mut SparseSubtrie {
1609        // We can't just call `lower_subtrie_for_path` and return `upper_subtrie` if it returns
1610        // None, because Rust complains about double mutable borrowing `self`.
1611        if SparseSubtrieType::path_len_is_upper(path.len()) {
1612            &mut self.upper_subtrie
1613        } else {
1614            self.lower_subtrie_for_path_mut(path).unwrap()
1615        }
1616    }
1617
1618    /// Returns a mutable reference to a subtrie without marking it as modified.
1619    /// Used for internal operations like pruning that shouldn't affect heat tracking.
1620    fn subtrie_for_path_mut_untracked(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1621        if SparseSubtrieType::path_len_is_upper(path.len()) {
1622            Some(&mut self.upper_subtrie)
1623        } else {
1624            match SparseSubtrieType::from_path(path) {
1625                SparseSubtrieType::Upper => None,
1626                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_mut(),
1627            }
1628        }
1629    }
1630
    /// Returns the next node in the traversal path from the given path towards the leaf for the
    /// given full leaf path, or an outcome describing why traversal cannot continue.
    ///
    /// Outcomes:
    /// - `Found`: `from_node` is the target leaf itself.
    /// - `ContinueFrom(path)`: traversal should continue from the given child path.
    /// - `NotFound`: the target leaf provably does not exist below `from_node`.
    /// - `BlindedNode(path)`: the next child on the path is blinded and must be revealed first.
    ///
    /// ## Panics
    ///
    /// In debug builds, if `from_path` is not a prefix of `leaf_full_path`.
    fn find_next_to_leaf(
        from_path: &Nibbles,
        from_node: &SparseNode,
        leaf_full_path: &Nibbles,
    ) -> FindNextToLeafOutcome {
        debug_assert!(leaf_full_path.len() >= from_path.len());
        debug_assert!(leaf_full_path.starts_with(from_path));

        match from_node {
            // If empty node is found it means the subtrie doesn't have any nodes in it, let alone
            // the target leaf.
            SparseNode::Empty => FindNextToLeafOutcome::NotFound,
            SparseNode::Leaf { key, .. } => {
                // A leaf terminates traversal: either it is the target leaf, or the target
                // cannot exist in this subtrie.
                let mut found_full_path = *from_path;
                found_full_path.extend(key);

                if &found_full_path == leaf_full_path {
                    return FindNextToLeafOutcome::Found
                }
                FindNextToLeafOutcome::NotFound
            }
            SparseNode::Extension { key, .. } => {
                // The target path ends exactly at this extension, so the leaf can't be below it.
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.extend(key);

                // The extension's key diverges from the target path.
                if !leaf_full_path.starts_with(&child_path) {
                    return FindNextToLeafOutcome::NotFound
                }
                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                // The target path ends exactly at this branch, so the leaf can't be below it.
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                // The next nibble of the target path selects the child to descend into; if the
                // branch has no child there, the leaf doesn't exist.
                let nibble = leaf_full_path.get_unchecked(from_path.len());
                if !state_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.push_unchecked(nibble);

                // The child exists but is blinded; it must be revealed before traversal can
                // continue.
                if blinded_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::BlindedNode(child_path);
                }

                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
        }
    }
1693
    /// Called when a child node has collapsed into its parent as part of `remove_leaf`. If the
    /// new parent node is a leaf, then the previous child also was, and if the previous child was
    /// on a lower subtrie while the parent is on an upper then the leaf value needs to be moved to
    /// the upper.
    ///
    /// ## Panics
    ///
    /// If the leaf's value is missing from the lower subtrie, which would indicate the trie is in
    /// an inconsistent state.
    fn move_value_on_leaf_removal(
        &mut self,
        parent_path: &Nibbles,
        new_parent_node: &SparseNode,
        prev_child_path: &Nibbles,
    ) {
        // If the parent path isn't in the upper then it doesn't matter what the new node is,
        // there's no situation where a leaf value needs to be moved.
        if SparseSubtrieType::from_path(parent_path).lower_index().is_some() {
            return;
        }

        if let SparseNode::Leaf { key, .. } = new_parent_node {
            // `lower_subtrie_for_path_mut` returns None only for upper-trie paths; in that case
            // the previous child was also on the upper trie and its value is already in place.
            let Some(prev_child_subtrie) = self.lower_subtrie_for_path_mut(prev_child_path) else {
                return;
            };

            // Values are keyed by the leaf's full path: the parent path plus the leaf key.
            let mut leaf_full_path = *parent_path;
            leaf_full_path.extend(key);

            let val = prev_child_subtrie.inner.values.remove(&leaf_full_path).expect("ParallelSparseTrie is in an inconsistent state, expected value on subtrie which wasn't found");
            self.upper_subtrie.inner.values.insert(leaf_full_path, val);
        }
    }
1722
    /// Used by `remove_leaf` to ensure that when a node is removed from a lower subtrie that any
    /// externalities are handled. These can include:
    /// - Removing the lower subtrie completely, if it is now empty.
    /// - Updating the `path` field of the lower subtrie to indicate that its root node has changed.
    ///
    /// Removals from the upper subtrie need no such fixups, since the upper trie's path is
    /// always empty.
    ///
    /// This method assumes that the caller will deal with putting all other nodes in the trie into
    /// a consistent state after the removal of this one.
    ///
    /// ## Panics
    ///
    /// - If the removed node was not a leaf or extension.
    fn remove_node(&mut self, path: &Nibbles) {
        // Resolves to the upper subtrie for short paths, otherwise to the lower subtrie which
        // owns this path (revealing it if necessary).
        let subtrie = self.subtrie_for_path_mut(path);
        let node = subtrie.nodes.remove(path);

        let Some(idx) = SparseSubtrieType::from_path(path).lower_index() else {
            // When removing a node from the upper trie there's nothing special we need to do to fix
            // its path field; the upper trie's path is always empty.
            return;
        };

        match node {
            Some(SparseNode::Leaf { .. }) => {
                // If the leaf was the final node in its lower subtrie then we can blind the
                // subtrie, effectively marking it as empty.
                if subtrie.nodes.is_empty() {
                    self.lower_subtries[idx].clear();
                }
            }
            Some(SparseNode::Extension { key, .. }) => {
                // If the removed extension was the root node of a lower subtrie then the lower
                // subtrie's `path` needs to be updated to be whatever node the extension used to
                // point to.
                if &subtrie.path == path {
                    subtrie.path.extend(&key);
                }
            }
            _ => panic!("Expected to remove a leaf or extension, but removed {node:?}"),
        }
    }
1763
1764    /// Given the path to a parent branch node and a child node which is the sole remaining child on
1765    /// that branch after removing a leaf, returns a node to replace the parent branch node and a
1766    /// boolean indicating if the child should be deleted.
1767    ///
1768    /// ## Panics
1769    ///
1770    /// - If either parent or child node is not already revealed.
1771    /// - If parent's path is not a prefix of the child's path.
1772    fn branch_changes_on_leaf_removal(
1773        parent_path: &Nibbles,
1774        remaining_child_path: &Nibbles,
1775        remaining_child_node: &SparseNode,
1776    ) -> (SparseNode, bool) {
1777        debug_assert!(remaining_child_path.len() > parent_path.len());
1778        debug_assert!(remaining_child_path.starts_with(parent_path));
1779
1780        let remaining_child_nibble = remaining_child_path.get_unchecked(parent_path.len());
1781
1782        // If we swap the branch node out either an extension or leaf, depending on
1783        // what its remaining child is.
1784        match remaining_child_node {
1785            SparseNode::Empty => {
1786                panic!("remaining child must have been revealed already")
1787            }
1788            // If the only child is a leaf node, we downgrade the branch node into a
1789            // leaf node, prepending the nibble to the key, and delete the old
1790            // child.
1791            SparseNode::Leaf { key, .. } => {
1792                let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1793                new_key.extend(key);
1794                (SparseNode::new_leaf(new_key), true)
1795            }
1796            // If the only child node is an extension node, we downgrade the branch
1797            // node into an even longer extension node, prepending the nibble to the
1798            // key, and delete the old child.
1799            SparseNode::Extension { key, .. } => {
1800                let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1801                new_key.extend(key);
1802                (SparseNode::new_ext(new_key), true)
1803            }
1804            // If the only child is a branch node, we downgrade the current branch
1805            // node into a one-nibble extension node.
1806            SparseNode::Branch { .. } => (
1807                SparseNode::new_ext(Nibbles::from_nibbles_unchecked([remaining_child_nibble])),
1808                false,
1809            ),
1810        }
1811    }
1812
1813    /// Given the path to a parent extension and its key, and a child node (not necessarily on this
1814    /// subtrie), returns an optional replacement parent node. If a replacement is returned then the
1815    /// child node should be deleted.
1816    ///
1817    /// ## Panics
1818    ///
1819    /// - If either parent or child node is not already revealed.
1820    /// - If parent's path is not a prefix of the child's path.
1821    fn extension_changes_on_leaf_removal(
1822        parent_path: &Nibbles,
1823        parent_key: &Nibbles,
1824        child_path: &Nibbles,
1825        child: &SparseNode,
1826    ) -> Option<SparseNode> {
1827        debug_assert!(child_path.len() > parent_path.len());
1828        debug_assert!(child_path.starts_with(parent_path));
1829
1830        // If the parent node is an extension node, we need to look at its child to see
1831        // if we need to merge it.
1832        match child {
1833            SparseNode::Empty => {
1834                panic!("child must be revealed")
1835            }
1836            // For a leaf node, we collapse the extension node into a leaf node,
1837            // extending the key. While it's impossible to encounter an extension node
1838            // followed by a leaf node in a complete trie, it's possible here because we
1839            // could have downgraded the extension node's child into a leaf node from a
1840            // branch in a previous call to `branch_changes_on_leaf_removal`.
1841            SparseNode::Leaf { key, .. } => {
1842                let mut new_key = *parent_key;
1843                new_key.extend(key);
1844                Some(SparseNode::new_leaf(new_key))
1845            }
1846            // Similar to the leaf node, for an extension node, we collapse them into one
1847            // extension node, extending the key.
1848            SparseNode::Extension { key, .. } => {
1849                let mut new_key = *parent_key;
1850                new_key.extend(key);
1851                Some(SparseNode::new_ext(new_key))
1852            }
1853            // For a branch node, we just leave the extension node as-is.
1854            SparseNode::Branch { .. } => None,
1855        }
1856    }
1857
1858    /// Drains any [`SparseTrieUpdatesAction`]s from the given subtrie, and applies each action to
1859    /// the given `updates` set. If the given set is None then this is a no-op.
1860    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
1861    fn apply_subtrie_update_actions(
1862        &mut self,
1863        update_actions: impl Iterator<Item = SparseTrieUpdatesAction>,
1864    ) {
1865        if let Some(updates) = self.updates.as_mut() {
1866            let additional = update_actions.size_hint().0;
1867            updates.updated_nodes.reserve(additional);
1868            updates.removed_nodes.reserve(additional);
1869            for action in update_actions {
1870                match action {
1871                    SparseTrieUpdatesAction::InsertRemoved(path) => {
1872                        updates.updated_nodes.remove(&path);
1873                        updates.removed_nodes.insert(path);
1874                    }
1875                    SparseTrieUpdatesAction::RemoveUpdated(path) => {
1876                        updates.updated_nodes.remove(&path);
1877                    }
1878                    SparseTrieUpdatesAction::InsertUpdated(path, branch_node) => {
1879                        updates.updated_nodes.insert(path, branch_node);
1880                        updates.removed_nodes.remove(&path);
1881                    }
1882                }
1883            }
1884        };
1885    }
1886
    /// Updates hashes for the upper subtrie, using nodes from both upper and lower subtries.
    ///
    /// Walks the upper subtrie from the root using the shared path stack, computing RLP nodes as
    /// it goes. When the walk reaches [`UPPER_TRIE_MAX_DEPTH`], the node at that path is a lower
    /// subtrie root whose RLP node must already be cached (asserted in debug builds).
    ///
    /// Returns the RLP node of the trie root.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, ret)]
    fn update_upper_subtrie_hashes(&mut self, prefix_set: &mut PrefixSet) -> RlpNode {
        trace!(target: "trie::parallel_sparse", "Updating upper subtrie hashes");

        // The path stack must be empty between calls; seed it with the root path.
        debug_assert!(self.upper_subtrie.inner.buffers.path_stack.is_empty());
        self.upper_subtrie.inner.buffers.path_stack.push(RlpNodePathStackItem {
            path: Nibbles::default(), // Start from root
            is_in_prefix_set: None,
        });

        #[cfg(feature = "metrics")]
        let start = Instant::now();

        // Only allocate an update-actions buffer when update retention is enabled; buffers are
        // pooled in `update_actions_buffers` for reuse.
        let mut update_actions_buf =
            self.updates_enabled().then(|| self.update_actions_buffers.pop().unwrap_or_default());

        while let Some(stack_item) = self.upper_subtrie.inner.buffers.path_stack.pop() {
            let path = stack_item.path;
            let node = if path.len() < UPPER_TRIE_MAX_DEPTH {
                self.upper_subtrie.nodes.get_mut(&path).expect("upper subtrie node must exist")
            } else {
                let index = path_subtrie_index_unchecked(&path);
                let node = self.lower_subtries[index]
                    .as_revealed_mut()
                    .expect("lower subtrie must exist")
                    .nodes
                    .get_mut(&path)
                    .expect("lower subtrie node must exist");
                // Lower subtrie root node RLP nodes must be computed before updating upper subtrie
                // hashes
                debug_assert!(
                    node.cached_rlp_node().is_some(),
                    "Lower subtrie root node {node:?} at path {path:?} has no cached RLP node"
                );
                node
            };

            // Calculate the RLP node for the current node using upper subtrie
            self.upper_subtrie.inner.rlp_node(
                prefix_set,
                &mut update_actions_buf,
                stack_item,
                node,
                &self.branch_node_masks,
            );
        }

        // If there were any branch node updates as a result of calculating the RLP node for the
        // upper trie then apply them to the top-level set.
        if let Some(mut update_actions_buf) = update_actions_buf {
            self.apply_subtrie_update_actions(
                #[allow(clippy::iter_with_drain)]
                update_actions_buf.drain(..),
            );
            // Return the drained buffer to the pool for reuse.
            self.update_actions_buffers.push(update_actions_buf);
        }

        #[cfg(feature = "metrics")]
        self.metrics.subtrie_upper_hash_latency.record(start.elapsed());

        // After the walk the RLP stack must hold exactly one entry: the root's RLP node.
        debug_assert_eq!(self.upper_subtrie.inner.buffers.rlp_node_stack.len(), 1);
        self.upper_subtrie.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
    }
1951
    /// Returns:
    /// 1. List of lower [subtries](SparseSubtrie) that have changed according to the provided
    ///    [prefix set](PrefixSet). See documentation of [`ChangedSubtrie`] for more details. Lower
    ///    subtries whose root node is missing a hash will also be returned; this is required to
    ///    handle cases where extensions/leafs get shortened and therefore moved from the upper to a
    ///    lower subtrie.
    /// 2. Prefix set of keys that do not belong to any lower subtrie.
    ///
    /// This method helps optimize hash recalculations by identifying which specific
    /// lower subtries need to be updated. Each lower subtrie can then be updated in parallel.
    ///
    /// IMPORTANT: The method removes the subtries from `lower_subtries`, and the caller is
    /// responsible for returning them back into the array.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(prefix_set_len = prefix_set.len()))]
    fn take_changed_lower_subtries(
        &mut self,
        prefix_set: &mut PrefixSet,
    ) -> (Vec<ChangedSubtrie>, PrefixSetMut) {
        // Fast-path: If the prefix set is empty then no subtries can have been changed. Just return
        // empty values.
        if prefix_set.is_empty() {
            return Default::default();
        }

        // Clone the prefix set to iterate over its keys. Cloning is cheap, it's just an Arc.
        let prefix_set_clone = prefix_set.clone();
        let mut prefix_set_iter = prefix_set_clone.into_iter().copied().peekable();
        let mut changed_subtries = Vec::new();
        let mut unchanged_prefix_set = PrefixSetMut::default();
        let updates_enabled = self.updates_enabled();

        for (index, subtrie) in self.lower_subtries.iter_mut().enumerate() {
            // Take the subtrie when a changed key falls under its root path, or when its root
            // node has no cached RLP node (e.g. a node newly moved down from the upper trie).
            if let Some(subtrie) = subtrie.take_revealed_if(|subtrie| {
                prefix_set.contains(&subtrie.path) ||
                    subtrie
                        .nodes
                        .get(&subtrie.path)
                        .is_some_and(|n| n.cached_rlp_node().is_none())
            }) {
                let prefix_set = if prefix_set.all() {
                    // An "all" prefix set can't be partitioned; every subtrie sees "all".
                    unchanged_prefix_set = PrefixSetMut::all();
                    PrefixSetMut::all()
                } else {
                    // Take those keys from the original prefix set that start with the subtrie path
                    //
                    // Subtries are stored in the order of their paths, so we can use the same
                    // prefix set iterator.
                    let mut new_prefix_set = Vec::new();
                    while let Some(key) = prefix_set_iter.peek() {
                        if key.starts_with(&subtrie.path) {
                            // If the key starts with the subtrie path, add it to the new prefix set
                            new_prefix_set.push(prefix_set_iter.next().unwrap());
                        } else if new_prefix_set.is_empty() && key < &subtrie.path {
                            // If we didn't yet have any keys that belong to this subtrie, and the
                            // current key is still less than the subtrie path, add it to the
                            // unchanged prefix set
                            unchanged_prefix_set.insert(prefix_set_iter.next().unwrap());
                        } else {
                            // If we're past the subtrie path, we're done with this subtrie. Do not
                            // advance the iterator, the next key will be processed either by the
                            // next subtrie or inserted into the unchanged prefix set.
                            break
                        }
                    }
                    PrefixSetMut::from(new_prefix_set)
                }
                .freeze();

                // We need the full path of root node of the lower subtrie to the unchanged prefix
                // set, so that we don't skip it when calculating hashes for the upper subtrie.
                match subtrie.nodes.get(&subtrie.path) {
                    Some(SparseNode::Extension { key, .. } | SparseNode::Leaf { key, .. }) => {
                        unchanged_prefix_set.insert(subtrie.path.join(key));
                    }
                    Some(SparseNode::Branch { .. }) => {
                        unchanged_prefix_set.insert(subtrie.path);
                    }
                    _ => {}
                }

                // Reuse a pooled update-actions buffer when update retention is enabled.
                let update_actions_buf =
                    updates_enabled.then(|| self.update_actions_buffers.pop().unwrap_or_default());

                changed_subtries.push(ChangedSubtrie {
                    index,
                    subtrie,
                    prefix_set,
                    update_actions_buf,
                });
            }
        }

        // Extend the unchanged prefix set with the remaining keys that are not part of any subtries
        unchanged_prefix_set.extend_keys(prefix_set_iter);

        (changed_subtries, unchanged_prefix_set)
    }
2049
2050    /// Returns an iterator over all nodes in the trie in no particular order.
2051    #[cfg(test)]
2052    fn all_nodes(&self) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
2053        let mut nodes = vec![];
2054        for subtrie in self.lower_subtries.iter().filter_map(LowerSparseSubtrie::as_revealed_ref) {
2055            nodes.extend(subtrie.nodes.iter())
2056        }
2057        nodes.extend(self.upper_subtrie.nodes.iter());
2058        nodes
2059    }
2060
    /// Reveals a trie node in the upper trie if it has not been revealed before. When revealing
    /// branch/extension nodes this may recurse into a lower trie to reveal a child.
    ///
    /// This function decodes a trie node and inserts it into the trie structure. It handles
    /// different node types (leaf, extension, branch) by appropriately adding them to the trie and
    /// recursively revealing their children.
    ///
    /// # Arguments
    ///
    /// * `path` - The path where the node should be revealed
    /// * `node` - The trie node to reveal
    /// * `masks` - Branch node masks if known
    ///
    /// # Returns
    ///
    /// `Ok(())` if successful, or an error if the node was not revealed.
    fn reveal_upper_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
    ) -> SparseTrieResult<()> {
        // Only reveal nodes that can be reached given the current state of the upper trie. If they
        // can't be reached, it means that they were removed.
        if !self.is_path_reachable_from_upper(&path) {
            return Ok(())
        }

        // Exit early if the node was already revealed before.
        //
        // `reveal_node` returns `false` when the upper subtrie already knew this node. Even so, a
        // branch node with a non-empty key (an extension/branch pair) may still require revealing
        // the actual branch in a lower subtrie, so only that case falls through.
        if !self.upper_subtrie.reveal_node(path, node, masks, None)? {
            if let TrieNodeV2::Branch(branch) = node {
                if branch.key.is_empty() {
                    return Ok(());
                }

                // We might still potentially need to reveal a child branch node in the lower
                // subtrie, even if the upper subtrie already knew about the extension node.
                if SparseSubtrieType::path_len_is_upper(path.len() + branch.key.len()) {
                    return Ok(())
                }
            } else {
                return Ok(());
            }
        }

        // The previous upper_trie.reveal_node call will not have revealed any child nodes via
        // reveal_node_or_hash if the child node would be found on a lower subtrie. We handle that
        // here by manually checking the specific cases where this could happen, and calling
        // reveal_node_or_hash for each.
        match node {
            TrieNodeV2::Branch(branch) => {
                // Full path of the branch itself: the revealed path plus the (possibly empty)
                // extension key that precedes it.
                let mut branch_path = path;
                branch_path.extend(&branch.key);

                // If only the parent extension belongs to the upper trie, we need to reveal the
                // actual branch node in the corresponding lower subtrie.
                if !SparseSubtrieType::path_len_is_upper(branch_path.len()) {
                    self.lower_subtrie_for_path_mut(&branch_path)
                        .expect("branch_path must have a lower subtrie")
                        .reveal_branch(
                            branch_path,
                            branch.state_mask,
                            &branch.stack,
                            masks,
                            branch.branch_rlp_node.clone(),
                        )?
                } else if !SparseSubtrieType::path_len_is_upper(branch_path.len() + 1) {
                    // If a branch is at the cutoff level of the trie then it will be in the upper
                    // trie, but all of its children will be in a lower trie.
                    // Check if a child node would be in the lower subtrie, and
                    // reveal accordingly.
                    for (stack_ptr, idx) in branch.state_mask.iter().enumerate() {
                        let mut child_path = branch_path;
                        child_path.push_unchecked(idx);
                        let child = &branch.stack[stack_ptr];

                        // Only reveal children that are not hashes. Hashes are stored on branch
                        // nodes directly.
                        if !child.is_hash() {
                            self.lower_subtrie_for_path_mut(&child_path)
                                .expect("child_path must have a lower subtrie")
                                .reveal_node(
                                    child_path,
                                    &TrieNodeV2::decode(&mut branch.stack[stack_ptr].as_ref())?,
                                    None,
                                    None,
                                )?;
                        }
                    }
                }
            }
            TrieNodeV2::Extension(ext) => {
                // The extension's child lives at the concatenated path; it only needs manual
                // revealing if that path crosses into a lower subtrie.
                let mut child_path = path;
                child_path.extend(&ext.key);
                if let Some(subtrie) = self.lower_subtrie_for_path_mut(&child_path) {
                    subtrie.reveal_node(
                        child_path,
                        &TrieNodeV2::decode(&mut ext.child.as_ref())?,
                        None,
                        None,
                    )?;
                }
            }
            // Empty roots and leaves have no children to reveal in lower subtries.
            TrieNodeV2::EmptyRoot | TrieNodeV2::Leaf(_) => (),
        }

        Ok(())
    }
2169
2170    /// Return updated subtries back to the trie after executing any actions required on the
2171    /// top-level `SparseTrieUpdates`.
2172    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
2173    fn insert_changed_subtries(
2174        &mut self,
2175        changed_subtries: impl IntoIterator<Item = ChangedSubtrie>,
2176    ) {
2177        for ChangedSubtrie { index, subtrie, update_actions_buf, .. } in changed_subtries {
2178            if let Some(mut update_actions_buf) = update_actions_buf {
2179                self.apply_subtrie_update_actions(
2180                    #[allow(clippy::iter_with_drain)]
2181                    update_actions_buf.drain(..),
2182                );
2183                self.update_actions_buffers.push(update_actions_buf);
2184            }
2185
2186            self.lower_subtries[index] = LowerSparseSubtrie::Revealed(subtrie);
2187        }
2188    }
2189
2190    /// Returns a heuristic for the in-memory size of this trie in bytes.
2191    ///
2192    /// This is an approximation that accounts for:
2193    /// - The upper subtrie nodes and values
2194    /// - All revealed lower subtries nodes and values
2195    /// - The prefix set keys
2196    /// - The branch node masks map
2197    /// - Updates if retained
2198    /// - Update action buffers
2199    ///
2200    /// Note: Heap allocations for hash maps may be larger due to load factor overhead.
2201    pub fn memory_size(&self) -> usize {
2202        let mut size = core::mem::size_of::<Self>();
2203
2204        // Upper subtrie
2205        size += self.upper_subtrie.memory_size();
2206
2207        // Lower subtries (both Revealed and Blind with allocation)
2208        for subtrie in self.lower_subtries.iter() {
2209            size += subtrie.memory_size();
2210        }
2211
2212        // Prefix set keys
2213        size += self.prefix_set.len() * core::mem::size_of::<Nibbles>();
2214
2215        // Branch node masks map
2216        size += self.branch_node_masks.len() *
2217            (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeMasks>());
2218
2219        // Updates if present
2220        if let Some(updates) = &self.updates {
2221            size += updates.updated_nodes.len() *
2222                (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeCompact>());
2223            size += updates.removed_nodes.len() * core::mem::size_of::<Nibbles>();
2224        }
2225
2226        // Update actions buffers
2227        for buf in &self.update_actions_buffers {
2228            size += buf.capacity() * core::mem::size_of::<SparseTrieUpdatesAction>();
2229        }
2230
2231        size
2232    }
2233
2234    /// Determines if the given path can be directly reached from the upper trie.
2235    fn is_path_reachable_from_upper(&self, path: &Nibbles) -> bool {
2236        let mut current = Nibbles::default();
2237        while current.len() < path.len() {
2238            let Some(node) = self.upper_subtrie.nodes.get(&current) else { return false };
2239            match node {
2240                SparseNode::Branch { state_mask, .. } => {
2241                    if !state_mask.is_bit_set(path.get_unchecked(current.len())) {
2242                        return false
2243                    }
2244
2245                    current.push_unchecked(path.get_unchecked(current.len()));
2246                }
2247                SparseNode::Extension { key, .. } => {
2248                    if *key != path.slice(current.len()..current.len() + key.len()) {
2249                        return false
2250                    }
2251                    current.extend(key);
2252                }
2253                SparseNode::Empty | SparseNode::Leaf { .. } => return false,
2254            }
2255        }
2256        true
2257    }
2258
2259    /// Checks if a boundary leaf (at `path.len() == UPPER_TRIE_MAX_DEPTH`) is reachable from its
2260    /// parent branch in the upper subtrie.
2261    ///
2262    /// This is used for leaves that sit at the upper/lower subtrie boundary, where the leaf is
2263    /// in a lower subtrie but its parent branch is in the upper subtrie.
2264    fn is_boundary_leaf_reachable(
2265        upper_nodes: &HashMap<Nibbles, SparseNode>,
2266        path: &Nibbles,
2267        node: &TrieNodeV2,
2268    ) -> bool {
2269        debug_assert_eq!(path.len(), UPPER_TRIE_MAX_DEPTH);
2270
2271        if !matches!(node, TrieNodeV2::Leaf(_)) {
2272            return true
2273        }
2274
2275        let parent_path = path.slice(..path.len() - 1);
2276        let leaf_nibble = path.get_unchecked(path.len() - 1);
2277
2278        match upper_nodes.get(&parent_path) {
2279            Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2280            _ => false,
2281        }
2282    }
2283
2284    /// Returns a bitset of all subtries that are reachable from the upper trie. If subtrie is not
2285    /// reachable it means that it does not exist.
2286    fn reachable_subtries(&self) -> SubtriesBitmap {
2287        let mut reachable = SubtriesBitmap::default();
2288
2289        let mut stack = Vec::new();
2290        stack.push(Nibbles::default());
2291
2292        while let Some(current) = stack.pop() {
2293            let Some(node) = self.upper_subtrie.nodes.get(&current) else { continue };
2294            match node {
2295                SparseNode::Branch { state_mask, .. } => {
2296                    for idx in state_mask.iter() {
2297                        let mut next = current;
2298                        next.push_unchecked(idx);
2299                        if next.len() >= UPPER_TRIE_MAX_DEPTH {
2300                            reachable.set(path_subtrie_index_unchecked(&next));
2301                        } else {
2302                            stack.push(next);
2303                        }
2304                    }
2305                }
2306                SparseNode::Extension { key, .. } => {
2307                    let mut next = current;
2308                    next.extend(key);
2309                    if next.len() >= UPPER_TRIE_MAX_DEPTH {
2310                        reachable.set(path_subtrie_index_unchecked(&next));
2311                    } else {
2312                        stack.push(next);
2313                    }
2314                }
2315                SparseNode::Empty | SparseNode::Leaf { .. } => {}
2316            };
2317        }
2318
2319        reachable
2320    }
2321}
2322
/// Bitset with one bit per lower subtrie of a [`ParallelSparseTrie`]
/// ([`NUM_LOWER_SUBTRIES`] == 256 bits, backed by a single [`U256`]).
#[derive(Clone, Default, PartialEq, Eq, Debug)]
struct SubtriesBitmap(U256);
2326
2327impl SubtriesBitmap {
2328    /// Marks a subtrie index.
2329    #[inline]
2330    fn set(&mut self, idx: usize) {
2331        debug_assert!(idx < NUM_LOWER_SUBTRIES);
2332        self.0.set_bit(idx, true);
2333    }
2334
2335    /// Returns whether a subtrie index is set.
2336    #[inline]
2337    fn get(&self, idx: usize) -> bool {
2338        debug_assert!(idx < NUM_LOWER_SUBTRIES);
2339        self.0.bit(idx)
2340    }
2341}
2342
/// This is a subtrie of the [`ParallelSparseTrie`] that contains a map from path to sparse trie
/// nodes.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrie {
    /// The root path of this subtrie.
    ///
    /// This is the _full_ path to this subtrie, meaning it includes the first
    /// [`UPPER_TRIE_MAX_DEPTH`] nibbles that we also use for indexing subtries in the
    /// [`ParallelSparseTrie`].
    ///
    /// There should be a node for this path in `nodes` map.
    pub(crate) path: Nibbles,
    /// The map from paths to sparse trie nodes within this subtrie.
    ///
    /// Keys are full paths from the root of the overall trie (they all start with `path`), not
    /// paths relative to this subtrie.
    nodes: HashMap<Nibbles, SparseNode>,
    /// Subset of fields for mutable access while `nodes` field is also being mutably borrowed.
    inner: SparseSubtrieInner,
}
2360
/// Returned by the `find_next_to_leaf` method to indicate either that the leaf has been found,
/// traversal should be continued from the given path, or the leaf is not in the trie.
enum FindNextToLeafOutcome {
    /// `Found` indicates that the leaf was found at the given path.
    Found,
    /// `ContinueFrom` indicates that traversal should continue from the given path.
    ContinueFrom(Nibbles),
    /// `NotFound` indicates that there is no way to traverse to the leaf, as it is not in the
    /// trie.
    NotFound,
    /// `BlindedNode` indicates that traversal reached a blinded (hash-only) node at the contained
    /// path and cannot proceed past it.
    BlindedNode(Nibbles),
}
2375
2376impl SparseSubtrie {
2377    /// Creates a new empty subtrie with the specified root path.
2378    pub(crate) fn new(path: Nibbles) -> Self {
2379        Self { path, ..Default::default() }
2380    }
2381
    /// Returns `true` if this subtrie has no nodes, `false` otherwise.
    pub(crate) fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }
2386
2387    /// Returns true if the current path and its child are both found in the same level.
2388    fn is_child_same_level(current_path: &Nibbles, child_path: &Nibbles) -> bool {
2389        let current_level = core::mem::discriminant(&SparseSubtrieType::from_path(current_path));
2390        let child_level = core::mem::discriminant(&SparseSubtrieType::from_path(child_path));
2391        current_level == child_level
2392    }
2393
2394    /// Checks if a leaf node at the given path is reachable from its parent branch node.
2395    ///
2396    /// Returns `true` if:
2397    /// - The path is at the root (no parent to check)
2398    /// - The parent branch node has the corresponding `state_mask` bit set for this leaf
2399    ///
2400    /// Returns `false` if the parent is a branch node that doesn't have the `state_mask` bit set
2401    /// for this leaf's nibble, meaning the leaf is not reachable.
2402    fn is_leaf_reachable_from_parent(&self, path: &Nibbles) -> bool {
2403        if path.is_empty() {
2404            return true
2405        }
2406
2407        let parent_path = path.slice(..path.len() - 1);
2408        let leaf_nibble = path.get_unchecked(path.len() - 1);
2409
2410        match self.nodes.get(&parent_path) {
2411            Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2412            _ => false,
2413        }
2414    }
2415
    /// Updates or inserts a leaf node at the specified key path with the provided RLP-encoded
    /// value.
    ///
    /// If the leaf did not previously exist, this method adjusts the trie structure by inserting
    /// new leaf nodes and splitting leaf/extension nodes into branches as needed.
    ///
    /// # Errors
    ///
    /// Returns an error if traversal reaches a blinded node, which would have to be revealed
    /// before the update could proceed.
    ///
    /// Note: the leaf value is only inserted into the value map after all structural changes
    /// succeed, but structural node changes made by earlier traversal steps are not rolled back
    /// on error.
    pub fn update_leaf(&mut self, full_path: Nibbles, value: Vec<u8>) -> SparseTrieResult<()> {
        debug_assert!(full_path.starts_with(&self.path));

        // Check if value already exists - if so, just update it (no structural changes needed)
        if let Entry::Occupied(mut e) = self.inner.values.entry(full_path) {
            e.insert(value);
            return Ok(())
        }

        // Here we are starting at the root of the subtrie, and traversing from there.
        let mut current = Some(self.path);

        // Each step mutates `current_path` to descend one node; the loop ends when the update
        // completes or no node exists at the current path.
        while let Some(current_path) = current.as_mut() {
            match self.update_next_node(current_path, &full_path)? {
                LeafUpdateStep::Continue => {}
                LeafUpdateStep::NodeNotFound | LeafUpdateStep::Complete { .. } => break,
            }
        }

        // Only insert the value after all structural changes succeed
        self.inner.values.insert(full_path, value);

        Ok(())
    }
2455
    /// Processes the current node, returning what to do next in the leaf update process.
    ///
    /// This will add or update any nodes in the trie as necessary.
    ///
    /// `current` is advanced in place to the path of the next node to visit; `path` is the full
    /// path of the leaf being inserted.
    ///
    /// Returns a `LeafUpdateStep` containing the next node to process (if any) and
    /// the paths of nodes that were inserted during this step.
    fn update_next_node(
        &mut self,
        current: &mut Nibbles,
        path: &Nibbles,
    ) -> SparseTrieResult<LeafUpdateStep> {
        debug_assert!(path.starts_with(&self.path));
        debug_assert!(current.starts_with(&self.path));
        debug_assert!(path.starts_with(current));
        let Some(node) = self.nodes.get_mut(current) else {
            return Ok(LeafUpdateStep::NodeNotFound);
        };

        match node {
            SparseNode::Empty => {
                // We need to insert the node with a different path and key depending on the path of
                // the subtrie.
                let path = path.slice(self.path.len()..);
                *node = SparseNode::new_leaf(path);
                Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
            }
            SparseNode::Leaf { key: current_key, .. } => {
                current.extend(current_key);

                // this leaf is being updated
                debug_assert!(current != path, "we already checked leaf presence in the beginning");

                // find the common prefix
                let common = current.common_prefix_length(path);

                // update existing node
                //
                // The existing leaf is replaced with an extension covering the shared prefix; the
                // diverging remainders become two leaves under a new branch.
                let new_ext_key = current.slice(current.len() - current_key.len()..common);
                *node = SparseNode::new_ext(new_ext_key);

                // create a branch node and corresponding leaves
                self.nodes.reserve(3);
                let branch_path = current.slice(..common);
                let new_leaf_path = path.slice(..=common);
                let existing_leaf_path = current.slice(..=common);

                self.nodes.insert(
                    branch_path,
                    SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    ),
                );
                self.nodes.insert(new_leaf_path, SparseNode::new_leaf(path.slice(common + 1..)));
                self.nodes
                    .insert(existing_leaf_path, SparseNode::new_leaf(current.slice(common + 1..)));

                Ok(LeafUpdateStep::complete_with_insertions(vec![
                    branch_path,
                    new_leaf_path,
                    existing_leaf_path,
                ]))
            }
            SparseNode::Extension { key, .. } => {
                current.extend(key);

                // If the leaf path diverges inside the extension, the extension must be split
                // around a new branch; otherwise traversal simply continues past it.
                if !path.starts_with(current) {
                    // find the common prefix
                    let common = current.common_prefix_length(path);
                    *key = current.slice(current.len() - key.len()..common);

                    // create state mask for new branch node
                    // NOTE: this might overwrite the current extension node
                    self.nodes.reserve(3);
                    let branch_path = current.slice(..common);
                    let new_leaf_path = path.slice(..=common);
                    let branch = SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    );

                    self.nodes.insert(branch_path, branch);

                    // create new leaf
                    let new_leaf = SparseNode::new_leaf(path.slice(common + 1..));
                    self.nodes.insert(new_leaf_path, new_leaf);

                    let mut inserted_nodes = vec![branch_path, new_leaf_path];

                    // recreate extension to previous child if needed
                    let key = current.slice(common + 1..);
                    if !key.is_empty() {
                        let ext_path = current.slice(..=common);
                        self.nodes.insert(ext_path, SparseNode::new_ext(key));
                        inserted_nodes.push(ext_path);
                    }

                    return Ok(LeafUpdateStep::complete_with_insertions(inserted_nodes))
                }

                Ok(LeafUpdateStep::Continue)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                let nibble = path.get_unchecked(current.len());
                current.push_unchecked(nibble);

                // No child at this nibble: set the bit and attach the remainder as a new leaf.
                if !state_mask.is_bit_set(nibble) {
                    state_mask.set_bit(nibble);
                    let new_leaf = SparseNode::new_leaf(path.slice(current.len()..));
                    self.nodes.insert(*current, new_leaf);
                    return Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
                }

                // The child exists but is blinded (hash-only), so it cannot be traversed.
                if blinded_mask.is_bit_set(nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(*current).into());
                }

                // If the nibble is set, we can continue traversing the branch.
                Ok(LeafUpdateStep::Continue)
            }
        }
    }
2577
    /// Reveals a branch node at the given path.
    ///
    /// # Arguments
    ///
    /// * `path` - Full path of the branch node
    /// * `state_mask` - Bitmask of which children the branch has
    /// * `children` - RLP nodes for the children, in `state_mask` bit order
    /// * `masks` - Branch node masks if known
    /// * `rlp_node` - Memoized RLP of the branch itself, if known; when present the node is
    ///   inserted as `Cached` instead of `Dirty`
    fn reveal_branch(
        &mut self,
        path: Nibbles,
        state_mask: TrieMask,
        children: &[RlpNode],
        masks: Option<BranchNodeMasks>,
        rlp_node: Option<RlpNode>,
    ) -> SparseTrieResult<()> {
        match self.nodes.entry(path) {
            Entry::Occupied(_) => {
                // Branch already revealed, do nothing
                return Ok(());
            }
            Entry::Vacant(entry) => {
                // With a memoized RLP node the branch starts out cached; `store_in_db_trie` is
                // derived from whether any hash/tree mask bits are set.
                let state =
                    match rlp_node.as_ref() {
                        Some(rlp_node) => SparseNodeState::Cached {
                            rlp_node: rlp_node.clone(),
                            store_in_db_trie: Some(masks.is_some_and(|m| {
                                !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
                            })),
                        },
                        None => SparseNodeState::Dirty,
                    };

                // Record each hash-only child as blinded, keeping its hash on the branch so it
                // can be produced later without revealing the child.
                let mut blinded_mask = TrieMask::default();
                let mut blinded_hashes = Box::new([B256::ZERO; 16]);

                for (stack_ptr, idx) in state_mask.iter().enumerate() {
                    let mut child_path = path;
                    child_path.push_unchecked(idx);
                    let child = &children[stack_ptr];

                    if let Some(hash) = child.as_hash() {
                        blinded_mask.set_bit(idx);
                        blinded_hashes[idx as usize] = hash;
                    }
                }

                entry.insert(SparseNode::Branch {
                    state_mask,
                    state,
                    blinded_mask,
                    blinded_hashes,
                });
            }
        }

        // For a branch node, iterate over all children. This must happen second so leaf
        // children can check connectivity with parent branch.
        for (stack_ptr, idx) in state_mask.iter().enumerate() {
            let mut child_path = path;
            child_path.push_unchecked(idx);
            let child = &children[stack_ptr];
            if !child.is_hash() && Self::is_child_same_level(&path, &child_path) {
                // Reveal each child node or hash it has, but only if the child is on
                // the same level as the parent.
                self.reveal_node(
                    child_path,
                    &TrieNodeV2::decode(&mut child.as_ref())?,
                    None,
                    None,
                )?;
            }
        }

        Ok(())
    }
2647
2648    /// Internal implementation of the method of the same name on `ParallelSparseTrie`.
2649    ///
2650    /// This accepts `hash_from_upper` to handle cases when boundary nodes revealed in lower subtrie
2651    /// but its blinded hash is known from the upper subtrie.
2652    fn reveal_node(
2653        &mut self,
2654        path: Nibbles,
2655        node: &TrieNodeV2,
2656        masks: Option<BranchNodeMasks>,
2657        hash_from_upper: Option<B256>,
2658    ) -> SparseTrieResult<bool> {
2659        debug_assert!(path.starts_with(&self.path));
2660
2661        // If the node is already revealed, do nothing.
2662        if self.nodes.contains_key(&path) {
2663            return Ok(false);
2664        }
2665
2666        // If the hash is provided from the upper subtrie, use it. Otherwise, find the parent branch
2667        // node, unset its blinded bit and use the hash.
2668        let hash = if let Some(hash) = hash_from_upper {
2669            Some(hash)
2670        } else if path.len() != UPPER_TRIE_MAX_DEPTH && !path.is_empty() {
2671            let Some(SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. }) =
2672                self.nodes.get_mut(&path.slice(0..path.len() - 1))
2673            else {
2674                return Ok(false);
2675            };
2676            let nibble = path.last().unwrap();
2677            if !state_mask.is_bit_set(nibble) {
2678                return Ok(false);
2679            }
2680
2681            blinded_mask.is_bit_set(nibble).then(|| {
2682                blinded_mask.unset_bit(nibble);
2683                blinded_hashes[nibble as usize]
2684            })
2685        } else {
2686            None
2687        };
2688
2689        trace!(
2690            target: "trie::parallel_sparse",
2691            ?path,
2692            ?node,
2693            ?masks,
2694            "Revealing node",
2695        );
2696
2697        match node {
2698            TrieNodeV2::EmptyRoot => {
2699                // For an empty root, ensure that we are at the root path, and at the upper subtrie.
2700                debug_assert!(path.is_empty());
2701                debug_assert!(self.path.is_empty());
2702                self.nodes.insert(path, SparseNode::Empty);
2703            }
2704            TrieNodeV2::Branch(branch) => {
2705                if branch.key.is_empty() {
2706                    self.reveal_branch(
2707                        path,
2708                        branch.state_mask,
2709                        &branch.stack,
2710                        masks,
2711                        hash.as_ref().map(RlpNode::word_rlp),
2712                    )?;
2713                    return Ok(true);
2714                }
2715
2716                self.nodes.insert(
2717                    path,
2718                    SparseNode::Extension {
2719                        key: branch.key,
2720                        state: hash
2721                            .as_ref()
2722                            .map(|hash| SparseNodeState::Cached {
2723                                rlp_node: RlpNode::word_rlp(hash),
2724                                // Inherit `store_in_db_trie` from the child branch
2725                                // node masks so that the memoized hash can be used
2726                                // without needing to fetch the child branch.
2727                                store_in_db_trie: Some(masks.is_some_and(|m| {
2728                                    !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
2729                                })),
2730                            })
2731                            .unwrap_or(SparseNodeState::Dirty),
2732                    },
2733                );
2734
2735                let mut branch_path = path;
2736                branch_path.extend(&branch.key);
2737
2738                // Exit early if the actual branch node does not belong to this subtrie.
2739                if !Self::is_child_same_level(&path, &branch_path) {
2740                    return Ok(true);
2741                }
2742
2743                // Reveal the actual branch node.
2744                self.reveal_branch(
2745                    branch_path,
2746                    branch.state_mask,
2747                    &branch.stack,
2748                    masks,
2749                    branch.branch_rlp_node.clone(),
2750                )?;
2751            }
2752            TrieNodeV2::Extension(_) => unreachable!(),
2753            TrieNodeV2::Leaf(leaf) => {
2754                // Skip the reachability check when path.len() == UPPER_TRIE_MAX_DEPTH because
2755                // at that boundary the leaf is in the lower subtrie but its parent branch is in
2756                // the upper subtrie. The subtrie cannot check connectivity across the upper/lower
2757                // boundary, so that check happens in `reveal_nodes` instead.
2758                if path.len() != UPPER_TRIE_MAX_DEPTH && !self.is_leaf_reachable_from_parent(&path)
2759                {
2760                    trace!(
2761                        target: "trie::parallel_sparse",
2762                        ?path,
2763                        "Leaf not reachable from parent branch, skipping",
2764                    );
2765                    return Ok(false)
2766                }
2767
2768                let mut full_key = path;
2769                full_key.extend(&leaf.key);
2770
2771                match self.inner.values.entry(full_key) {
2772                    Entry::Occupied(_) => {
2773                        trace!(
2774                            target: "trie::parallel_sparse",
2775                            ?path,
2776                            ?full_key,
2777                            "Leaf full key value already present, skipping",
2778                        );
2779                        return Ok(false)
2780                    }
2781                    Entry::Vacant(entry) => {
2782                        entry.insert(leaf.value.clone());
2783                    }
2784                }
2785
2786                self.nodes.insert(
2787                    path,
2788                    SparseNode::Leaf {
2789                        key: leaf.key,
2790                        state: hash
2791                            .as_ref()
2792                            .map(|hash| SparseNodeState::Cached {
2793                                rlp_node: RlpNode::word_rlp(hash),
2794                                store_in_db_trie: Some(false),
2795                            })
2796                            .unwrap_or(SparseNodeState::Dirty),
2797                    },
2798                );
2799            }
2800        }
2801
2802        Ok(true)
2803    }
2804
2805    /// Recalculates and updates the RLP hashes for the changed nodes in this subtrie.
2806    ///
2807    /// The function starts from the subtrie root, traverses down to leaves, and then calculates
2808    /// the hashes from leaves back up to the root. It uses a stack from [`SparseSubtrieBuffers`] to
2809    /// track the traversal and accumulate RLP encodings.
2810    ///
2811    /// # Parameters
2812    ///
2813    /// - `prefix_set`: The set of trie paths whose nodes have changed.
2814    /// - `update_actions`: A buffer which `SparseTrieUpdatesAction`s will be written to in the
2815    ///   event that any changes to the top-level updates are required. If None then update
2816    ///   retention is disabled.
2817    /// - `branch_node_masks`: The tree and hash masks for branch nodes.
2818    ///
2819    /// # Returns
2820    ///
2821    /// A tuple containing the root node of the updated subtrie.
2822    ///
2823    /// # Panics
2824    ///
2825    /// If the node at the root path does not exist.
2826    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(root = ?self.path), ret)]
2827    fn update_hashes(
2828        &mut self,
2829        prefix_set: &mut PrefixSet,
2830        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
2831        branch_node_masks: &BranchNodeMasksMap,
2832    ) -> RlpNode {
2833        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");
2834
2835        debug_assert!(prefix_set.iter().all(|path| path.starts_with(&self.path)));
2836
2837        debug_assert!(self.inner.buffers.path_stack.is_empty());
2838        self.inner
2839            .buffers
2840            .path_stack
2841            .push(RlpNodePathStackItem { path: self.path, is_in_prefix_set: None });
2842
2843        while let Some(stack_item) = self.inner.buffers.path_stack.pop() {
2844            let path = stack_item.path;
2845            let node = self
2846                .nodes
2847                .get_mut(&path)
2848                .unwrap_or_else(|| panic!("node at path {path:?} does not exist"));
2849
2850            self.inner.rlp_node(prefix_set, update_actions, stack_item, node, branch_node_masks);
2851        }
2852
2853        debug_assert_eq!(self.inner.buffers.rlp_node_stack.len(), 1);
2854        self.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
2855    }
2856
2857    /// Removes all nodes and values from the subtrie, resetting it to a blank state
2858    /// with only an empty root node. This is used when a storage root is deleted.
2859    fn wipe(&mut self) {
2860        self.nodes.clear();
2861        self.nodes.insert(Nibbles::default(), SparseNode::Empty);
2862        self.inner.clear();
2863    }
2864
2865    /// Clears the subtrie, keeping the data structures allocated.
2866    pub(crate) fn clear(&mut self) {
2867        self.nodes.clear();
2868        self.inner.clear();
2869    }
2870
    /// Shrinks the capacity of the subtrie's node storage.
    ///
    /// Keeps at least `size` capacity (and enough for the entries currently stored), releasing
    /// the rest back to the allocator.
    pub(crate) fn shrink_nodes_to(&mut self, size: usize) {
        self.nodes.shrink_to(size);
    }
2875
    /// Shrinks the capacity of the subtrie's value storage.
    ///
    /// Keeps at least `size` capacity (and enough for the entries currently stored), releasing
    /// the rest back to the allocator.
    pub(crate) fn shrink_values_to(&mut self, size: usize) {
        self.inner.values.shrink_to(size);
    }
2880
2881    /// Returns a heuristic for the in-memory size of this subtrie in bytes.
2882    pub(crate) fn memory_size(&self) -> usize {
2883        let mut size = core::mem::size_of::<Self>();
2884
2885        // Nodes map: key (Nibbles) + value (SparseNode)
2886        for (path, node) in &self.nodes {
2887            size += core::mem::size_of::<Nibbles>();
2888            size += path.len(); // Nibbles heap allocation
2889            size += node.memory_size();
2890        }
2891
2892        // Values map: key (Nibbles) + value (Vec<u8>)
2893        for (path, value) in &self.inner.values {
2894            size += core::mem::size_of::<Nibbles>();
2895            size += path.len(); // Nibbles heap allocation
2896            size += core::mem::size_of::<Vec<u8>>() + value.capacity();
2897        }
2898
2899        // Buffers
2900        size += self.inner.buffers.memory_size();
2901
2902        size
2903    }
2904}
2905
/// Helper type for [`SparseSubtrie`] to mutably access only a subset of fields from the original
/// struct.
///
/// Splitting these fields out of [`SparseSubtrie`] lets `update_hashes` borrow the node map
/// mutably while simultaneously mutating the values and buffers through this type.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
struct SparseSubtrieInner {
    /// Map from leaf key paths to their values.
    /// All values are stored here instead of directly in leaf nodes.
    values: HashMap<Nibbles, Vec<u8>>,
    /// Reusable buffers for [`SparseSubtrie::update_hashes`].
    buffers: SparseSubtrieBuffers,
}
2916
impl SparseSubtrieInner {
    /// Computes the RLP encoding and its hash for a single trie node ([`SparseNode`]).
    ///
    /// # Deferred Processing
    ///
    /// When an extension or a branch node depends on child nodes that haven't been computed yet,
    /// the function pushes the current node back onto the path stack along with its children,
    /// then returns early. This allows the iterative algorithm to process children first before
    /// retrying the parent.
    ///
    /// # Parameters
    ///
    /// - `prefix_set`: Set of prefixes (key paths) that have been marked as updated
    /// - `update_actions`: A buffer which `SparseTrieUpdatesAction`s will be written to in the
    ///   event that any changes to the top-level updates are required. If None then update
    ///   retention is disabled.
    /// - `stack_item`: The stack item to process
    /// - `node`: The sparse node to process (will be mutated to update hash)
    /// - `branch_node_masks`: The tree and hash masks for branch nodes.
    ///
    /// # Side Effects
    ///
    /// - Updates the node's hash field after computing RLP
    /// - Pushes nodes to [`SparseSubtrieBuffers::path_stack`] to manage traversal
    /// - May push items onto the path stack for deferred processing
    ///
    /// # Exit condition
    ///
    /// Once all nodes have been processed and all RLPs and hashes calculated, pushes the root node
    /// onto the [`SparseSubtrieBuffers::rlp_node_stack`] and exits.
    fn rlp_node(
        &mut self,
        prefix_set: &mut PrefixSet,
        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
        mut stack_item: RlpNodePathStackItem,
        node: &mut SparseNode,
        branch_node_masks: &BranchNodeMasksMap,
    ) {
        let path = stack_item.path;
        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            "Calculating node RLP"
        );

        // Check if the path is in the prefix set.
        // First, check the cached value. If it's `None`, then check the prefix set, and update
        // the cached value. The cache lives on the stack item, so the lookup is done at most once
        // per item even when this node is deferred and revisited.
        let mut prefix_set_contains = |path: &Nibbles| {
            *stack_item.is_in_prefix_set.get_or_insert_with(|| prefix_set.contains(path))
        };

        let (rlp_node, node_type) = match node {
            SparseNode::Empty => (RlpNode::word_rlp(&EMPTY_ROOT_HASH), SparseNodeType::Empty),
            SparseNode::Leaf { key, state } => {
                // `path` now becomes the full leaf key (node path + remaining leaf key).
                let mut path = path;
                path.extend(key);
                let value = self.values.get(&path);

                // Check if we should use cached RLP:
                // - If there's a cached RLP and the path is not in prefix_set, use cached
                // - If the value is not in this subtrie's values (e.g., lower subtrie leaf being
                //   processed via upper subtrie), we must use cached RLP
                let cached_rlp_node = state.cached_rlp_node();
                let use_cached =
                    cached_rlp_node.is_some() && (!prefix_set_contains(&path) || value.is_none());

                if let Some(rlp_node) = use_cached.then(|| cached_rlp_node.unwrap()) {
                    // Return the cached RLP
                    (rlp_node.clone(), SparseNodeType::Leaf)
                } else {
                    // Encode the leaf node and update its RlpNode
                    let value = value.expect("leaf value must exist in subtrie");
                    self.buffers.rlp_buf.clear();
                    let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.buffers.rlp_buf);
                    // Leaves are never stored in the DB trie, hence `Some(false)`.
                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: Some(false),
                    };
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?key,
                        value = %alloy_primitives::hex::encode(value),
                        ?rlp_node,
                        "Calculated leaf RLP node",
                    );
                    (rlp_node, SparseNodeType::Leaf)
                }
            }
            SparseNode::Extension { key, state } => {
                let mut child_path = path;
                child_path.extend(key);
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    // If the node is already computed, and the node path is not in
                    // the prefix set, return the pre-computed node
                    (
                        rlp_node.clone(),
                        SparseNodeType::Extension { store_in_db_trie: Some(store_in_db_trie) },
                    )
                } else if self.buffers.rlp_node_stack.last().is_some_and(|e| e.path == child_path) {
                    // Top of the stack has the child node, we can encode the extension node and
                    // update its hash
                    let RlpNodeStackItem { path: _, rlp_node: child, node_type: child_node_type } =
                        self.buffers.rlp_node_stack.pop().unwrap();
                    self.buffers.rlp_buf.clear();
                    let rlp_node =
                        ExtensionNodeRef::new(key, &child).rlp(&mut self.buffers.rlp_buf);

                    let store_in_db_trie_value = child_node_type.store_in_db_trie();

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?child_path,
                        ?child_node_type,
                        "Extension node"
                    );

                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: store_in_db_trie_value,
                    };

                    (
                        rlp_node,
                        SparseNodeType::Extension {
                            // Inherit the `store_in_db_trie` flag from the child node, which is
                            // always the branch node
                            store_in_db_trie: store_in_db_trie_value,
                        },
                    )
                } else {
                    // Need to defer processing until child is computed, on the next
                    // invocation update the node's hash.
                    self.buffers.path_stack.extend([
                        RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        },
                        RlpNodePathStackItem { path: child_path, is_in_prefix_set: None },
                    ]);
                    return
                }
            }
            SparseNode::Branch { state_mask, state, blinded_mask, blinded_hashes } => {
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    let node_type =
                        SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie) };

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?node_type,
                        ?rlp_node,
                        "Adding node to RLP node stack (cached branch)"
                    );

                    // If the node hash is already computed, and the node path is not in
                    // the prefix set, return the pre-computed hash
                    self.buffers.rlp_node_stack.push(RlpNodeStackItem {
                        path,
                        rlp_node: rlp_node.clone(),
                        node_type,
                    });
                    return
                }

                let retain_updates = update_actions.is_some() && prefix_set_contains(&path);

                self.buffers.branch_child_buf.clear();
                // Walk children in a reverse order from `f` to `0`, so we pop the `0` first
                // from the stack and keep walking in the sorted order.
                // Blinded children have their hashes recorded already and are never queued.
                for bit in state_mask.iter().rev() {
                    let mut child = path;
                    child.push_unchecked(bit);

                    if !blinded_mask.is_bit_set(bit) {
                        self.buffers.branch_child_buf.push(child);
                    }
                }

                self.buffers.branch_value_stack_buf.resize(state_mask.len(), Default::default());

                let mut tree_mask = TrieMask::default();
                let mut hash_mask = TrieMask::default();
                let mut hashes = Vec::new();

                // Lazy lookup for branch node masks - shared across loop iterations
                let mut path_masks_storage = None;
                let mut path_masks =
                    || *path_masks_storage.get_or_insert_with(|| branch_node_masks.get(&path));

                for (i, child_nibble) in state_mask.iter().enumerate().rev() {
                    let mut child_path = path;
                    child_path.push_unchecked(child_nibble);

                    let (child, child_node_type) = if blinded_mask.is_bit_set(child_nibble) {
                        // Blinded children are represented directly by their recorded hash.
                        (
                            RlpNode::word_rlp(&blinded_hashes[child_nibble as usize]),
                            SparseNodeType::Hash,
                        )
                    } else if self
                        .buffers
                        .rlp_node_stack
                        .last()
                        .is_some_and(|e| e.path == child_path)
                    {
                        let RlpNodeStackItem { path: _, rlp_node, node_type } =
                            self.buffers.rlp_node_stack.pop().unwrap();

                        (rlp_node, node_type)
                    } else {
                        // Need to defer processing until children are computed, on the next
                        // invocation update the node's hash.
                        self.buffers.path_stack.push(RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        });
                        self.buffers.path_stack.extend(
                            self.buffers
                                .branch_child_buf
                                .drain(..)
                                .map(|path| RlpNodePathStackItem { path, is_in_prefix_set: None }),
                        );
                        return
                    };

                    // Update the masks only if we need to retain trie updates
                    if retain_updates {
                        // Determine whether we need to set trie mask bit.
                        let should_set_tree_mask_bit =
                            if let Some(store_in_db_trie) = child_node_type.store_in_db_trie() {
                                // A branch or an extension node explicitly set the
                                // `store_in_db_trie` flag
                                store_in_db_trie
                            } else {
                                // A blinded node has the tree mask bit set
                                child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.tree_mask.is_bit_set(child_nibble)
                                    })
                            };
                        if should_set_tree_mask_bit {
                            tree_mask.set_bit(child_nibble);
                        }
                        // Set the hash mask. If a child node is a revealed branch node OR
                        // is a blinded node that has its hash mask bit set according to the
                        // database, set the hash mask bit and save the hash.
                        let hash = child.as_hash().filter(|_| {
                            child_node_type.is_branch() ||
                                (child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.hash_mask.is_bit_set(child_nibble)
                                    }))
                        });
                        if let Some(hash) = hash {
                            hash_mask.set_bit(child_nibble);
                            hashes.push(hash);
                        }
                    }

                    // Insert children in the resulting buffer in a normal order,
                    // because initially we iterated in reverse.
                    // SAFETY: i < len and len is never 0
                    self.buffers.branch_value_stack_buf[i] = child;
                }

                trace!(
                    target: "trie::parallel_sparse",
                    ?path,
                    ?tree_mask,
                    ?hash_mask,
                    "Branch node masks"
                );

                // Top of the stack has all children node, we can encode the branch node and
                // update its hash
                self.buffers.rlp_buf.clear();
                let branch_node_ref =
                    BranchNodeRef::new(&self.buffers.branch_value_stack_buf, *state_mask);
                let rlp_node = branch_node_ref.rlp(&mut self.buffers.rlp_buf);

                // Save a branch node update only if it's not a root node, and we need to
                // persist updates.
                let store_in_db_trie_value = if let Some(update_actions) =
                    update_actions.as_mut().filter(|_| retain_updates && !path.is_empty())
                {
                    let store_in_db_trie = !tree_mask.is_empty() || !hash_mask.is_empty();
                    if store_in_db_trie {
                        // Store in DB trie if there are either any children that are stored in
                        // the DB trie, or any children represent hashed values
                        // Children were walked from `f` down to `0`, so restore ascending order.
                        hashes.reverse();
                        let branch_node =
                            BranchNodeCompact::new(*state_mask, tree_mask, hash_mask, hashes, None);
                        update_actions
                            .push(SparseTrieUpdatesAction::InsertUpdated(path, branch_node));
                    } else {
                        // New tree and hash masks are empty - check previous state
                        let prev_had_masks = path_masks()
                            .is_some_and(|m| !m.tree_mask.is_empty() || !m.hash_mask.is_empty());
                        if prev_had_masks {
                            // Previously had masks, now empty - mark as removed
                            update_actions.push(SparseTrieUpdatesAction::InsertRemoved(path));
                        } else {
                            // Previously empty too - just remove the update
                            update_actions.push(SparseTrieUpdatesAction::RemoveUpdated(path));
                        }
                    }

                    store_in_db_trie
                } else {
                    false
                };

                *state = SparseNodeState::Cached {
                    rlp_node: rlp_node.clone(),
                    store_in_db_trie: Some(store_in_db_trie_value),
                };

                (
                    rlp_node,
                    SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie_value) },
                )
            }
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node_type,
            ?rlp_node,
            "Adding node to RLP node stack"
        );
        self.buffers.rlp_node_stack.push(RlpNodeStackItem { path, rlp_node, node_type });
    }

    /// Clears the subtrie, keeping the data structures allocated.
    fn clear(&mut self) {
        self.values.clear();
        self.buffers.clear();
    }
}
3269
/// Represents the outcome of processing a node during leaf insertion
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub enum LeafUpdateStep {
    /// Continue traversing to the next node
    Continue,
    /// Update is complete with nodes inserted
    Complete {
        /// The node paths that were inserted during this step
        inserted_nodes: Vec<Nibbles>,
    },
    /// The node was not found; this is the default outcome.
    #[default]
    NodeNotFound,
}
3284
impl LeafUpdateStep {
    /// Creates a step indicating completion with inserted nodes.
    ///
    /// Convenience constructor for [`Self::Complete`].
    pub const fn complete_with_insertions(inserted_nodes: Vec<Nibbles>) -> Self {
        Self::Complete { inserted_nodes }
    }
}
3291
/// Sparse Subtrie Type.
///
/// Used to determine the type of subtrie a certain path belongs to:
/// - Paths in the range `0x..=0xf` belong to the upper subtrie.
/// - Paths in the range `0x00..` belong to one of the lower subtries. The index of the lower
///   subtrie is determined by the first [`UPPER_TRIE_MAX_DEPTH`] nibbles of the path.
///
/// There can be at most [`NUM_LOWER_SUBTRIES`] lower subtries.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SparseSubtrieType {
    /// Upper subtrie with paths in the range `0x..=0xf`
    Upper,
    /// Lower subtrie with paths in the range `0x00..`. Includes the index of the subtrie,
    /// according to the path prefix.
    Lower(usize),
}
3308
3309impl SparseSubtrieType {
3310    /// Returns true if a node at a path of the given length would be placed in the upper subtrie.
3311    ///
3312    /// Nodes with paths shorter than [`UPPER_TRIE_MAX_DEPTH`] nibbles belong to the upper subtrie,
3313    /// while longer paths belong to the lower subtries.
3314    pub const fn path_len_is_upper(len: usize) -> bool {
3315        len < UPPER_TRIE_MAX_DEPTH
3316    }
3317
3318    /// Returns the type of subtrie based on the given path.
3319    pub fn from_path(path: &Nibbles) -> Self {
3320        if Self::path_len_is_upper(path.len()) {
3321            Self::Upper
3322        } else {
3323            Self::Lower(path_subtrie_index_unchecked(path))
3324        }
3325    }
3326
3327    /// Returns the index of the lower subtrie, if it exists.
3328    pub const fn lower_index(&self) -> Option<usize> {
3329        match self {
3330            Self::Upper => None,
3331            Self::Lower(index) => Some(*index),
3332        }
3333    }
3334}
3335
3336impl Ord for SparseSubtrieType {
3337    /// Orders two [`SparseSubtrieType`]s such that `Upper` is less than `Lower(_)`, and `Lower`s
3338    /// are ordered by their index.
3339    fn cmp(&self, other: &Self) -> Ordering {
3340        match (self, other) {
3341            (Self::Upper, Self::Upper) => Ordering::Equal,
3342            (Self::Upper, Self::Lower(_)) => Ordering::Less,
3343            (Self::Lower(_), Self::Upper) => Ordering::Greater,
3344            (Self::Lower(idx_a), Self::Lower(idx_b)) if idx_a == idx_b => Ordering::Equal,
3345            (Self::Lower(idx_a), Self::Lower(idx_b)) => idx_a.cmp(idx_b),
3346        }
3347    }
3348}
3349
impl PartialOrd for SparseSubtrieType {
    /// Delegates to [`Ord::cmp`]; the ordering is total, so this always returns `Some`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
3355
/// Collection of reusable buffers for calculating subtrie hashes.
///
/// These buffers reduce allocations when computing RLP representations during trie updates.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrieBuffers {
    /// Stack of RLP node paths still awaiting processing during the iterative traversal.
    path_stack: Vec<RlpNodePathStackItem>,
    /// Stack of computed RLP nodes, consumed by parent nodes as their children complete.
    rlp_node_stack: Vec<RlpNodeStackItem>,
    /// Reusable branch child path
    branch_child_buf: Vec<Nibbles>,
    /// Reusable branch value stack
    branch_value_stack_buf: Vec<RlpNode>,
    /// Reusable RLP buffer
    rlp_buf: Vec<u8>,
}
3372
3373impl SparseSubtrieBuffers {
3374    /// Clears all buffers.
3375    fn clear(&mut self) {
3376        self.path_stack.clear();
3377        self.rlp_node_stack.clear();
3378        self.branch_child_buf.clear();
3379        self.branch_value_stack_buf.clear();
3380        self.rlp_buf.clear();
3381    }
3382
3383    /// Returns a heuristic for the in-memory size of these buffers in bytes.
3384    const fn memory_size(&self) -> usize {
3385        let mut size = core::mem::size_of::<Self>();
3386
3387        size += self.path_stack.capacity() * core::mem::size_of::<RlpNodePathStackItem>();
3388        size += self.rlp_node_stack.capacity() * core::mem::size_of::<RlpNodeStackItem>();
3389        size += self.branch_child_buf.capacity() * core::mem::size_of::<Nibbles>();
3390        size += self.branch_value_stack_buf.capacity() * core::mem::size_of::<RlpNode>();
3391        size += self.rlp_buf.capacity();
3392
3393        size
3394    }
3395}
3396
/// RLP node path stack item.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RlpNodePathStackItem {
    /// Path to the node.
    pub path: Nibbles,
    /// Whether the path is in the prefix set. If [`None`], then unknown yet.
    /// Caches the lookup result so revisiting a deferred item does not re-query the prefix set.
    pub is_in_prefix_set: Option<bool>,
}
3405
/// A lower subtrie that contains changed nodes and must have its hashes recomputed.
#[derive(Debug)]
struct ChangedSubtrie {
    /// Lower subtrie index in the range [0, [`NUM_LOWER_SUBTRIES`]).
    index: usize,
    /// Changed subtrie
    subtrie: Box<SparseSubtrie>,
    /// Prefix set of keys that belong to the subtrie.
    prefix_set: PrefixSet,
    /// Reusable buffer for collecting [`SparseTrieUpdatesAction`]s during computations. Will be
    /// None if update retention is disabled.
    update_actions_buf: Option<Vec<SparseTrieUpdatesAction>>,
}
3419
/// Convert first [`UPPER_TRIE_MAX_DEPTH`] nibbles of the path into a lower subtrie index in the
/// range [0, [`NUM_LOWER_SUBTRIES`]).
///
/// # Panics
///
/// If the path is shorter than [`UPPER_TRIE_MAX_DEPTH`] nibbles.
fn path_subtrie_index_unchecked(path: &Nibbles) -> usize {
    // Reading a whole byte below covers exactly two nibbles, so this only works for depth 2.
    debug_assert_eq!(UPPER_TRIE_MAX_DEPTH, 2);
    // The first byte of the path packs its first two nibbles.
    let idx = path.get_byte_unchecked(0) as usize;
    // SAFETY: always true — a byte is at most 255 and `NUM_LOWER_SUBTRIES` is 16^2 = 256.
    unsafe { core::hint::assert_unchecked(idx < NUM_LOWER_SUBTRIES) };
    idx
}
3433
3434/// Checks if `path` is a strict descendant of any root in a sorted slice.
3435///
3436/// Uses binary search to find the candidate root that could be an ancestor.
3437/// Returns `true` if `path` starts with a root and is longer (strict descendant).
3438fn is_strict_descendant_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3439    if roots.is_empty() {
3440        return false;
3441    }
3442    debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3443    let idx = roots.partition_point(|root| root <= path);
3444    if idx > 0 {
3445        let candidate = &roots[idx - 1];
3446        if path.starts_with(candidate) && path.len() > candidate.len() {
3447            return true;
3448        }
3449    }
3450    false
3451}
3452
3453/// Returns true if any retained leaf path has `prefix` as a prefix.
3454///
3455/// The `retained` slice must be sorted.
3456fn has_retained_descendant(retained: &[Nibbles], prefix: &Nibbles) -> bool {
3457    if retained.is_empty() {
3458        return false;
3459    }
3460    debug_assert!(retained.windows(2).all(|w| w[0] <= w[1]), "retained must be sorted by path");
3461    let idx = retained.partition_point(|path| path < prefix);
3462    idx < retained.len() && retained[idx].starts_with(prefix)
3463}
3464
3465/// Checks if `path` starts with any root in a sorted slice (inclusive).
3466///
3467/// Uses binary search to find the candidate root that could be a prefix.
3468/// Returns `true` if `path` starts with a root (including exact match).
3469fn starts_with_pruned_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3470    if roots.is_empty() {
3471        return false;
3472    }
3473    debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3474    let idx = roots.partition_point(|root| root <= path);
3475    if idx > 0 {
3476        let candidate = &roots[idx - 1];
3477        if path.starts_with(candidate) {
3478            return true;
3479        }
3480    }
3481    false
3482}
3483
/// Used by lower subtries to communicate updates to the top-level [`SparseTrieUpdates`] set.
///
/// Actions are buffered per changed subtrie (see `ChangedSubtrie::update_actions_buf`)
/// while subtries are processed, rather than mutating the shared update set directly.
#[derive(Clone, Debug, Eq, PartialEq)]
enum SparseTrieUpdatesAction {
    /// Remove the path from the `updated_nodes`, if it was present, and add it to `removed_nodes`.
    InsertRemoved(Nibbles),
    /// Remove the path from the `updated_nodes`, if it was present, leaving `removed_nodes`
    /// unaffected.
    RemoveUpdated(Nibbles),
    /// Insert the branch node into `updated_nodes`.
    InsertUpdated(Nibbles, BranchNodeCompact),
}
3495
3496#[cfg(test)]
3497mod tests {
3498    use super::{
3499        path_subtrie_index_unchecked, LowerSparseSubtrie, ParallelSparseTrie, SparseSubtrie,
3500        SparseSubtrieType,
3501    };
3502    use crate::{
3503        parallel::ChangedSubtrie,
3504        provider::{DefaultTrieNodeProvider, NoRevealProvider},
3505        trie::SparseNodeState,
3506        LeafLookup, LeafLookupError, SparseNode, SparseTrie, SparseTrieUpdates,
3507    };
3508    use alloy_primitives::{
3509        b256, hex,
3510        map::{B256Set, HashMap},
3511        B256, U256,
3512    };
3513    use alloy_rlp::{Decodable, Encodable};
3514    use alloy_trie::{proof::AddedRemovedKeys, BranchNodeCompact, Nibbles};
3515    use assert_matches::assert_matches;
3516    use itertools::Itertools;
3517    use proptest::{prelude::*, sample::SizeRange};
3518    use proptest_arbitrary_interop::arb;
3519    use reth_execution_errors::SparseTrieErrorKind;
3520    use reth_primitives_traits::Account;
3521    use reth_provider::{
3522        test_utils::create_test_provider_factory, StorageSettingsCache, TrieWriter,
3523    };
3524    use reth_trie::{
3525        hashed_cursor::{noop::NoopHashedCursor, HashedPostStateCursor},
3526        node_iter::{TrieElement, TrieNodeIter},
3527        trie_cursor::{noop::NoopAccountTrieCursor, TrieCursor, TrieCursorFactory},
3528        walker::TrieWalker,
3529        HashedPostState,
3530    };
3531    use reth_trie_common::{
3532        prefix_set::PrefixSetMut,
3533        proof::{ProofNodes, ProofRetainer},
3534        updates::TrieUpdates,
3535        BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, BranchNodeV2, ExtensionNode,
3536        HashBuilder, LeafNode, ProofTrieNodeV2, RlpNode, TrieMask, TrieNode, TrieNodeV2,
3537        EMPTY_ROOT_HASH,
3538    };
3539    use reth_trie_db::DatabaseTrieCursorFactory;
3540    use std::collections::{BTreeMap, BTreeSet};
3541
3542    /// Pad nibbles to the length of a B256 hash with zeros on the right.
3543    fn pad_nibbles_right(mut nibbles: Nibbles) -> Nibbles {
3544        nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![
3545            0;
3546            B256::len_bytes() * 2 - nibbles.len()
3547        ]));
3548        nibbles
3549    }
3550
3551    /// Create a leaf key (suffix) for a leaf at a given position depth.
3552    /// `suffix` contains the non-zero nibbles, padded with zeros to reach `total_len`.
3553    fn leaf_key(suffix: impl AsRef<[u8]>, total_len: usize) -> Nibbles {
3554        let suffix = suffix.as_ref();
3555        let mut nibbles = Nibbles::from_nibbles(suffix);
3556        nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![0; total_len - suffix.len()]));
3557        nibbles
3558    }
3559
3560    fn create_account(nonce: u64) -> Account {
3561        Account { nonce, ..Default::default() }
3562    }
3563
3564    fn large_account_value() -> Vec<u8> {
3565        let account = Account {
3566            nonce: 0x123456789abcdef,
3567            balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
3568            ..Default::default()
3569        };
3570        let mut buf = Vec::new();
3571        account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
3572        buf
3573    }
3574
3575    fn encode_account_value(nonce: u64) -> Vec<u8> {
3576        let account = Account { nonce, ..Default::default() };
3577        let trie_account = account.into_trie_account(EMPTY_ROOT_HASH);
3578        let mut buf = Vec::new();
3579        trie_account.encode(&mut buf);
3580        buf
3581    }
3582
    /// Test context that provides helper methods for trie testing
    ///
    /// Stateless; every method takes the trie under test as an argument.
    #[derive(Default)]
    struct ParallelSparseTrieTestContext;
3586
    impl ParallelSparseTrieTestContext {
        /// Assert that a lower subtrie exists at the given path
        fn assert_subtrie_exists(&self, trie: &ParallelSparseTrie, path: &Nibbles) {
            // Lower subtries are indexed by the first two nibbles of the path.
            let idx = path_subtrie_index_unchecked(path);
            assert!(
                trie.lower_subtries[idx].as_revealed_ref().is_some(),
                "Expected lower subtrie at path {path:?} to exist",
            );
        }

        /// Get a lower subtrie, panicking if it doesn't exist
        fn get_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: &Nibbles,
        ) -> &'a SparseSubtrie {
            let idx = path_subtrie_index_unchecked(path);
            trie.lower_subtries[idx]
                .as_revealed_ref()
                .unwrap_or_else(|| panic!("Lower subtrie at path {path:?} should exist"))
        }

        /// Assert that a lower subtrie has a specific path field value
        ///
        /// `subtrie_prefix` selects the subtrie; `expected_path` is the root path the
        /// subtrie is expected to record.
        fn assert_subtrie_path(
            &self,
            trie: &ParallelSparseTrie,
            subtrie_prefix: impl AsRef<[u8]>,
            expected_path: impl AsRef<[u8]>,
        ) {
            let subtrie_prefix = Nibbles::from_nibbles(subtrie_prefix);
            let expected_path = Nibbles::from_nibbles(expected_path);
            let idx = path_subtrie_index_unchecked(&subtrie_prefix);

            let subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap_or_else(|| {
                panic!("Lower subtrie at prefix {subtrie_prefix:?} should exist")
            });

            assert_eq!(
                subtrie.path, expected_path,
                "Subtrie at prefix {subtrie_prefix:?} should have path {expected_path:?}, but has {:?}",
                subtrie.path
            );
        }

        /// Create test leaves with consecutive account values
        ///
        /// The leaf at position `i` gets an encoded account value with nonce `i + 1`,
        /// and each path is right-padded to full key length.
        fn create_test_leaves(&self, paths: &[&[u8]]) -> Vec<(Nibbles, Vec<u8>)> {
            paths
                .iter()
                .enumerate()
                .map(|(i, path)| {
                    (
                        pad_nibbles_right(Nibbles::from_nibbles(path)),
                        encode_account_value(i as u64 + 1),
                    )
                })
                .collect()
        }

        /// Create a single test leaf with the given path and value nonce
        fn create_test_leaf(&self, path: impl AsRef<[u8]>, value_nonce: u64) -> (Nibbles, Vec<u8>) {
            (pad_nibbles_right(Nibbles::from_nibbles(path)), encode_account_value(value_nonce))
        }

        /// Update multiple leaves in the trie
        ///
        /// Panics if any individual leaf update fails.
        fn update_leaves(
            &self,
            trie: &mut ParallelSparseTrie,
            leaves: impl IntoIterator<Item = (Nibbles, Vec<u8>)>,
        ) {
            for (path, value) in leaves {
                trie.update_leaf(path, value, DefaultTrieNodeProvider).unwrap();
            }
        }

        /// Create an assertion builder for a subtrie
        ///
        /// Asserts the subtrie exists before returning the builder.
        fn assert_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: Nibbles,
        ) -> SubtrieAssertion<'a> {
            self.assert_subtrie_exists(trie, &path);
            let subtrie = self.get_subtrie(trie, &path);
            SubtrieAssertion::new(subtrie)
        }

        /// Create an assertion builder for the upper subtrie
        fn assert_upper_subtrie<'a>(&self, trie: &'a ParallelSparseTrie) -> SubtrieAssertion<'a> {
            SubtrieAssertion::new(&trie.upper_subtrie)
        }

        /// Assert the root, trie updates, and nodes against the hash builder output.
        fn assert_with_hash_builder(
            &self,
            trie: &mut ParallelSparseTrie,
            hash_builder_root: B256,
            hash_builder_updates: TrieUpdates,
            hash_builder_proof_nodes: ProofNodes,
        ) {
            assert_eq!(trie.root(), hash_builder_root);
            // Compare as sorted maps for a deterministic, readable diff on failure.
            pretty_assertions::assert_eq!(
                BTreeMap::from_iter(trie.updates_ref().updated_nodes.clone()),
                BTreeMap::from_iter(hash_builder_updates.account_nodes)
            );
            assert_eq_parallel_sparse_trie_proof_nodes(trie, hash_builder_proof_nodes);
        }
    }
3693
    /// Assertion builder for subtrie structure
    ///
    /// Wraps a subtrie and exposes chainable `has_*` assertion methods.
    struct SubtrieAssertion<'a> {
        subtrie: &'a SparseSubtrie,
    }
3698
    impl<'a> SubtrieAssertion<'a> {
        /// Wraps the given subtrie for fluent assertions.
        fn new(subtrie: &'a SparseSubtrie) -> Self {
            Self { subtrie }
        }

        /// Asserts a branch node exists at `path` with at least the given state-mask
        /// bits set; returns `self` for chaining.
        fn has_branch(self, path: &Nibbles, expected_mask_bits: &[u8]) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Branch { state_mask, .. }) => {
                    for bit in expected_mask_bits {
                        assert!(
                            state_mask.is_bit_set(*bit),
                            "Expected branch at {path:?} to have bit {bit} set, instead mask is: {state_mask:?}",
                        );
                    }
                }
                node => panic!("Expected branch node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts a leaf node exists at `path` with the given key; returns `self` for
        /// chaining.
        fn has_leaf(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Leaf { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected leaf at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected leaf node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts an extension node exists at `path` with the given key; returns
        /// `self` for chaining.
        fn has_extension(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Extension { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected extension at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected extension node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts the subtrie stores exactly `expected_value` at `path`; returns
        /// `self` for chaining.
        fn has_value(self, path: &Nibbles, expected_value: &[u8]) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert_eq!(
                actual.map(|v| v.as_slice()),
                Some(expected_value),
                "Expected value at {path:?} to be {expected_value:?}, found {actual:?}",
            );
            self
        }

        /// Asserts the subtrie stores no value at `path`; returns `self` for chaining.
        fn has_no_value(self, path: &Nibbles) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert!(actual.is_none(), "Expected no value at {path:?}, but found {actual:?}");
            self
        }
    }
3761
3762    fn create_leaf_node(key: impl AsRef<[u8]>, value_nonce: u64) -> TrieNodeV2 {
3763        TrieNodeV2::Leaf(LeafNode::new(
3764            Nibbles::from_nibbles(key),
3765            encode_account_value(value_nonce),
3766        ))
3767    }
3768
3769    fn create_branch_node(
3770        key: Nibbles,
3771        children_indices: &[u8],
3772        child_hashes: impl IntoIterator<Item = RlpNode>,
3773    ) -> TrieNodeV2 {
3774        let mut stack = Vec::new();
3775        let mut state_mask = TrieMask::default();
3776
3777        for (&idx, hash) in children_indices.iter().zip(child_hashes) {
3778            state_mask.set_bit(idx);
3779            stack.push(hash);
3780        }
3781
3782        let branch_rlp_node = if key.is_empty() {
3783            None
3784        } else {
3785            Some(RlpNode::from_rlp(&alloy_rlp::encode(BranchNodeRef::new(&stack, state_mask))))
3786        };
3787
3788        TrieNodeV2::Branch(BranchNodeV2::new(key, stack, state_mask, branch_rlp_node))
3789    }
3790
    /// Creates a root-style branch node (empty key) with the given children.
    ///
    /// Convenience wrapper around `create_branch_node`; with an empty key no RLP node
    /// is precomputed for the branch itself.
    fn create_branch_node_with_children(
        children_indices: &[u8],
        child_hashes: impl IntoIterator<Item = RlpNode>,
    ) -> TrieNodeV2 {
        create_branch_node(Nibbles::default(), children_indices, child_hashes)
    }
3797
    /// Calculate the state root by feeding the provided state to the hash builder and retaining the
    /// proofs for the provided targets.
    ///
    /// Returns the state root and the retained proof nodes.
    ///
    /// Also returns the finalized trie updates and, per updated branch node, its hash
    /// mask and tree mask.
    fn run_hash_builder(
        state: impl IntoIterator<Item = (Nibbles, Account)> + Clone,
        trie_cursor: impl TrieCursor,
        destroyed_accounts: B256Set,
        proof_targets: impl IntoIterator<Item = Nibbles>,
    ) -> (B256, TrieUpdates, ProofNodes, HashMap<Nibbles, TrieMask>, HashMap<Nibbles, TrieMask>)
    {
        let mut account_rlp = Vec::new();

        let mut hash_builder = HashBuilder::default()
            .with_updates(true)
            .with_proof_retainer(ProofRetainer::from_iter(proof_targets).with_added_removed_keys(
                Some(AddedRemovedKeys::default().with_assume_added(true)),
            ));

        // Seed the prefix set with every changed and destroyed key so the walker
        // visits them all.
        let mut prefix_set = PrefixSetMut::default();
        prefix_set.extend_keys(state.clone().into_iter().map(|(nibbles, _)| nibbles));
        prefix_set.extend_keys(destroyed_accounts.iter().map(Nibbles::unpack));
        let walker = TrieWalker::<_>::state_trie(trie_cursor, prefix_set.freeze())
            .with_deletions_retained(true);
        let hashed_post_state = HashedPostState::default()
            .with_accounts(state.into_iter().map(|(nibbles, account)| {
                (nibbles.pack().into_inner().unwrap().into(), Some(account))
            }))
            .into_sorted();
        let mut node_iter = TrieNodeIter::state_trie(
            walker,
            HashedPostStateCursor::new_account(
                NoopHashedCursor::<Account>::default(),
                &hashed_post_state,
            ),
        );

        // Feed branches and RLP-encoded leaves to the hash builder in trie order.
        while let Some(node) = node_iter.try_next().unwrap() {
            match node {
                TrieElement::Branch(branch) => {
                    hash_builder.add_branch(branch.key, branch.value, branch.children_are_in_trie);
                }
                TrieElement::Leaf(key, account) => {
                    let account = account.into_trie_account(EMPTY_ROOT_HASH);
                    account.encode(&mut account_rlp);

                    hash_builder.add_leaf(Nibbles::unpack(key), &account_rlp);
                    // Reuse the RLP buffer across leaves.
                    account_rlp.clear();
                }
            }
        }
        let root = hash_builder.root();
        let proof_nodes = hash_builder.take_proof_nodes();
        // Extract per-branch hash and tree masks from the hash builder's updated nodes.
        let branch_node_hash_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.hash_mask))
            .collect();
        let branch_node_tree_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.tree_mask))
            .collect();

        let mut trie_updates = TrieUpdates::default();
        let removed_keys = node_iter.walker.take_removed_keys();
        trie_updates.finalize(hash_builder, removed_keys, destroyed_accounts);

        (root, trie_updates, proof_nodes, branch_node_hash_masks, branch_node_tree_masks)
    }
3872
    /// Returns a `ParallelSparseTrie` pre-loaded with the given nodes, as well as leaf values
    /// inferred from any provided leaf nodes.
    ///
    /// Every leaf gets the placeholder value `"LEAF VALUE"`, stored under its full key
    /// (node path + leaf key). Update retention is enabled on the returned trie.
    fn new_test_trie<Nodes>(nodes: Nodes) -> ParallelSparseTrie
    where
        Nodes: Iterator<Item = (Nibbles, SparseNode)>,
    {
        let mut trie = ParallelSparseTrie::default().with_updates(true);

        for (path, node) in nodes {
            let subtrie = trie.subtrie_for_path_mut(&path);
            if let SparseNode::Leaf { key, .. } = &node {
                let mut full_key = path;
                full_key.extend(key);
                subtrie.inner.values.insert(full_key, "LEAF VALUE".into());
            }
            subtrie.nodes.insert(path, node);
        }
        trie
    }
3892
3893    fn parallel_sparse_trie_nodes(
3894        sparse_trie: &ParallelSparseTrie,
3895    ) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
3896        let lower_sparse_nodes = sparse_trie
3897            .lower_subtries
3898            .iter()
3899            .filter_map(|subtrie| subtrie.as_revealed_ref())
3900            .flat_map(|subtrie| subtrie.nodes.iter());
3901
3902        let upper_sparse_nodes = sparse_trie.upper_subtrie.nodes.iter();
3903
3904        lower_sparse_nodes.chain(upper_sparse_nodes).sorted_by_key(|(path, _)| *path)
3905    }
3906
    /// Assert that the parallel sparse trie nodes and the proof nodes from the hash builder are
    /// equal.
    ///
    /// Comparison is structural: node kind plus the state mask (branches) or key
    /// (extensions/leaves); hashes and values are not compared.
    fn assert_eq_parallel_sparse_trie_proof_nodes(
        sparse_trie: &ParallelSparseTrie,
        proof_nodes: ProofNodes,
    ) {
        let proof_nodes = proof_nodes
            .into_nodes_sorted()
            .into_iter()
            .map(|(path, node)| (path, TrieNodeV2::decode(&mut node.as_ref()).unwrap()));

        let all_sparse_nodes = parallel_sparse_trie_nodes(sparse_trie);

        // Both sequences are sorted by path, so they can be compared pairwise.
        for ((proof_node_path, proof_node), (sparse_node_path, sparse_node)) in
            proof_nodes.zip(all_sparse_nodes)
        {
            assert_eq!(&proof_node_path, sparse_node_path);

            let equals = match (&proof_node, &sparse_node) {
                // Both nodes are empty
                (TrieNodeV2::EmptyRoot, SparseNode::Empty) => true,
                // Both nodes are branches and have the same state mask
                (
                    TrieNodeV2::Branch(BranchNodeV2 { state_mask: proof_state_mask, .. }),
                    SparseNode::Branch { state_mask: sparse_state_mask, .. },
                ) => proof_state_mask == sparse_state_mask,
                // Both nodes are extensions and have the same key
                (
                    TrieNodeV2::Extension(ExtensionNode { key: proof_key, .. }),
                    SparseNode::Extension { key: sparse_key, .. },
                ) |
                // Both nodes are leaves and have the same key
                (
                    TrieNodeV2::Leaf(LeafNode { key: proof_key, .. }),
                    SparseNode::Leaf { key: sparse_key, .. },
                ) => proof_key == sparse_key,
                // Empty and hash nodes are specific to the sparse trie, skip them
                (_, SparseNode::Empty) => continue,
                _ => false,
            };
            assert!(
                equals,
                "path: {proof_node_path:?}\nproof node: {proof_node:?}\nsparse node: {sparse_node:?}"
            );
        }
    }
3953
    #[test]
    fn test_get_changed_subtries_empty() {
        // With no lower subtries revealed, nothing can be taken as changed.
        let mut trie = ParallelSparseTrie::default();
        let mut prefix_set = PrefixSetMut::from([Nibbles::default()]).freeze();

        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
        assert!(subtries.is_empty());
        // The whole input prefix set is reported back as unchanged.
        assert_eq!(unchanged_prefix_set, PrefixSetMut::from(prefix_set.iter().copied()));
    }
3963
3964    #[test]
3965    fn test_get_changed_subtries() {
3966        // Create a trie with three subtries
3967        let mut trie = ParallelSparseTrie::default();
3968        let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
3969        let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
3970        let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
3971        let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
3972        let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
3973        let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
3974
3975        // Add subtries at specific positions
3976        trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
3977        trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
3978        trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3);
3979
3980        let unchanged_prefix_set = PrefixSetMut::from([
3981            Nibbles::from_nibbles([0x0]),
3982            Nibbles::from_nibbles([0x2, 0x0, 0x0]),
3983        ]);
3984        // Create a prefix set with the keys that match only the second subtrie
3985        let mut prefix_set = PrefixSetMut::from([
3986            // Match second subtrie
3987            Nibbles::from_nibbles([0x1, 0x0, 0x0]),
3988            Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0]),
3989        ]);
3990        prefix_set.extend(unchanged_prefix_set);
3991        let mut prefix_set = prefix_set.freeze();
3992
3993        // Second subtrie should be removed and returned
3994        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
3995        assert_eq!(
3996            subtries
3997                .into_iter()
3998                .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
3999                    (index, subtrie, prefix_set.iter().copied().collect::<Vec<_>>())
4000                })
4001                .collect::<Vec<_>>(),
4002            vec![(
4003                subtrie_2_index,
4004                subtrie_2,
4005                vec![
4006                    Nibbles::from_nibbles([0x1, 0x0, 0x0]),
4007                    Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0])
4008                ]
4009            )]
4010        );
4011        assert_eq!(unchanged_prefix_set, unchanged_prefix_set);
4012        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_none());
4013
4014        // First subtrie should remain unchanged
4015        assert_eq!(trie.lower_subtries[subtrie_1_index], LowerSparseSubtrie::Revealed(subtrie_1));
4016    }
4017
    #[test]
    fn test_get_changed_subtries_all() {
        // Create a trie with three subtries
        let mut trie = ParallelSparseTrie::default();
        let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
        let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
        let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
        let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
        let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
        let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);

        // Add subtries at specific positions
        trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
        trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
        trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3.clone());

        // Create a prefix set that matches any key
        let mut prefix_set = PrefixSetMut::all().freeze();

        // All subtries should be removed and returned
        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
        assert_eq!(
            subtries
                .into_iter()
                .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
                    // `all()` reports whether the per-subtrie prefix set matches everything.
                    (index, subtrie, prefix_set.all())
                })
                .collect::<Vec<_>>(),
            vec![
                (subtrie_1_index, subtrie_1, true),
                (subtrie_2_index, subtrie_2, true),
                (subtrie_3_index, subtrie_3, true)
            ]
        );
        // The unchanged prefix set likewise matches everything.
        assert_eq!(unchanged_prefix_set, PrefixSetMut::all());

        assert!(trie.lower_subtries.iter().all(|subtrie| subtrie.as_revealed_ref().is_none()));
    }
4056
4057    #[test]
4058    fn test_sparse_subtrie_type() {
4059        assert_eq!(SparseSubtrieType::from_path(&Nibbles::new()), SparseSubtrieType::Upper);
4060        assert_eq!(
4061            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0])),
4062            SparseSubtrieType::Upper
4063        );
4064        assert_eq!(
4065            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15])),
4066            SparseSubtrieType::Upper
4067        );
4068        assert_eq!(
4069            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0])),
4070            SparseSubtrieType::Lower(0)
4071        );
4072        assert_eq!(
4073            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0, 0])),
4074            SparseSubtrieType::Lower(0)
4075        );
4076        assert_eq!(
4077            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1])),
4078            SparseSubtrieType::Lower(1)
4079        );
4080        assert_eq!(
4081            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1, 0])),
4082            SparseSubtrieType::Lower(1)
4083        );
4084        assert_eq!(
4085            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 15])),
4086            SparseSubtrieType::Lower(15)
4087        );
4088        assert_eq!(
4089            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 0])),
4090            SparseSubtrieType::Lower(240)
4091        );
4092        assert_eq!(
4093            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 1])),
4094            SparseSubtrieType::Lower(241)
4095        );
4096        assert_eq!(
4097            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15])),
4098            SparseSubtrieType::Lower(255)
4099        );
4100        assert_eq!(
4101            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15, 15])),
4102            SparseSubtrieType::Lower(255)
4103        );
4104    }
4105
    #[test]
    fn test_reveal_node_leaves() {
        // Reveal leaf in the upper trie. A root branch with child 0x1 makes path [0x1]
        // reachable for the subsequent reveal_nodes call.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        {
            let path = Nibbles::from_nibbles([0x1]);
            let node = create_leaf_node([0x2, 0x3], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // A one-nibble path is within UPPER_TRIE_MAX_DEPTH, so the leaf lands in
            // the upper subtrie.
            assert_matches!(
                trie.upper_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                if key == &Nibbles::from_nibbles([0x2, 0x3])
            );

            // The leaf value is stored under the full key: node path + leaf key.
            let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            assert_eq!(
                trie.upper_subtrie.inner.values.get(&full_path),
                Some(&encode_account_value(42))
            );
        }

        // Reveal leaf in a lower trie. A separate trie is needed because the structure at
        // [0x1] conflicts: the upper trie test placed a leaf there, but reaching [0x1, 0x2]
        // requires a branch at [0x1]. A root branch → branch at [0x1] with child 0x2
        // makes path [0x1, 0x2] reachable.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let branch_at_1 =
            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x1]),
            node: branch_at_1,
            masks: None,
        }])
        .unwrap();

        {
            let path = Nibbles::from_nibbles([0x1, 0x2]);
            let node = create_leaf_node([0x3, 0x4], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // Check that the lower subtrie was created
            let idx = path_subtrie_index_unchecked(&path);
            assert!(trie.lower_subtries[idx].as_revealed_ref().is_some());

            // Check that the lower subtrie's path was correctly set
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, path);

            assert_matches!(
                lower_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                if key == &Nibbles::from_nibbles([0x3, 0x4])
            );
        }

        // Reveal leaf in a lower trie with a longer path, shouldn't result in the subtrie's root
        // path changing.
        {
            let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            let node = create_leaf_node([0x4, 0x5], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // Check that the lower subtrie's path hasn't changed
            let idx = path_subtrie_index_unchecked(&path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2]));
        }
    }
4187
4188    #[test]
4189    fn test_reveal_node_branch_all_upper() {
4190        let path = Nibbles::new();
4191        let child_hashes = [
4192            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
4193            RlpNode::word_rlp(&B256::repeat_byte(0x22)),
4194        ];
4195        let node = create_branch_node_with_children(&[0x0, 0x5], child_hashes.clone());
4196        let masks = None;
4197        let trie = ParallelSparseTrie::from_root(node, masks, true).unwrap();
4198
4199        // Branch node should be in upper trie
4200        assert_eq!(
4201            trie.upper_subtrie.nodes.get(&path).unwrap(),
4202            &SparseNode::new_branch(
4203                0b0000000000100001.into(),
4204                &[(0, child_hashes[0].as_hash().unwrap()), (5, child_hashes[1].as_hash().unwrap())]
4205            )
4206        );
4207
4208        // Children should not be revealed yet
4209        let child_path_0 = Nibbles::from_nibbles([0x0]);
4210        let child_path_5 = Nibbles::from_nibbles([0x5]);
4211        assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_0));
4212        assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_5));
4213    }
4214
    #[test]
    fn test_reveal_node_branch_cross_level() {
        // Set up root branch with nibble 0x1 so path [0x1] is reachable.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        let path = Nibbles::from_nibbles([0x1]); // Exactly 1 nibble - boundary case, still in the upper trie
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x33)),
            RlpNode::word_rlp(&B256::repeat_byte(0x44)),
            RlpNode::word_rlp(&B256::repeat_byte(0x55)),
        ];
        let node = create_branch_node_with_children(&[0x0, 0x7, 0xf], child_hashes.clone());
        let masks = None;

        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

        // Branch node should be in upper trie, hash is memoized from the previous Hash node
        // (0xAA, the child hash the root branch held for nibble 0x1 before the reveal).
        assert_eq!(
            trie.upper_subtrie.nodes.get(&path).unwrap(),
            &SparseNode::new_branch(
                // Bits 0x0, 0x7 and 0xf set, matching the three revealed children.
                0b1000000010000001.into(),
                &[
                    (0x0, child_hashes[0].as_hash().unwrap()),
                    (0x7, child_hashes[1].as_hash().unwrap()),
                    (0xf, child_hashes[2].as_hash().unwrap())
                ]
            )
            .with_state(SparseNodeState::Cached {
                rlp_node: RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                store_in_db_trie: Some(false),
            })
        );

        // All children should be in lower tries: their 2-nibble paths select a lower subtrie
        // (lower subtries are keyed by the first two nibbles of a path)
        let child_paths = [
            Nibbles::from_nibbles([0x1, 0x0]),
            Nibbles::from_nibbles([0x1, 0x7]),
            Nibbles::from_nibbles([0x1, 0xf]),
        ];

        // Reveal a leaf at each child path so the branch's children are no longer blinded.
        let mut children = child_paths
            .iter()
            .map(|path| ProofTrieNodeV2 {
                path: *path,
                node: create_leaf_node([0x0], 1),
                masks: None,
            })
            .collect::<Vec<_>>();

        trie.reveal_nodes(&mut children).unwrap();

        // Branch node should still be in upper trie but without any blinded children
        assert_matches!(
            trie.upper_subtrie.nodes.get(&path),
            Some(&SparseNode::Branch {
                state_mask,
                state: SparseNodeState::Cached { ref rlp_node, store_in_db_trie: Some(false) },
                blinded_mask,
                ..
            }) if state_mask == 0b1000000010000001.into() && blinded_mask.is_empty() && *rlp_node == RlpNode::word_rlp(&B256::repeat_byte(0xAA))
        );

        // Each child leaf landed in its own lower subtrie, rooted at the child's path, with the
        // hash the parent branch previously held for it memoized as its cached RLP node.
        for (i, child_path) in child_paths.iter().enumerate() {
            let idx = path_subtrie_index_unchecked(child_path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(&lower_subtrie.path, child_path);
            assert_eq!(
                lower_subtrie.nodes.get(child_path),
                Some(&SparseNode::Leaf {
                    key: Nibbles::from_nibbles([0x0]),
                    state: SparseNodeState::Cached {
                        rlp_node: child_hashes[i].clone(),
                        store_in_db_trie: Some(false)
                    }
                })
            );
        }
    }
4295
    #[test]
    fn test_update_subtrie_hashes_prefix_set_matching() {
        // Create a trie with a root branch that makes paths [0x0, ...] and [0x3, ...]
        // reachable from the upper trie.
        let root_branch = create_branch_node_with_children(
            &[0x0, 0x3],
            [
                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
            ],
        );
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Create leaf paths. Each leaf lives at a 2-nibble path (the root of a lower subtrie)
        // and carries the remaining 62 nibbles as its key.
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..2);
        let leaf_1_key = leaf_1_full_path.slice(2..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 1], vec![0; 62]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..2);
        let leaf_2_key = leaf_2_full_path.slice(2..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 2], vec![0; 62]].concat());
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), 1);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), 2);

        // Create branch node at [0x0] with only children 0x0 and 0x1.
        // Child 0x2 (leaf_3) will be inserted via update_leaf to create a fresh node
        // with hash: None.
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x00)),
            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
        ];
        let branch_path = Nibbles::from_nibbles([0x0]);
        let branch_node = create_branch_node_with_children(&[0x0, 0x1], child_hashes);

        // Reveal the existing nodes
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: branch_path, node: branch_node, masks: None },
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Insert leaf_3 via update_leaf. This modifies the branch at [0x0] to add child
        // 0x2 and creates a fresh leaf node with hash: None in the lower subtrie.
        let provider = NoRevealProvider;
        trie.update_leaf(leaf_3_full_path, encode_account_value(3), provider).unwrap();

        // Calculate subtrie indexes
        let subtrie_1_index = SparseSubtrieType::from_path(&leaf_1_path).lower_index().unwrap();
        let subtrie_2_index = SparseSubtrieType::from_path(&leaf_2_path).lower_index().unwrap();
        let leaf_3_path = leaf_3_full_path.slice(..2);
        let subtrie_3_index = SparseSubtrieType::from_path(&leaf_3_path).lower_index().unwrap();

        // Keys that are expected to remain in the trie's prefix set after
        // `update_subtrie_hashes` runs (verified by the assertion below).
        let mut unchanged_prefix_set = PrefixSetMut::from([
            Nibbles::from_nibbles([0x0]),
            leaf_2_full_path,
            Nibbles::from_nibbles([0x3, 0x0, 0x0]),
        ]);
        // Create a prefix set with the keys that match only the second subtrie
        let mut prefix_set = PrefixSetMut::from([
            // Match second subtrie
            Nibbles::from_nibbles([0x0, 0x1, 0x0]),
            Nibbles::from_nibbles([0x0, 0x1, 0x1, 0x0]),
        ]);
        prefix_set.extend(unchanged_prefix_set.clone());
        trie.prefix_set = prefix_set;

        // Update subtrie hashes
        trie.update_subtrie_hashes();

        // We expect that leaf 3 (0x02) should have been added to the prefix set, because it is
        // missing a hash and is the root node of a lower subtrie, and therefore would need to have
        // that hash calculated by `update_upper_subtrie_hashes`.
        unchanged_prefix_set.insert(leaf_3_full_path);

        // Check that the prefix set was updated
        assert_eq!(
            trie.prefix_set.clone().freeze().into_iter().collect::<Vec<_>>(),
            unchanged_prefix_set.freeze().into_iter().collect::<Vec<_>>()
        );
        // Check that subtries were returned back to the array
        assert!(trie.lower_subtries[subtrie_1_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_3_index].as_revealed_ref().is_some());
    }
4381
    #[test]
    fn test_subtrie_update_hashes() {
        // Builds a small lower subtrie by hand, computes its hashes via `update_hashes`, and
        // checks every node's cached hash against the reference `HashBuilder` output.
        let mut subtrie = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));

        // Create leaf nodes with paths 0x0...0, 0x00001...0, 0x0010...0
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..5);
        let leaf_1_key = leaf_1_full_path.slice(5..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 0, 0, 0, 1], vec![0; 59]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..5);
        let leaf_2_key = leaf_2_full_path.slice(5..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 0, 1], vec![0; 61]].concat());
        let leaf_3_path = leaf_3_full_path.slice(..3);
        let leaf_3_key = leaf_3_full_path.slice(3..);

        let account_1 = create_account(1);
        let account_2 = create_account(2);
        let account_3 = create_account(3);
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);
        let leaf_3 = create_leaf_node(leaf_3_key.to_vec(), account_3.nonce);

        // Create bottom branch node. NOTE(review): `branch_1` is built with a one-nibble
        // prefix, so it is revealed at `extension_path` ([0,0,0]) below while its branch
        // portion is addressed at `branch_1_path` ([0,0,0,0]).
        let extension_path = Nibbles::from_nibbles([0, 0, 0]);
        let branch_1_path = Nibbles::from_nibbles([0, 0, 0, 0]);
        let branch_1 = create_branch_node(
            Nibbles::from_nibbles([0]),
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // Create top branch node
        let branch_2_path = Nibbles::from_nibbles([0, 0]);
        let branch_2 = create_branch_node_with_children(
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&branch_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_3)),
            ],
        );

        // Reveal nodes, top-down so each parent exists before its children
        subtrie.reveal_node(branch_2_path, &branch_2, None, None).unwrap();
        subtrie.reveal_node(extension_path, &branch_1, None, None).unwrap();
        subtrie.reveal_node(leaf_1_path, &leaf_1, None, None).unwrap();
        subtrie.reveal_node(leaf_2_path, &leaf_2, None, None).unwrap();
        subtrie.reveal_node(leaf_3_path, &leaf_3, None, None).unwrap();

        // Run hash builder for two leaf nodes, retaining proofs for every path we compare below
        let (_, _, proof_nodes, _, _) = run_hash_builder(
            [
                (leaf_1_full_path, account_1),
                (leaf_2_full_path, account_2),
                (leaf_3_full_path, account_3),
            ],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_2_path, leaf_1_full_path, leaf_2_full_path, leaf_3_full_path],
        );

        // Update hashes for the subtrie, marking all three leaves as changed
        subtrie.update_hashes(
            &mut PrefixSetMut::from([leaf_1_full_path, leaf_2_full_path, leaf_3_full_path])
                .freeze(),
            &mut None,
            &BranchNodeMasksMap::default(),
        );

        // Compare hashes between hash builder and subtrie
        let hash_builder_branch_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_1_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_1_hash =
            subtrie.nodes.get(&branch_1_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_1_hash, subtrie_branch_1_hash);

        let hash_builder_extension_hash =
            RlpNode::from_rlp(proof_nodes.get(&extension_path).unwrap().as_ref())
                .as_hash()
                .unwrap();
        let subtrie_extension_hash =
            subtrie.nodes.get(&extension_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_extension_hash, subtrie_extension_hash);

        let hash_builder_branch_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_2_hash =
            subtrie.nodes.get(&branch_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_2_hash, subtrie_branch_2_hash);

        let subtrie_leaf_1_hash = subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().unwrap();
        let hash_builder_leaf_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_1_path).unwrap().as_ref()).as_hash().unwrap();
        assert_eq!(hash_builder_leaf_1_hash, subtrie_leaf_1_hash);

        let hash_builder_leaf_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_2_hash = subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_2_hash, subtrie_leaf_2_hash);

        let hash_builder_leaf_3_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_3_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_3_hash = subtrie.nodes.get(&leaf_3_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_3_hash, subtrie_leaf_3_hash);
    }
4489
    #[test]
    fn test_remove_leaf_branch_becomes_extension() {
        //
        // 0x:      Extension (Key = 5)
        // 0x5:     └── Branch (Mask = 1001)
        // 0x50:        ├── 0 -> Extension (Key = 23)
        // 0x5023:      │        └── Branch (Mask = 0101)
        // 0x50231:     │            ├── 1 -> Leaf
        // 0x50233:     │            └── 3 -> Leaf
        // 0x53:        └── 3 -> Leaf (Key = 7)
        //
        // After removing 0x53, extension+branch+extension become a single extension
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b1001), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3])),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0101), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_leaf(leaf_key([0x7], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf with a full path of 0x537
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x7]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        let lower_subtrie_50 = trie.lower_subtries[0x50].as_revealed_ref().unwrap();

        // Check that the `SparseSubtrie` the leaf was removed from was itself removed, as it is now
        // empty.
        assert_matches!(trie.lower_subtries[0x53].as_revealed_ref(), None);

        // Check that the leaf node was removed, and that its parent/grandparent were modified
        // appropriately: the root extension now carries the merged key 0x5023, and the two
        // intermediate nodes (branch at 0x5, extension at 0x50) are gone.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([])),
            Some(SparseNode::Extension{ key, ..})
            if key == &Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])
        );
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
        assert_matches!(lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0])), None);
        // The branch at 0x5023 is untouched — it still has both of its children.
        assert_matches!(
            lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])),
            Some(SparseNode::Branch{ state_mask, .. })
            if *state_mask == 0b0101.into()
        );
    }
4556
4557    #[test]
4558    fn test_remove_leaf_branch_becomes_leaf() {
4559        //
4560        // 0x:      Branch (Mask = 0011)
4561        // 0x0:     ├── 0 -> Leaf (Key = 12)
4562        // 0x1:     └── 1 -> Leaf (Key = 34)
4563        //
4564        // After removing 0x012, branch becomes a leaf
4565        //
4566        let mut trie = new_test_trie(
4567            [
4568                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
4569                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4570                (Nibbles::from_nibbles([0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 63))),
4571            ]
4572            .into_iter(),
4573        );
4574
4575        // Add the branch node to updated_nodes to simulate it being modified earlier
4576        if let Some(updates) = trie.updates.as_mut() {
4577            updates
4578                .updated_nodes
4579                .insert(Nibbles::default(), BranchNodeCompact::new(0b11, 0, 0, vec![], None));
4580        }
4581
4582        let provider = NoRevealProvider;
4583
4584        // Remove the leaf with a full path of 0x012
4585        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4586        trie.remove_leaf(&leaf_full_path, provider).unwrap();
4587
4588        let upper_subtrie = &trie.upper_subtrie;
4589
4590        // Check that the leaf's value was removed
4591        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4592
4593        // Check that the branch node collapsed into a leaf node with the remaining child's key
4594        assert_matches!(
4595            upper_subtrie.nodes.get(&Nibbles::default()),
4596            Some(SparseNode::Leaf{ key, ..})
4597            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4598        );
4599
4600        // Check that the remaining child node was removed
4601        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4602        // Check that the removed child node was also removed
4603        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])), None);
4604
4605        // Check that updates were tracked correctly when branch collapsed
4606        let updates = trie.updates.as_ref().unwrap();
4607
4608        // The branch at root should be marked as removed since it collapsed
4609        assert!(updates.removed_nodes.contains(&Nibbles::default()));
4610
4611        // The branch should no longer be in updated_nodes
4612        assert!(!updates.updated_nodes.contains_key(&Nibbles::default()));
4613    }
4614
    #[test]
    fn test_remove_leaf_extension_becomes_leaf() {
        //
        // 0x:      Extension (Key = 5)
        // 0x5:     └── Branch (Mask = 0011)
        // 0x50:        ├── 0 -> Leaf (Key = 12)
        // 0x51:        └── 1 -> Leaf (Key = 34)
        //
        // After removing 0x5012, extension+branch becomes a leaf
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x5, 0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 62))),
                (Nibbles::from_nibbles([0x5, 0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf with a full path of 0x5012
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x1, 0x2]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // Check that both lower subtries were removed. 0x50 should have been removed because
        // removing its leaf made it empty. 0x51 should have been removed after its own leaf was
        // collapsed into the upper trie, leaving it also empty.
        assert_matches!(trie.lower_subtries[0x50].as_revealed_ref(), None);
        assert_matches!(trie.lower_subtries[0x51].as_revealed_ref(), None);

        // Check that the other leaf's value was moved to the upper trie
        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]));
        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));

        // Check that the extension node collapsed into a leaf node whose key is the extension
        // key + the surviving child's nibble + that child's own key.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Leaf{ key, ..})
            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]))
        );

        // Check that intermediate nodes were removed
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
    }
4663
    #[test]
    fn test_remove_leaf_branch_on_branch() {
        //
        // 0x:      Branch (Mask = 0101)
        // 0x0:     ├── 0 -> Leaf (Key = 12)
        // 0x2:     └── 2 -> Branch (Mask = 0011)
        // 0x20:        ├── 0 -> Leaf (Key = 34)
        // 0x21:        └── 1 -> Leaf (Key = 56)
        //
        // After removing 0x2034, the inner branch becomes a leaf
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0101), &[])),
                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
                (Nibbles::from_nibbles([0x2]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x2, 0x0]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
                (Nibbles::from_nibbles([0x2, 0x1]), SparseNode::new_leaf(leaf_key([0x5, 0x6], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf with a full path of 0x2034
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x0, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // Check that both lower subtries were removed. 0x20 should have been removed because
        // removing its leaf made it empty. 0x21 should have been removed after its own leaf was
        // collapsed into the upper trie, leaving it also empty.
        assert_matches!(trie.lower_subtries[0x20].as_revealed_ref(), None);
        assert_matches!(trie.lower_subtries[0x21].as_revealed_ref(), None);

        // Check that the other leaf's value was moved to the upper trie
        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x1, 0x5, 0x6]));
        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));

        // Check that the root branch still exists unchanged
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch{ state_mask, .. })
            if *state_mask == 0b0101.into()
        );

        // Check that the inner branch collapsed into a leaf carrying the surviving child's
        // nibble (0x1) followed by that child's key (0x56)
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x2])),
            Some(SparseNode::Leaf{ key, ..})
            if key == &leaf_key([0x1, 0x5, 0x6], 63)
        );
    }
4718
    #[test]
    fn test_remove_leaf_lower_subtrie_root_path_update() {
        //
        // 0x:        Extension (Key = 123, root of lower subtrie)
        // 0x123:     └── Branch (Mask = 0011000)
        // 0x1233:        ├── 3 -> Leaf (Key = [])
        // 0x1234:        └── 4 -> Extension (Key = 5)
        // 0x12345:           └── Branch (Mask = 0011)
        // 0x123450:              ├── 0 -> Leaf (Key = [])
        // 0x123451:              └── 1 -> Leaf (Key = [])
        //
        // After removing leaf at 0x1233, the branch at 0x123 becomes an extension to 0x12345, which
        // then gets merged with the root extension at 0x. The lower subtrie's `path` field should
        // be updated from 0x123 to 0x12345.
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3]),
                    // Mask with bits 3 and 4 set, matching children 0x3 and 0x4 below.
                    SparseNode::new_branch(TrieMask::new(0b0011000), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 60)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x5])),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]),
                    SparseNode::new_branch(TrieMask::new(0b0011), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Verify initial state - the lower subtrie's path should be 0x123
        let lower_subtrie_root_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
        assert_matches!(
            trie.lower_subtrie_for_path_mut(&lower_subtrie_root_path),
            Some(subtrie)
            if subtrie.path == lower_subtrie_root_path
        );

        // Remove the leaf at 0x1233
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        // After removal:
        // 1. The branch at 0x123 should become an extension to 0x12345
        // 2. That extension should merge with the root extension at 0x
        // 3. The lower subtrie's path should be updated to 0x12345
        let lower_subtrie = trie.lower_subtries[0x12].as_revealed_ref().unwrap();
        assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]));

        // Verify the root extension now points all the way to 0x12345
        assert_matches!(
            trie.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, .. })
            if key == &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])
        );

        // Verify the branch at 0x12345 hasn't been modified
        assert_matches!(
            lower_subtrie.nodes.get(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            Some(SparseNode::Branch { state_mask, .. })
            if state_mask == &TrieMask::new(0b0011)
        );
    }
4800
4801    #[test]
4802    fn test_remove_leaf_remaining_child_needs_reveal() {
4803        //
4804        // 0x:      Branch (Mask = 0011)
4805        // 0x0:     ├── 0 -> Leaf (Key = 12)
4806        // 0x1:     └── 1 -> Hash (blinded leaf)
4807        //
4808        // After removing 0x012, the hash node needs to be revealed to collapse the branch
4809        //
4810        let mut trie = new_test_trie(
4811            [
4812                (
4813                    Nibbles::default(),
4814                    SparseNode::new_branch(
4815                        TrieMask::new(0b0011),
4816                        &[(0x1, B256::repeat_byte(0xab))],
4817                    ),
4818                ),
4819                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4820            ]
4821            .into_iter(),
4822        );
4823
4824        // Create a mock provider that will reveal the blinded leaf
4825        let revealed_leaf = create_leaf_node(leaf_key([0x3, 0x4], 63).to_vec(), 42);
4826        let mut encoded = Vec::new();
4827        revealed_leaf.encode(&mut encoded);
4828
4829        // Try removing the leaf with a full path of 0x012, this should fail because the leaf is
4830        // blinded
4831        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4832        let Err(err) = trie.remove_leaf(&leaf_full_path, NoRevealProvider) else {
4833            panic!("expected error");
4834        };
4835        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if *path == Nibbles::from_nibbles([0x1]));
4836
4837        // Now reveal the leaf and try removing it again
4838        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
4839            path: Nibbles::from_nibbles([0x1]),
4840            node: revealed_leaf,
4841            masks: None,
4842        }])
4843        .unwrap();
4844        trie.remove_leaf(&leaf_full_path, NoRevealProvider).unwrap();
4845
4846        let upper_subtrie = &trie.upper_subtrie;
4847
4848        // Check that the leaf value was removed
4849        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4850
4851        // Check that the branch node collapsed into a leaf node with the revealed child's key
4852        assert_matches!(
4853            upper_subtrie.nodes.get(&Nibbles::default()),
4854            Some(SparseNode::Leaf{ key, ..})
4855            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4856        );
4857
4858        // Check that the remaining child node was removed (since it was merged)
4859        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4860    }
4861
4862    #[test]
4863    fn test_remove_leaf_root() {
4864        //
4865        // 0x:      Leaf (Key = 123)
4866        //
4867        // After removing 0x123, the trie becomes empty
4868        //
4869        let mut trie = new_test_trie(core::iter::once((
4870            Nibbles::default(),
4871            SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
4872        )));
4873
4874        let provider = NoRevealProvider;
4875
4876        // Remove the leaf with a full key of 0x123
4877        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
4878        trie.remove_leaf(&leaf_full_path, provider).unwrap();
4879
4880        let upper_subtrie = &trie.upper_subtrie;
4881
4882        // Check that the leaf value was removed
4883        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4884
4885        // Check that the root node was changed to Empty
4886        assert_matches!(upper_subtrie.nodes.get(&Nibbles::default()), Some(SparseNode::Empty));
4887    }
4888
    #[test]
    fn test_remove_leaf_unsets_hash_along_path() {
        //
        // Creates a trie structure:
        // 0x:      Branch (with hash set)
        // 0x0:     ├── Extension (with hash set)
        // 0x01:    │   └── Branch (with hash set)
        // 0x012:   │       ├── Leaf (Key = 34, with hash set)
        // 0x013:   │       ├── Leaf (Key = 56, with hash set)
        // 0x014:   │       └── Leaf (Key = 67, with hash set)
        // 0x1:     └── Leaf (Key = 78, with hash set)
        //
        // When removing leaf at 0x01234, all nodes along the path (root branch,
        // extension at 0x0, branch at 0x01) should have their hash field unset
        //

        // Builds a `Cached` node state around a dummy hash so every node starts out with a
        // pre-computed RLP/hash that the removal is expected to invalidate (flip to `Dirty`).
        let make_revealed = |hash: B256| SparseNodeState::Cached {
            rlp_node: RlpNode::word_rlp(&hash),
            store_in_db_trie: None,
        };
        let mut trie = new_test_trie(
            [
                (
                    Nibbles::default(),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b0011),
                        state: make_revealed(B256::repeat_byte(0x10)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0]),
                    SparseNode::Extension {
                        key: Nibbles::from_nibbles([0x1]),
                        state: make_revealed(B256::repeat_byte(0x20)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1]),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b11100),
                        state: make_revealed(B256::repeat_byte(0x30)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x2]),
                    SparseNode::Leaf {
                        key: leaf_key([0x3, 0x4], 61),
                        state: make_revealed(B256::repeat_byte(0x40)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x3]),
                    SparseNode::Leaf {
                        key: leaf_key([0x5, 0x6], 61),
                        state: make_revealed(B256::repeat_byte(0x50)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x4]),
                    SparseNode::Leaf {
                        key: leaf_key([0x6, 0x7], 61),
                        state: make_revealed(B256::repeat_byte(0x60)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x1]),
                    SparseNode::Leaf {
                        key: leaf_key([0x7, 0x8], 63),
                        state: make_revealed(B256::repeat_byte(0x70)),
                    },
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove a leaf which does not exist; this should have no effect.
        trie.remove_leaf(
            &pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4, 0xF])),
            provider,
        )
        .unwrap();
        // After the no-op removal every node must still carry its cached hash.
        for (path, node) in trie.all_nodes() {
            assert!(node.cached_hash().is_some(), "path {path:?} should still have a hash");
        }

        // Remove the leaf at path 0x01234
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        // Nodes with path length >= UPPER_TRIE_MAX_DEPTH live in a lower subtrie; the
        // prefix 0x01 corresponds to lower subtrie index 0x01.
        let lower_subtrie_10 = trie.lower_subtries[0x01].as_revealed_ref().unwrap();

        // Verify that hash fields are unset for all nodes along the path to the removed leaf
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])),
            Some(SparseNode::Extension { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1])),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );

        // Verify that nodes not on the path still have their hashes
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x3])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x4])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
    }
5015
    #[test]
    fn test_parallel_sparse_trie_root() {
        // End-to-end root computation: build a small trie spanning the upper subtrie
        // (extension + branch) and two lower subtries (one leaf each), force every node
        // dirty, recompute the root, and compare against the `run_hash_builder` reference.

        // Step 1: Create the trie structure
        // Extension node at 0x with key 0x2 (goes to upper subtrie)
        let extension_path = Nibbles::new();
        let extension_key = Nibbles::from_nibbles([0x2]);

        // Branch node at 0x2 with children 0 and 1 (goes to upper subtrie)
        let branch_path = Nibbles::from_nibbles([0x2]);

        // Leaf nodes at 0x20 and 0x21 (go to lower subtries)
        let leaf_1_path = Nibbles::from_nibbles([0x2, 0x0]);
        let leaf_1_key = Nibbles::from_nibbles(vec![0; 62]); // Remaining key
        let leaf_1_full_path = Nibbles::from_nibbles([vec![0x2, 0x0], vec![0; 62]].concat());

        let leaf_2_path = Nibbles::from_nibbles([0x2, 0x1]);
        let leaf_2_key = Nibbles::from_nibbles(vec![0; 62]); // Remaining key
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0x2, 0x1], vec![0; 62]].concat());

        // Create accounts
        let account_1 = create_account(1);
        let account_2 = create_account(2);

        // Create leaf nodes
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);

        // Create branch node with children at indices 0 and 1, embedding the leaves'
        // RLP so revealing the root also discovers both child stubs.
        let branch = create_branch_node(
            extension_key,
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // Step 2: Reveal nodes in the trie
        let mut trie = ParallelSparseTrie::from_root(branch, None, true).unwrap();
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Step 3: Reset hashes for all revealed nodes to test actual hash calculation
        // Reset upper subtrie node hashes
        trie.upper_subtrie
            .nodes
            .get_mut(&extension_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.upper_subtrie.nodes.get_mut(&branch_path).unwrap().set_state(SparseNodeState::Dirty);

        // Reset lower subtrie node hashes
        let leaf_1_subtrie_idx = path_subtrie_index_unchecked(&leaf_1_path);
        let leaf_2_subtrie_idx = path_subtrie_index_unchecked(&leaf_2_path);

        trie.lower_subtries[leaf_1_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_1_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.lower_subtries[leaf_2_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_2_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);

        // Step 4: Add changed leaf node paths to prefix set
        trie.prefix_set.insert(leaf_1_full_path);
        trie.prefix_set.insert(leaf_2_full_path);

        // Step 5: Calculate root using our implementation
        let root = trie.root();

        // Step 6: Calculate root using HashBuilder for comparison
        let (hash_builder_root, _, _proof_nodes, _, _) = run_hash_builder(
            [(leaf_1_full_path, account_1), (leaf_2_full_path, account_2)],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_path, leaf_1_full_path, leaf_2_full_path],
        );

        // Step 7: Verify the roots match
        assert_eq!(root, hash_builder_root);

        // Verify hashes were computed: every node dirtied in step 3 must be cached again.
        let leaf_1_subtrie = trie.lower_subtries[leaf_1_subtrie_idx].as_revealed_ref().unwrap();
        let leaf_2_subtrie = trie.lower_subtries[leaf_2_subtrie_idx].as_revealed_ref().unwrap();
        assert!(trie.upper_subtrie.nodes.get(&extension_path).unwrap().cached_hash().is_some());
        assert!(trie.upper_subtrie.nodes.get(&branch_path).unwrap().cached_hash().is_some());
        assert!(leaf_1_subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().is_some());
        assert!(leaf_2_subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().is_some());
    }
5115
5116    #[test]
5117    fn sparse_trie_empty_update_one() {
5118        let ctx = ParallelSparseTrieTestContext;
5119
5120        let key = Nibbles::unpack(B256::with_last_byte(42));
5121        let value = || Account::default();
5122        let value_encoded = || {
5123            let mut account_rlp = Vec::new();
5124            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5125            account_rlp
5126        };
5127
5128        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5129            run_hash_builder(
5130                [(key, value())],
5131                NoopAccountTrieCursor::default(),
5132                Default::default(),
5133                [key],
5134            );
5135
5136        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5137        ctx.update_leaves(&mut sparse, [(key, value_encoded())]);
5138        ctx.assert_with_hash_builder(
5139            &mut sparse,
5140            hash_builder_root,
5141            hash_builder_updates,
5142            hash_builder_proof_nodes,
5143        );
5144    }
5145
5146    #[test]
5147    fn sparse_trie_empty_update_multiple_lower_nibbles() {
5148        let ctx = ParallelSparseTrieTestContext;
5149
5150        let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::<Vec<_>>();
5151        let value = || Account::default();
5152        let value_encoded = || {
5153            let mut account_rlp = Vec::new();
5154            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5155            account_rlp
5156        };
5157
5158        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5159            run_hash_builder(
5160                paths.iter().copied().zip(core::iter::repeat_with(value)),
5161                NoopAccountTrieCursor::default(),
5162                Default::default(),
5163                paths.clone(),
5164            );
5165
5166        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5167        ctx.update_leaves(
5168            &mut sparse,
5169            paths.into_iter().zip(core::iter::repeat_with(value_encoded)),
5170        );
5171
5172        ctx.assert_with_hash_builder(
5173            &mut sparse,
5174            hash_builder_root,
5175            hash_builder_updates,
5176            hash_builder_proof_nodes,
5177        );
5178    }
5179
5180    #[test]
5181    fn sparse_trie_empty_update_multiple_upper_nibbles() {
5182        let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5183        let value = || Account::default();
5184        let value_encoded = || {
5185            let mut account_rlp = Vec::new();
5186            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5187            account_rlp
5188        };
5189
5190        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5191            run_hash_builder(
5192                paths.iter().copied().zip(core::iter::repeat_with(value)),
5193                NoopAccountTrieCursor::default(),
5194                Default::default(),
5195                paths.clone(),
5196            );
5197
5198        let provider = DefaultTrieNodeProvider;
5199        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5200        for path in &paths {
5201            sparse.update_leaf(*path, value_encoded(), &provider).unwrap();
5202        }
5203        let sparse_root = sparse.root();
5204        let sparse_updates = sparse.take_updates();
5205
5206        assert_eq!(sparse_root, hash_builder_root);
5207        assert_eq!(sparse_updates.updated_nodes, hash_builder_updates.account_nodes);
5208        assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
5209    }
5210
5211    #[test]
5212    fn sparse_trie_empty_update_multiple() {
5213        let ctx = ParallelSparseTrieTestContext;
5214
5215        let paths = (0..=255)
5216            .map(|b| {
5217                Nibbles::unpack(if b % 2 == 0 {
5218                    B256::repeat_byte(b)
5219                } else {
5220                    B256::with_last_byte(b)
5221                })
5222            })
5223            .collect::<Vec<_>>();
5224        let value = || Account::default();
5225        let value_encoded = || {
5226            let mut account_rlp = Vec::new();
5227            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5228            account_rlp
5229        };
5230
5231        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5232            run_hash_builder(
5233                paths.iter().sorted_unstable().copied().zip(core::iter::repeat_with(value)),
5234                NoopAccountTrieCursor::default(),
5235                Default::default(),
5236                paths.clone(),
5237            );
5238
5239        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5240        ctx.update_leaves(
5241            &mut sparse,
5242            paths.iter().copied().zip(core::iter::repeat_with(value_encoded)),
5243        );
5244        ctx.assert_with_hash_builder(
5245            &mut sparse,
5246            hash_builder_root,
5247            hash_builder_updates,
5248            hash_builder_proof_nodes,
5249        );
5250    }
5251
5252    #[test]
5253    fn sparse_trie_empty_update_repeated() {
5254        let ctx = ParallelSparseTrieTestContext;
5255
5256        let paths = (0..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5257        let old_value = Account { nonce: 1, ..Default::default() };
5258        let old_value_encoded = {
5259            let mut account_rlp = Vec::new();
5260            old_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5261            account_rlp
5262        };
5263        let new_value = Account { nonce: 2, ..Default::default() };
5264        let new_value_encoded = {
5265            let mut account_rlp = Vec::new();
5266            new_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5267            account_rlp
5268        };
5269
5270        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5271            run_hash_builder(
5272                paths.iter().copied().zip(core::iter::repeat_with(|| old_value)),
5273                NoopAccountTrieCursor::default(),
5274                Default::default(),
5275                paths.clone(),
5276            );
5277
5278        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5279        ctx.update_leaves(
5280            &mut sparse,
5281            paths.iter().copied().zip(core::iter::repeat(old_value_encoded)),
5282        );
5283        ctx.assert_with_hash_builder(
5284            &mut sparse,
5285            hash_builder_root,
5286            hash_builder_updates,
5287            hash_builder_proof_nodes,
5288        );
5289
5290        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5291            run_hash_builder(
5292                paths.iter().copied().zip(core::iter::repeat(new_value)),
5293                NoopAccountTrieCursor::default(),
5294                Default::default(),
5295                paths.clone(),
5296            );
5297
5298        ctx.update_leaves(
5299            &mut sparse,
5300            paths.iter().copied().zip(core::iter::repeat(new_value_encoded)),
5301        );
5302        ctx.assert_with_hash_builder(
5303            &mut sparse,
5304            hash_builder_root,
5305            hash_builder_updates,
5306            hash_builder_proof_nodes,
5307        );
5308    }
5309
5310    #[test]
5311    fn sparse_trie_remove_leaf() {
5312        let ctx = ParallelSparseTrieTestContext;
5313        let provider = DefaultTrieNodeProvider;
5314        let mut sparse = ParallelSparseTrie::default();
5315
5316        let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec();
5317
5318        ctx.update_leaves(
5319            &mut sparse,
5320            [
5321                (
5322                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
5323                    value.clone(),
5324                ),
5325                (
5326                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
5327                    value.clone(),
5328                ),
5329                (
5330                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
5331                    value.clone(),
5332                ),
5333                (
5334                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
5335                    value.clone(),
5336                ),
5337                (
5338                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
5339                    value.clone(),
5340                ),
5341                (pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])), value),
5342            ],
5343        );
5344
5345        // Extension (Key = 5)
5346        // └── Branch (Mask = 1011)
5347        //     ├── 0 -> Extension (Key = 23)
5348        //     │        └── Branch (Mask = 0101)
5349        //     │              ├── 1 -> Leaf (Key = 1, Path = 50231)
5350        //     │              └── 3 -> Leaf (Key = 3, Path = 50233)
5351        //     ├── 2 -> Leaf (Key = 013, Path = 52013)
5352        //     └── 3 -> Branch (Mask = 0101)
5353        //                ├── 1 -> Leaf (Key = 3102, Path = 53102)
5354        //                └── 3 -> Branch (Mask = 1010)
5355        //                       ├── 0 -> Leaf (Key = 3302, Path = 53302)
5356        //                       └── 2 -> Leaf (Key = 3320, Path = 53320)
5357        pretty_assertions::assert_eq!(
5358            parallel_sparse_trie_nodes(&sparse)
5359                .into_iter()
5360                .map(|(k, v)| (*k, v.clone()))
5361                .collect::<BTreeMap<_, _>>(),
5362            BTreeMap::from_iter([
5363                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5364                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into(), &[])),
5365                (
5366                    Nibbles::from_nibbles([0x5, 0x0]),
5367                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
5368                ),
5369                (
5370                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
5371                    SparseNode::new_branch(0b1010.into(), &[])
5372                ),
5373                (
5374                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
5375                    SparseNode::new_leaf(leaf_key([], 59))
5376                ),
5377                (
5378                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
5379                    SparseNode::new_leaf(leaf_key([], 59))
5380                ),
5381                (
5382                    Nibbles::from_nibbles([0x5, 0x2]),
5383                    SparseNode::new_leaf(leaf_key([0x0, 0x1, 0x3], 62))
5384                ),
5385                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
5386                (
5387                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
5388                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
5389                ),
5390                (
5391                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5392                    SparseNode::new_branch(0b0101.into(), &[])
5393                ),
5394                (
5395                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5396                    SparseNode::new_leaf(leaf_key([0x2], 60))
5397                ),
5398                (
5399                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5400                    SparseNode::new_leaf(leaf_key([0x0], 60))
5401                )
5402            ])
5403        );
5404
5405        sparse
5406            .remove_leaf(
5407                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
5408                &provider,
5409            )
5410            .unwrap();
5411
5412        // Extension (Key = 5)
5413        // └── Branch (Mask = 1001)
5414        //     ├── 0 -> Extension (Key = 23)
5415        //     │        └── Branch (Mask = 0101)
5416        //     │              ├── 1 -> Leaf (Path = 50231...)
5417        //     │              └── 3 -> Leaf (Path = 50233...)
5418        //     └── 3 -> Branch (Mask = 0101)
5419        //                ├── 1 -> Leaf (Path = 53102...)
5420        //                └── 3 -> Branch (Mask = 1010)
5421        //                       ├── 0 -> Leaf (Path = 53302...)
5422        //                       └── 2 -> Leaf (Path = 53320...)
5423        pretty_assertions::assert_eq!(
5424            parallel_sparse_trie_nodes(&sparse)
5425                .into_iter()
5426                .map(|(k, v)| (*k, v.clone()))
5427                .collect::<BTreeMap<_, _>>(),
5428            BTreeMap::from_iter([
5429                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5430                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5431                (
5432                    Nibbles::from_nibbles([0x5, 0x0]),
5433                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
5434                ),
5435                (
5436                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
5437                    SparseNode::new_branch(0b1010.into(), &[])
5438                ),
5439                (
5440                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
5441                    SparseNode::new_leaf(leaf_key([], 59))
5442                ),
5443                (
5444                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
5445                    SparseNode::new_leaf(leaf_key([], 59))
5446                ),
5447                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
5448                (
5449                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
5450                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
5451                ),
5452                (
5453                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5454                    SparseNode::new_branch(0b0101.into(), &[])
5455                ),
5456                (
5457                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5458                    SparseNode::new_leaf(leaf_key([0x2], 60))
5459                ),
5460                (
5461                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5462                    SparseNode::new_leaf(leaf_key([0x0], 60))
5463                )
5464            ])
5465        );
5466
5467        sparse
5468            .remove_leaf(
5469                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
5470                &provider,
5471            )
5472            .unwrap();
5473
5474        // Extension (Key = 5)
5475        // └── Branch (Mask = 1001)
5476        //     ├── 0 -> Leaf (Path = 50233...)
5477        //     └── 3 -> Branch (Mask = 0101)
5478        //                ├── 1 -> Leaf (Path = 53102...)
5479        //                └── 3 -> Branch (Mask = 1010)
5480        //                       ├── 0 -> Leaf (Path = 53302...)
5481        //                       └── 2 -> Leaf (Path = 53320...)
5482        pretty_assertions::assert_eq!(
5483            parallel_sparse_trie_nodes(&sparse)
5484                .into_iter()
5485                .map(|(k, v)| (*k, v.clone()))
5486                .collect::<BTreeMap<_, _>>(),
5487            BTreeMap::from_iter([
5488                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5489                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5490                (
5491                    Nibbles::from_nibbles([0x5, 0x0]),
5492                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
5493                ),
5494                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
5495                (
5496                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
5497                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
5498                ),
5499                (
5500                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5501                    SparseNode::new_branch(0b0101.into(), &[])
5502                ),
5503                (
5504                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5505                    SparseNode::new_leaf(leaf_key([0x2], 60))
5506                ),
5507                (
5508                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5509                    SparseNode::new_leaf(leaf_key([0x0], 60))
5510                )
5511            ])
5512        );
5513
5514        sparse
5515            .remove_leaf(
5516                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
5517                &provider,
5518            )
5519            .unwrap();
5520
5521        // Extension (Key = 5)
5522        // └── Branch (Mask = 1001)
5523        //     ├── 0 -> Leaf (Path = 50233...)
5524        //     └── 3 -> Branch (Mask = 1010)
5525        //                ├── 0 -> Leaf (Path = 53302...)
5526        //                └── 2 -> Leaf (Path = 53320...)
5527        pretty_assertions::assert_eq!(
5528            parallel_sparse_trie_nodes(&sparse)
5529                .into_iter()
5530                .map(|(k, v)| (*k, v.clone()))
5531                .collect::<BTreeMap<_, _>>(),
5532            BTreeMap::from_iter([
5533                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5534                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5535                (
5536                    Nibbles::from_nibbles([0x5, 0x0]),
5537                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
5538                ),
5539                (
5540                    Nibbles::from_nibbles([0x5, 0x3]),
5541                    SparseNode::new_ext(Nibbles::from_nibbles([0x3]))
5542                ),
5543                (
5544                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
5545                    SparseNode::new_branch(0b0101.into(), &[])
5546                ),
5547                (
5548                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
5549                    SparseNode::new_leaf(leaf_key([0x2], 60))
5550                ),
5551                (
5552                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
5553                    SparseNode::new_leaf(leaf_key([0x0], 60))
5554                )
5555            ])
5556        );
5557
5558        sparse
5559            .remove_leaf(
5560                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])),
5561                &provider,
5562            )
5563            .unwrap();
5564
5565        // Extension (Key = 5)
5566        // └── Branch (Mask = 1001)
5567        //     ├── 0 -> Leaf (Path = 50233...)
5568        //     └── 3 -> Leaf (Path = 53302...)
5569        pretty_assertions::assert_eq!(
5570            parallel_sparse_trie_nodes(&sparse)
5571                .into_iter()
5572                .map(|(k, v)| (*k, v.clone()))
5573                .collect::<BTreeMap<_, _>>(),
5574            BTreeMap::from_iter([
5575                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
5576                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
5577                (
5578                    Nibbles::from_nibbles([0x5, 0x0]),
5579                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
5580                ),
5581                (
5582                    Nibbles::from_nibbles([0x5, 0x3]),
5583                    SparseNode::new_leaf(leaf_key([0x3, 0x0, 0x2], 62))
5584                ),
5585            ])
5586        );
5587
5588        sparse
5589            .remove_leaf(
5590                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
5591                &provider,
5592            )
5593            .unwrap();
5594
5595        // Leaf (Path = 53302...)
5596        pretty_assertions::assert_eq!(
5597            parallel_sparse_trie_nodes(&sparse)
5598                .into_iter()
5599                .map(|(k, v)| (*k, v.clone()))
5600                .collect::<BTreeMap<_, _>>(),
5601            BTreeMap::from_iter([(
5602                Nibbles::default(),
5603                SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([
5604                    0x5, 0x3, 0x3, 0x0, 0x2
5605                ])))
5606            ),])
5607        );
5608
5609        sparse
5610            .remove_leaf(
5611                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
5612                &provider,
5613            )
5614            .unwrap();
5615
5616        // Empty
5617        pretty_assertions::assert_eq!(
5618            parallel_sparse_trie_nodes(&sparse)
5619                .into_iter()
5620                .map(|(k, v)| (*k, v.clone()))
5621                .collect::<BTreeMap<_, _>>(),
5622            BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty)])
5623        );
5624    }
5625
5626    #[test]
5627    fn sparse_trie_remove_leaf_blinded() {
5628        let leaf = LeafNode::new(
5629            Nibbles::default(),
5630            alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5631        );
5632        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5633            Nibbles::default(),
5634            vec![
5635                RlpNode::word_rlp(&B256::repeat_byte(1)),
5636                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5637            ],
5638            TrieMask::new(0b11),
5639            None,
5640        ));
5641
5642        let provider = DefaultTrieNodeProvider;
5643        let mut sparse = ParallelSparseTrie::from_root(
5644            branch.clone(),
5645            Some(BranchNodeMasks {
5646                hash_mask: TrieMask::new(0b01),
5647                tree_mask: TrieMask::default(),
5648            }),
5649            false,
5650        )
5651        .unwrap();
5652
5653        // Reveal a branch node and one of its children
5654        //
5655        // Branch (Mask = 11)
5656        // ├── 0 -> Hash (Path = 0)
5657        // └── 1 -> Leaf (Path = 1)
5658        sparse
5659            .reveal_nodes(&mut [
5660                ProofTrieNodeV2 {
5661                    path: Nibbles::default(),
5662                    node: branch,
5663                    masks: Some(BranchNodeMasks {
5664                        hash_mask: TrieMask::default(),
5665                        tree_mask: TrieMask::new(0b01),
5666                    }),
5667                },
5668                ProofTrieNodeV2 {
5669                    path: Nibbles::from_nibbles([0x1]),
5670                    node: TrieNodeV2::Leaf(leaf),
5671                    masks: None,
5672                },
5673            ])
5674            .unwrap();
5675
5676        // Removing a blinded leaf should result in an error
5677        assert_matches!(
5678            sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x0])), &provider).map_err(|e| e.into_kind()),
5679            Err(SparseTrieErrorKind::BlindedNode(path)) if path == Nibbles::from_nibbles([0x0])
5680        );
5681    }
5682
5683    #[test]
5684    fn sparse_trie_remove_leaf_non_existent() {
5685        let leaf = LeafNode::new(
5686            Nibbles::default(),
5687            alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5688        );
5689        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5690            Nibbles::default(),
5691            vec![
5692                RlpNode::word_rlp(&B256::repeat_byte(1)),
5693                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5694            ],
5695            TrieMask::new(0b11),
5696            None,
5697        ));
5698
5699        let provider = DefaultTrieNodeProvider;
5700        let mut sparse = ParallelSparseTrie::from_root(
5701            branch.clone(),
5702            Some(BranchNodeMasks {
5703                hash_mask: TrieMask::new(0b01),
5704                tree_mask: TrieMask::default(),
5705            }),
5706            false,
5707        )
5708        .unwrap();
5709
5710        // Reveal a branch node and one of its children
5711        //
5712        // Branch (Mask = 11)
5713        // ├── 0 -> Hash (Path = 0)
5714        // └── 1 -> Leaf (Path = 1)
5715        sparse
5716            .reveal_nodes(&mut [
5717                ProofTrieNodeV2 {
5718                    path: Nibbles::default(),
5719                    node: branch,
5720                    masks: Some(BranchNodeMasks {
5721                        hash_mask: TrieMask::default(),
5722                        tree_mask: TrieMask::new(0b01),
5723                    }),
5724                },
5725                ProofTrieNodeV2 {
5726                    path: Nibbles::from_nibbles([0x1]),
5727                    node: TrieNodeV2::Leaf(leaf),
5728                    masks: None,
5729                },
5730            ])
5731            .unwrap();
5732
5733        // Removing a non-existent leaf should be a noop
5734        let sparse_old = sparse.clone();
5735        assert_matches!(
5736            sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x2])), &provider),
5737            Ok(())
5738        );
5739        assert_eq!(sparse, sparse_old);
5740    }
5741
    #[test]
    fn sparse_trie_fuzz() {
        // Having only the first 3 nibbles set, we narrow down the range of keys
        // to 4096 different hashes. It allows us to generate collisions more likely
        // to test the sparse trie updates.
        const KEY_NIBBLES_LEN: usize = 3;

        // Applies each `(update, keys_to_delete)` pair to both the parallel sparse trie and a
        // database-backed hash builder, asserting after the insert phase and again after the
        // delete phase that the roots, the branch-node updates, and the proof nodes all agree.
        fn test(updates: Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)>) {
            {
                // `state` mirrors the full set of key/value pairs fed to the hash builder so
                // far; it is the source of truth the sparse trie is compared against.
                let mut state = BTreeMap::default();
                let default_provider = DefaultTrieNodeProvider;
                let provider_factory = create_test_provider_factory();
                let mut sparse = ParallelSparseTrie::default().with_updates(true);

                for (update, keys_to_delete) in updates {
                    // Insert state updates into the sparse trie and calculate the root
                    for (key, account) in update.clone() {
                        let account = account.into_trie_account(EMPTY_ROOT_HASH);
                        let mut account_rlp = Vec::new();
                        account.encode(&mut account_rlp);
                        sparse.update_leaf(key, account_rlp, &default_provider).unwrap();
                    }
                    // We need to clone the sparse trie, so that all updated branch nodes are
                    // preserved, and not only those that were changed after the last call to
                    // `root()`.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    // Insert state updates into the hash builder and calculate the root
                    state.extend(update);
                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                Default::default(),
                                state.keys().copied(),
                            )
                        });

                    // Extract account nodes before moving hash_builder_updates
                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Write trie updates to the database so the next iteration's trie cursor
                    // sees the accumulated hash-builder state.
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    // Assert that the sparse trie root matches the hash builder root
                    assert_eq!(sparse_root, hash_builder_root);
                    // Assert that the sparse trie updates match the hash builder updates
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    // Assert that the sparse trie nodes match the hash builder proof nodes
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );

                    // Delete some keys from both the hash builder and the sparse trie and check
                    // that the sparse trie root still matches the hash builder root
                    for key in &keys_to_delete {
                        // `unwrap` is sound: `transform_updates` only selects keys that were
                        // previously inserted (and never the same key twice).
                        state.remove(key).unwrap();
                        sparse.remove_leaf(key, &default_provider).unwrap();
                    }

                    // We need to clone the sparse trie, so that all updated branch nodes are
                    // preserved, and not only those that were changed after the last call to
                    // `root()`.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                keys_to_delete
                                    .iter()
                                    .map(|nibbles| B256::from_slice(&nibbles.pack()))
                                    .collect(),
                                state.keys().copied(),
                            )
                        });

                    // Extract account nodes before moving hash_builder_updates
                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Write trie updates to the database
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    // Assert that the sparse trie root matches the hash builder root
                    assert_eq!(sparse_root, hash_builder_root);
                    // Assert that the sparse trie updates match the hash builder updates
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    // Assert that the sparse trie nodes match the hash builder proof nodes
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );
                }
            }
        }

        // Pairs every generated update batch with a random set of keys to delete, drawn from
        // all keys inserted so far (across all batches seen up to that point).
        fn transform_updates(
            updates: Vec<BTreeMap<Nibbles, Account>>,
            mut rng: impl rand::Rng,
        ) -> Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)> {
            // Accumulates every key inserted by any batch processed so far.
            let mut keys = BTreeSet::new();
            updates
                .into_iter()
                .map(|update| {
                    keys.extend(update.keys().copied());

                    // Delete roughly half as many keys as this batch inserted.
                    let keys_to_delete_len = update.len() / 2;
                    let keys_to_delete = (0..keys_to_delete_len)
                        .map(|_| {
                            let key =
                                *rand::seq::IteratorRandom::choose(keys.iter(), &mut rng).unwrap();
                            // `take` removes the key from the candidate set so it can never be
                            // chosen for deletion twice.
                            keys.take(&key).unwrap()
                        })
                        .collect();

                    (update, keys_to_delete)
                })
                .collect::<Vec<_>>()
        }

        // 10 cases, each a sequence of 1..50 batches of 1..50 accounts keyed by 3-nibble
        // (right-padded) paths, perturbed into (insert, delete) pairs.
        proptest!(ProptestConfig::with_cases(10), |(
            updates in proptest::collection::vec(
                proptest::collection::btree_map(
                    any_with::<Nibbles>(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles_right),
                    arb::<Account>(),
                    1..50,
                ),
                1..50,
            ).prop_perturb(transform_updates)
        )| {
            test(updates)
        });
    }
5898
5899    #[test]
5900    fn sparse_trie_two_leaves_at_lower_roots() {
5901        let provider = DefaultTrieNodeProvider;
5902        let mut trie = ParallelSparseTrie::default().with_updates(true);
5903        let key_50 = Nibbles::unpack(hex!(
5904            "0x5000000000000000000000000000000000000000000000000000000000000000"
5905        ));
5906        let key_51 = Nibbles::unpack(hex!(
5907            "0x5100000000000000000000000000000000000000000000000000000000000000"
5908        ));
5909
5910        let account = Account::default().into_trie_account(EMPTY_ROOT_HASH);
5911        let mut account_rlp = Vec::new();
5912        account.encode(&mut account_rlp);
5913
5914        // Add a leaf and calculate the root.
5915        trie.update_leaf(key_50, account_rlp.clone(), &provider).unwrap();
5916        trie.root();
5917
5918        // Add a second leaf and assert that the root is the expected value.
5919        trie.update_leaf(key_51, account_rlp.clone(), &provider).unwrap();
5920
5921        let expected_root =
5922            hex!("0xdaf0ef9f91a2f179bb74501209effdb5301db1697bcab041eca2234b126e25de");
5923        let root = trie.root();
5924        assert_eq!(root, expected_root);
5925        assert_eq!(SparseTrieUpdates::default(), trie.take_updates());
5926    }
5927
5928    /// We have three leaves that share the same prefix: 0x00, 0x01 and 0x02. Hash builder trie has
5929    /// only nodes 0x00 and 0x01, and we have proofs for them. Node B is new and inserted in the
5930    /// sparse trie first.
5931    ///
5932    /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie.
5933    /// 2. Insert leaf 0x01 into the sparse trie.
5934    /// 3. Reveal the hash builder proof to leaf 0x02 in the sparse trie.
5935    ///
5936    /// The hash builder proof to the leaf 0x02 didn't have the leaf 0x01 at the corresponding
5937    /// nibble of the branch node, so we need to adjust the branch node instead of fully
5938    /// replacing it.
5939    #[test]
5940    fn sparse_trie_reveal_node_1() {
5941        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00]));
5942        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01]));
5943        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x02]));
5944        let value = || Account::default();
5945        let value_encoded = || {
5946            let mut account_rlp = Vec::new();
5947            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5948            account_rlp
5949        };
5950
5951        // Generate the proof for the root node and initialize the sparse trie with it
5952        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5953            run_hash_builder(
5954                [(key1(), value()), (key3(), value())],
5955                NoopAccountTrieCursor::default(),
5956                Default::default(),
5957                [Nibbles::default()],
5958            );
5959
5960        let provider = DefaultTrieNodeProvider;
5961        let masks = match (
5962            branch_node_hash_masks.get(&Nibbles::default()).copied(),
5963            branch_node_tree_masks.get(&Nibbles::default()).copied(),
5964        ) {
5965            (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
5966            (Some(h), None) => {
5967                Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
5968            }
5969            (None, Some(t)) => {
5970                Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
5971            }
5972            (None, None) => None,
5973        };
5974        let mut sparse = ParallelSparseTrie::from_root(
5975            TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
5976            masks,
5977            false,
5978        )
5979        .unwrap();
5980
5981        // Generate the proof for the first key and reveal it in the sparse trie
5982        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5983            run_hash_builder(
5984                [(key1(), value()), (key3(), value())],
5985                NoopAccountTrieCursor::default(),
5986                Default::default(),
5987                [key1()],
5988            );
5989        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
5990            .nodes_sorted()
5991            .into_iter()
5992            .map(|(path, node)| {
5993                let hash_mask = branch_node_hash_masks.get(&path).copied();
5994                let tree_mask = branch_node_tree_masks.get(&path).copied();
5995                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
5996                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
5997            })
5998            .collect();
5999        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6000
6001        // Check that the branch node exists with only two nibbles set
6002        assert_matches!(
6003            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6004            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b101)
6005        );
6006
6007        // Insert the leaf for the second key
6008        sparse.update_leaf(key2(), value_encoded(), &provider).unwrap();
6009
6010        // Check that the branch node was updated and another nibble was set
6011        assert_matches!(
6012            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6013            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6014        );
6015
6016        // Generate the proof for the third key and reveal it in the sparse trie
6017        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6018            run_hash_builder(
6019                [(key1(), value()), (key3(), value())],
6020                NoopAccountTrieCursor::default(),
6021                Default::default(),
6022                [key3()],
6023            );
6024        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6025            .nodes_sorted()
6026            .into_iter()
6027            .map(|(path, node)| {
6028                let hash_mask = branch_node_hash_masks.get(&path).copied();
6029                let tree_mask = branch_node_tree_masks.get(&path).copied();
6030                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6031                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6032            })
6033            .collect();
6034        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6035
6036        // Check that nothing changed in the branch node
6037        assert_matches!(
6038            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6039            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6040        );
6041
6042        // Generate the nodes for the full trie with all three key using the hash builder, and
6043        // compare them to the sparse trie
6044        let (_, _, hash_builder_proof_nodes, _, _) = run_hash_builder(
6045            [(key1(), value()), (key2(), value()), (key3(), value())],
6046            NoopAccountTrieCursor::default(),
6047            Default::default(),
6048            [key1(), key2(), key3()],
6049        );
6050
6051        assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
6052    }
6053
6054    /// We have three leaves: 0x0000, 0x0101, and 0x0102. Hash builder trie has all nodes, and we
6055    /// have proofs for them.
6056    ///
6057    /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie.
6058    /// 2. Remove leaf 0x00 from the sparse trie (that will remove the branch node and create an
6059    ///    extension node with the key 0x0000).
6060    /// 3. Reveal the hash builder proof to leaf 0x0101 in the sparse trie.
6061    ///
6062    /// The hash builder proof to the leaf 0x0101 had a branch node in the path, but we turned it
6063    /// into an extension node, so it should ignore this node.
6064    #[test]
6065    fn sparse_trie_reveal_node_2() {
6066        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x00]));
6067        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x01]));
6068        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x02]));
6069        let value = || Account::default();
6070
6071        // Generate the proof for the root node and initialize the sparse trie with it
6072        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6073            run_hash_builder(
6074                [(key1(), value()), (key2(), value()), (key3(), value())],
6075                NoopAccountTrieCursor::default(),
6076                Default::default(),
6077                [Nibbles::default()],
6078            );
6079
6080        let provider = DefaultTrieNodeProvider;
6081        let masks = match (
6082            branch_node_hash_masks.get(&Nibbles::default()).copied(),
6083            branch_node_tree_masks.get(&Nibbles::default()).copied(),
6084        ) {
6085            (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
6086            (Some(h), None) => {
6087                Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
6088            }
6089            (None, Some(t)) => {
6090                Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
6091            }
6092            (None, None) => None,
6093        };
6094        let mut sparse = ParallelSparseTrie::from_root(
6095            TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
6096            masks,
6097            false,
6098        )
6099        .unwrap();
6100
6101        // Generate the proof for the children of the root branch node and reveal it in the sparse
6102        // trie
6103        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6104            run_hash_builder(
6105                [(key1(), value()), (key2(), value()), (key3(), value())],
6106                NoopAccountTrieCursor::default(),
6107                Default::default(),
6108                [key1(), Nibbles::from_nibbles_unchecked([0x01])],
6109            );
6110        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6111            .nodes_sorted()
6112            .into_iter()
6113            .map(|(path, node)| {
6114                let hash_mask = branch_node_hash_masks.get(&path).copied();
6115                let tree_mask = branch_node_tree_masks.get(&path).copied();
6116                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6117                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6118            })
6119            .collect();
6120        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6121
6122        // Check that the branch node exists
6123        assert_matches!(
6124            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6125            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b11)
6126        );
6127
6128        // Remove the leaf for the first key
6129        sparse.remove_leaf(&key1(), &provider).unwrap();
6130
6131        // Check that the branch node was turned into an extension node
6132        assert_eq!(
6133            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6134            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6135        );
6136
6137        // Generate the proof for the third key and reveal it in the sparse trie
6138        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6139            run_hash_builder(
6140                [(key1(), value()), (key2(), value()), (key3(), value())],
6141                NoopAccountTrieCursor::default(),
6142                Default::default(),
6143                [key2()],
6144            );
6145        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6146            .nodes_sorted()
6147            .into_iter()
6148            .map(|(path, node)| {
6149                let hash_mask = branch_node_hash_masks.get(&path).copied();
6150                let tree_mask = branch_node_tree_masks.get(&path).copied();
6151                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6152                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6153            })
6154            .collect();
6155        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6156
6157        // Check that nothing changed in the extension node
6158        assert_eq!(
6159            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6160            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6161        );
6162    }
6163
6164    /// We have two leaves that share the same prefix: 0x0001 and 0x0002, and a leaf with a
6165    /// different prefix: 0x0100. Hash builder trie has only the first two leaves, and we have
6166    /// proofs for them.
6167    ///
6168    /// 1. Insert the leaf 0x0100 into the sparse trie, and check that the root extension node was
6169    ///    turned into a branch node.
6170    /// 2. Reveal the leaf 0x0001 in the sparse trie, and check that the root branch node wasn't
6171    ///    overwritten with the extension node from the proof.
6172    #[test]
6173    fn sparse_trie_reveal_node_3() {
6174        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x01]));
6175        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x02]));
6176        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x00]));
6177        let value = || Account::default();
6178        let value_encoded = || {
6179            let mut account_rlp = Vec::new();
6180            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
6181            account_rlp
6182        };
6183
6184        // Generate the proof for the root node and initialize the sparse trie with it
6185        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6186            run_hash_builder(
6187                [(key1(), value()), (key2(), value())],
6188                NoopAccountTrieCursor::default(),
6189                Default::default(),
6190                [Nibbles::default()],
6191            );
6192
6193        let mut nodes = Vec::new();
6194
6195        for (path, node) in hash_builder_proof_nodes.nodes_sorted() {
6196            let hash_mask = branch_node_hash_masks.get(&path).copied();
6197            let tree_mask = branch_node_tree_masks.get(&path).copied();
6198            let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6199            nodes.push((path, TrieNode::decode(&mut &node[..]).unwrap(), masks));
6200        }
6201
6202        nodes.sort_unstable_by(|a, b| reth_trie_common::depth_first_cmp(&a.0, &b.0));
6203
6204        let nodes = ProofTrieNodeV2::from_sorted_trie_nodes(nodes);
6205
6206        let provider = DefaultTrieNodeProvider;
6207        let mut sparse =
6208            ParallelSparseTrie::from_root(nodes[0].node.clone(), nodes[0].masks, false).unwrap();
6209
6210        // Check that the root extension node exists
6211        assert_matches!(
6212            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6213            Some(SparseNode::Extension { key, state: SparseNodeState::Dirty }) if *key == Nibbles::from_nibbles([0x00])
6214        );
6215
6216        // Insert the leaf with a different prefix
6217        sparse.update_leaf(key3(), value_encoded(), &provider).unwrap();
6218
6219        // Check that the extension node was turned into a branch node
6220        assert_eq!(
6221            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6222            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
6223        );
6224
6225        // Generate the proof for the first key and reveal it in the sparse trie
6226        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6227            run_hash_builder(
6228                [(key1(), value()), (key2(), value())],
6229                NoopAccountTrieCursor::default(),
6230                Default::default(),
6231                [key1()],
6232            );
6233        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6234            .nodes_sorted()
6235            .into_iter()
6236            .map(|(path, node)| {
6237                let hash_mask = branch_node_hash_masks.get(&path).copied();
6238                let tree_mask = branch_node_tree_masks.get(&path).copied();
6239                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6240                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6241            })
6242            .collect();
6243        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6244
6245        // Check that the branch node wasn't overwritten by the extension node in the proof
6246        assert_eq!(
6247            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6248            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
6249        );
6250    }
6251
6252    #[test]
6253    fn test_update_leaf_cross_level() {
6254        let ctx = ParallelSparseTrieTestContext;
6255        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6256
6257        // Test adding leaves that demonstrate the cross-level behavior
6258        // Based on the example: leaves 0x1234, 0x1245, 0x1334, 0x1345
6259        //
6260        // Final trie structure:
6261        // Upper trie:
6262        //   0x: Extension { key: 0x1 }
6263        //   └── 0x1: Branch { state_mask: 0x1100 }
6264        //       └── Subtrie (0x12): pointer to lower subtrie
6265        //       └── Subtrie (0x13): pointer to lower subtrie
6266        //
6267        // Lower subtrie (0x12):
6268        //   0x12: Branch { state_mask: 0x8 | 0x10 }
6269        //   ├── 0x123: Leaf { key: 0x4 }
6270        //   └── 0x124: Leaf { key: 0x5 }
6271        //
6272        // Lower subtrie (0x13):
6273        //   0x13: Branch { state_mask: 0x8 | 0x10 }
6274        //   ├── 0x133: Leaf { key: 0x4 }
6275        //   └── 0x134: Leaf { key: 0x5 }
6276
6277        // First add leaf 0x1345 - this should create a leaf in upper trie at 0x
6278        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x3, 0x4, 0x5], 1);
6279        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
6280
6281        // Verify upper trie has a leaf at the root with key 1345
6282        ctx.assert_upper_subtrie(&trie)
6283            .has_leaf(
6284                &Nibbles::default(),
6285                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4, 0x5])),
6286            )
6287            .has_value(&leaf1_path, &value1);
6288
6289        // Add leaf 0x1234 - this should go first in the upper subtrie
6290        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
6291        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6292
6293        // Upper trie should now have a branch at 0x1
6294        ctx.assert_upper_subtrie(&trie)
6295            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
6296            .has_no_value(&leaf1_path)
6297            .has_no_value(&leaf2_path);
6298
6299        // Add leaf 0x1245 - this should cause a branch and create the 0x12 subtrie
6300        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x4, 0x5], 3);
6301        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();
6302
6303        // Verify lower subtrie at 0x12 exists with correct structure
6304        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6305            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6306            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6307            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
6308            .has_value(&leaf2_path, &value2)
6309            .has_value(&leaf3_path, &value3);
6310
6311        // Add leaf 0x1334 - this should create another lower subtrie
6312        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x3, 0x3, 0x4], 4);
6313        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();
6314
6315        // Verify lower subtrie at 0x13 exists with correct values
6316        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x3]))
6317            .has_value(&leaf1_path, &value1)
6318            .has_value(&leaf4_path, &value4);
6319
6320        // Verify the 0x12 subtrie still has its values
6321        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6322            .has_value(&leaf2_path, &value2)
6323            .has_value(&leaf3_path, &value3);
6324
6325        // Upper trie has no values
6326        ctx.assert_upper_subtrie(&trie)
6327            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1]))
6328            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
6329            .has_no_value(&leaf1_path)
6330            .has_no_value(&leaf2_path)
6331            .has_no_value(&leaf3_path)
6332            .has_no_value(&leaf4_path);
6333    }
6334
6335    #[test]
6336    fn test_update_leaf_split_at_level_boundary() {
6337        let ctx = ParallelSparseTrieTestContext;
6338        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6339
6340        // This test demonstrates what happens when we insert leaves that cause
6341        // splitting exactly at the upper/lower trie boundary (2 nibbles).
6342        //
6343        // Final trie structure:
6344        // Upper trie:
6345        //   0x: Extension { key: 0x12 }
6346        //       └── Subtrie (0x12): pointer to lower subtrie
6347        //
6348        // Lower subtrie (0x12):
6349        //   0x12: Branch { state_mask: 0x4 | 0x8 }
6350        //   ├── 0x122: Leaf { key: 0x4 }
6351        //   └── 0x123: Leaf { key: 0x4 }
6352
6353        // First insert a leaf that ends exactly at the boundary (2 nibbles)
6354        let (first_leaf_path, first_value) = ctx.create_test_leaf([0x1, 0x2, 0x2, 0x4], 1);
6355
6356        trie.update_leaf(first_leaf_path, first_value.clone(), DefaultTrieNodeProvider).unwrap();
6357
6358        // In an empty trie, the first leaf becomes the root, regardless of path length
6359        ctx.assert_upper_subtrie(&trie)
6360            .has_leaf(
6361                &Nibbles::default(),
6362                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x2, 0x4])),
6363            )
6364            .has_value(&first_leaf_path, &first_value);
6365
6366        // Now insert another leaf that shares the same 2-nibble prefix
6367        let (second_leaf_path, second_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
6368
6369        trie.update_leaf(second_leaf_path, second_value.clone(), DefaultTrieNodeProvider).unwrap();
6370
6371        // Now both leaves should be in a lower subtrie at index [0x1, 0x2]
6372        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6373            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x2, 0x3])
6374            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x2]), &leaf_key([0x4], 61))
6375            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6376            .has_value(&first_leaf_path, &first_value)
6377            .has_value(&second_leaf_path, &second_value);
6378
6379        // Upper subtrie should no longer have these values
6380        ctx.assert_upper_subtrie(&trie)
6381            .has_no_value(&first_leaf_path)
6382            .has_no_value(&second_leaf_path);
6383    }
6384
    #[test]
    fn test_update_subtrie_with_multiple_leaves() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // First, add multiple leaves that will create a subtrie structure
        // All leaves share the prefix [0x1, 0x2] to ensure they create a subtrie
        //
        // This should result in a trie with the following structure:
        // 0x: Extension { key: 0x12 }
        //  └── Subtrie (0x12):
        //      0x12: Branch { state_mask: 0x8 | 0x10 }       // children 0x3 and 0x4
        //      ├── 0x123: Branch { state_mask: 0x10 | 0x20 } // children 0x4 and 0x5
        //      │   ├── 0x1234: Leaf { key: 0x }
        //      │   └── 0x1235: Leaf { key: 0x }
        //      └── 0x124: Branch { state_mask: 0x40 | 0x80 } // children 0x6 and 0x7
        //          ├── 0x1246: Leaf { key: 0x }
        //          └── 0x1247: Leaf { key: 0x }
        let leaves = ctx.create_test_leaves(&[
            &[0x1, 0x2, 0x3, 0x4],
            &[0x1, 0x2, 0x3, 0x5],
            &[0x1, 0x2, 0x4, 0x6],
            &[0x1, 0x2, 0x4, 0x7],
        ]);

        // Insert all leaves
        ctx.update_leaves(&mut trie, leaves.clone());

        // Verify the upper subtrie has an extension node at the root with key 0x12
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // Verify the subtrie structure using fluent assertions
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &[0x6, 0x7])
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // Now update one of the leaves with a new value
        let updated_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
        let (_, updated_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 100);

        trie.update_leaf(updated_path, updated_value.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify the subtrie structure is maintained and value is updated
        // The branch structure should remain the same and all values should be present
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_value(&updated_path, &updated_value)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // Add a new leaf that extends an existing branch
        let (new_leaf_path, new_leaf_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 200);

        trie.update_leaf(new_leaf_path, new_leaf_value.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify the branch at [0x1, 0x2, 0x3] now has an additional child
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6])
            .has_value(&new_leaf_path, &new_leaf_value);
    }
6452
    #[test]
    fn test_update_subtrie_extension_node_subtrie() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // All leaves share the prefix [0x1, 0x2, 0x3] to ensure they create a subtrie
        //
        // This should result in a trie with the following structure
        // 0x: Extension { key: 0x123 }
        //  └── Subtrie (0x12):
        //      0x123: Branch { state_mask: 0x10 | 0x20 } // children 0x4 and 0x5
        //      ├── 0x1234: Leaf { key: 0x }
        //      └── 0x1235: Leaf { key: 0x }
        let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x3, 0x5]]);

        // Insert all leaves
        ctx.update_leaves(&mut trie, leaves.clone());

        // Verify the upper subtrie has an extension node at the root with key 0x123
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));

        // Verify the lower subtrie structure
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1);
    }
6481
    #[test]
    fn update_subtrie_extension_node_cross_level() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // First, add multiple leaves that will create a subtrie structure
        // All leaves share the prefix [0x1, 0x2] to ensure they create a branch node and subtrie
        //
        // This should result in a trie with the following structure
        // 0x: Extension { key: 0x12 }
        //  └── Subtrie (0x12):
        //      0x12: Branch { state_mask: 0x8 | 0x10 } // children 0x3 and 0x4
        //      ├── 0x123: Leaf { key: 0x4 }
        //      └── 0x124: Leaf { key: 0x5 }
        let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x4, 0x5]]);

        // Insert all leaves
        ctx.update_leaves(&mut trie, leaves.clone());

        // Verify the upper subtrie has an extension node at the root with key 0x12
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // Verify the lower subtrie structure
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1);
    }
6513
6514    #[test]
6515    fn test_update_single_nibble_paths() {
6516        let ctx = ParallelSparseTrieTestContext;
6517        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6518
6519        // Test edge case: single nibble paths that create branches in upper trie
6520        //
6521        // Final trie structure:
6522        // Upper trie:
6523        //   0x: Branch { state_mask: 0x1 | 0x2 | 0x4 | 0x8 }
6524        //   ├── 0x0: Leaf { key: 0x }
6525        //   ├── 0x1: Leaf { key: 0x }
6526        //   ├── 0x2: Leaf { key: 0x }
6527        //   └── 0x3: Leaf { key: 0x }
6528
6529        // Insert leaves with single nibble paths
6530        let (leaf1_path, value1) = ctx.create_test_leaf([0x0], 1);
6531        let (leaf2_path, value2) = ctx.create_test_leaf([0x1], 2);
6532        let (leaf3_path, value3) = ctx.create_test_leaf([0x2], 3);
6533        let (leaf4_path, value4) = ctx.create_test_leaf([0x3], 4);
6534
6535        ctx.update_leaves(
6536            &mut trie,
6537            [
6538                (leaf1_path, value1.clone()),
6539                (leaf2_path, value2.clone()),
6540                (leaf3_path, value3.clone()),
6541                (leaf4_path, value4.clone()),
6542            ],
6543        );
6544
6545        // Verify upper trie has a branch at root with 4 children
6546        ctx.assert_upper_subtrie(&trie)
6547            .has_branch(&Nibbles::default(), &[0x0, 0x1, 0x2, 0x3])
6548            .has_leaf(&Nibbles::from_nibbles([0x0]), &leaf_key([], 63))
6549            .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([], 63))
6550            .has_leaf(&Nibbles::from_nibbles([0x2]), &leaf_key([], 63))
6551            .has_leaf(&Nibbles::from_nibbles([0x3]), &leaf_key([], 63))
6552            .has_value(&leaf1_path, &value1)
6553            .has_value(&leaf2_path, &value2)
6554            .has_value(&leaf3_path, &value3)
6555            .has_value(&leaf4_path, &value4);
6556    }
6557
6558    #[test]
6559    fn test_update_deep_extension_chain() {
6560        let ctx = ParallelSparseTrieTestContext;
6561        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6562
6563        // Test edge case: deep extension chains that span multiple levels
6564        //
6565        // Final trie structure:
6566        // Upper trie:
6567        //   0x: Extension { key: 0x111111 }
6568        //       └── Subtrie (0x11): pointer to lower subtrie
6569        //
6570        // Lower subtrie (0x11):
6571        //   0x111111: Branch { state_mask: 0x1 | 0x2 }
6572        //   ├── 0x1111110: Leaf { key: 0x }
6573        //   └── 0x1111111: Leaf { key: 0x }
6574
6575        // Create leaves with a long common prefix
6576        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0], 1);
6577        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1], 2);
6578
6579        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);
6580
6581        // Verify upper trie has extension with the full common prefix
6582        ctx.assert_upper_subtrie(&trie).has_extension(
6583            &Nibbles::default(),
6584            &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6585        );
6586
6587        // Verify lower subtrie has branch structure
6588        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x1]))
6589            .has_branch(&Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]), &[0x0, 0x1])
6590            .has_leaf(
6591                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0]),
6592                &leaf_key([], 57),
6593            )
6594            .has_leaf(
6595                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6596                &leaf_key([], 57),
6597            )
6598            .has_value(&leaf1_path, &value1)
6599            .has_value(&leaf2_path, &value2);
6600    }
6601
6602    #[test]
6603    fn test_update_branch_with_all_nibbles() {
6604        let ctx = ParallelSparseTrieTestContext;
6605        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6606
6607        // Test edge case: branch node with all 16 possible nibble children
6608        //
6609        // Final trie structure:
6610        // Upper trie:
6611        //   0x: Extension { key: 0xA }
6612        //       └── Subtrie (0xA0): pointer to lower subtrie
6613        //
6614        // Lower subtrie (0xA0):
6615        //   0xA0: Branch { state_mask: 0xFFFF } (all 16 children)
6616        //   ├── 0xA00: Leaf { key: 0x }
6617        //   ├── 0xA01: Leaf { key: 0x }
6618        //   ├── 0xA02: Leaf { key: 0x }
6619        //   ... (all nibbles 0x0 through 0xF)
6620        //   └── 0xA0F: Leaf { key: 0x }
6621
6622        // Create leaves for all 16 possible nibbles
6623        let mut leaves = Vec::new();
6624        for nibble in 0x0..=0xF {
6625            let (path, value) = ctx.create_test_leaf([0xA, 0x0, nibble], nibble as u64 + 1);
6626            leaves.push((path, value));
6627        }
6628
6629        // Insert all leaves
6630        ctx.update_leaves(&mut trie, leaves.iter().cloned());
6631
6632        // Verify upper trie structure
6633        ctx.assert_upper_subtrie(&trie)
6634            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0x0]));
6635
6636        // Verify lower subtrie has branch with all 16 children
6637        let mut subtrie_assert =
6638            ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0x0])).has_branch(
6639                &Nibbles::from_nibbles([0xA, 0x0]),
6640                &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF],
6641            );
6642
6643        // Verify all leaves exist
6644        for (i, (path, value)) in leaves.iter().enumerate() {
6645            subtrie_assert = subtrie_assert
6646                .has_leaf(&Nibbles::from_nibbles([0xA, 0x0, i as u8]), &leaf_key([], 61))
6647                .has_value(path, value);
6648        }
6649    }
6650
6651    #[test]
6652    fn test_update_creates_multiple_subtries() {
6653        let ctx = ParallelSparseTrieTestContext;
6654        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6655
6656        // Test edge case: updates that create multiple subtries at once
6657        //
6658        // Final trie structure:
6659        // Upper trie:
6660        //   0x: Extension { key: 0x0 }
6661        //       └── 0x0: Branch { state_mask: 0xF }
6662        //           ├── Subtrie (0x00): pointer
6663        //           ├── Subtrie (0x01): pointer
6664        //           ├── Subtrie (0x02): pointer
6665        //           └── Subtrie (0x03): pointer
6666        //
6667        // Each lower subtrie has leaves:
6668        //   0xXY: Leaf { key: 0xZ... }
6669
6670        // Create leaves that will force multiple subtries
6671        let leaves = [
6672            ctx.create_test_leaf([0x0, 0x0, 0x1, 0x2], 1),
6673            ctx.create_test_leaf([0x0, 0x1, 0x3, 0x4], 2),
6674            ctx.create_test_leaf([0x0, 0x2, 0x5, 0x6], 3),
6675            ctx.create_test_leaf([0x0, 0x3, 0x7, 0x8], 4),
6676        ];
6677
6678        // Insert all leaves
6679        ctx.update_leaves(&mut trie, leaves.iter().cloned());
6680
6681        // Verify upper trie has extension then branch
6682        ctx.assert_upper_subtrie(&trie)
6683            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x0]))
6684            .has_branch(&Nibbles::from_nibbles([0x0]), &[0x0, 0x1, 0x2, 0x3]);
6685
6686        // Verify each subtrie exists and contains its leaf
6687        for (i, (leaf_path, leaf_value)) in leaves.iter().enumerate() {
6688            let subtrie_path = Nibbles::from_nibbles([0x0, i as u8]);
6689            let full_path: [u8; 4] = match i {
6690                0 => [0x0, 0x0, 0x1, 0x2],
6691                1 => [0x0, 0x1, 0x3, 0x4],
6692                2 => [0x0, 0x2, 0x5, 0x6],
6693                3 => [0x0, 0x3, 0x7, 0x8],
6694                _ => unreachable!(),
6695            };
6696            ctx.assert_subtrie(&trie, subtrie_path)
6697                .has_leaf(&subtrie_path, &leaf_key(&full_path[2..], 62))
6698                .has_value(leaf_path, leaf_value);
6699        }
6700    }
6701
    #[test]
    fn test_update_extension_to_branch_transformation() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Test edge case: extension node transforms to branch when split
        //
        // Initial state after first two leaves:
        // Upper trie:
        //   0x: Extension { key: 0xFF0 }
        //       └── Subtrie (0xFF): pointer
        //
        // After third leaf (0xF0...):
        // Upper trie:
        //   0x: Extension { key: 0xF }
        //       └── 0xF: Branch { state_mask: 0x1 | 0x8000 } // children 0x0 and 0xF
        //           ├── Subtrie (0xF0): pointer
        //           └── Subtrie (0xFF): pointer

        // First two leaves share prefix 0xFF0
        let (leaf1_path, value1) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x1], 1);
        let (leaf2_path, value2) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x2], 2);
        let (leaf3_path, value3) = ctx.create_test_leaf([0xF, 0x0, 0x0, 0x3], 3);

        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);

        // Verify initial extension structure
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF, 0xF, 0x0]));

        // Add leaf that splits the extension
        ctx.update_leaves(&mut trie, [(leaf3_path, value3.clone())]);

        // Verify transformed structure
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF]))
            .has_branch(&Nibbles::from_nibbles([0xF]), &[0x0, 0xF]);

        // Verify subtries
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
            .has_branch(&Nibbles::from_nibbles([0xF, 0xF, 0x0]), &[0x1, 0x2])
            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x1]), &leaf_key([], 60))
            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x2]), &leaf_key([], 60))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0x0]))
            .has_leaf(&Nibbles::from_nibbles([0xF, 0x0]), &leaf_key([0x0, 0x3], 62))
            .has_value(&leaf3_path, &value3);
    }
6752
6753    #[test]
6754    fn test_update_long_shared_prefix_at_boundary() {
6755        let ctx = ParallelSparseTrieTestContext;
6756        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6757
6758        // Test edge case: leaves with long shared prefix that ends exactly at 2-nibble boundary
6759        //
6760        // Final trie structure:
6761        // Upper trie:
6762        //   0x: Extension { key: 0xAB }
6763        //       └── Subtrie (0xAB): pointer to lower subtrie
6764        //
6765        // Lower subtrie (0xAB):
6766        //   0xAB: Branch { state_mask: 0x1000 | 0x2000 }
6767        //   ├── 0xABC: Leaf { key: 0xDEF }
6768        //   └── 0xABD: Leaf { key: 0xEF0 }
6769
6770        // Create leaves that share exactly 2 nibbles
6771        let (leaf1_path, value1) = ctx.create_test_leaf([0xA, 0xB, 0xC, 0xD, 0xE, 0xF], 1);
6772        let (leaf2_path, value2) = ctx.create_test_leaf([0xA, 0xB, 0xD, 0xE, 0xF, 0x0], 2);
6773
6774        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
6775        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6776
6777        // Verify upper trie structure
6778        ctx.assert_upper_subtrie(&trie)
6779            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0xB]));
6780
6781        // Verify lower subtrie structure
6782        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0xB]))
6783            .has_branch(&Nibbles::from_nibbles([0xA, 0xB]), &[0xC, 0xD])
6784            .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xC]), &leaf_key([0xD, 0xE, 0xF], 61))
6785            .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xD]), &leaf_key([0xE, 0xF, 0x0], 61))
6786            .has_value(&leaf1_path, &value1)
6787            .has_value(&leaf2_path, &value2);
6788    }
6789
6790    #[test]
6791    fn test_update_branch_to_extension_collapse() {
6792        let ctx = ParallelSparseTrieTestContext;
6793        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6794
6795        // Test creating a trie with leaves that share a long common prefix
6796        //
6797        // Initial state with 3 leaves (0x1234, 0x2345, 0x2356):
6798        // Upper trie:
6799        //   0x: Branch { state_mask: 0x6 }
6800        //       ├── 0x1: Leaf { key: 0x234 }
6801        //       └── 0x2: Extension { key: 0x3 }
6802        //           └── Subtrie (0x23): pointer
6803        // Lower subtrie (0x23):
6804        //   0x23: Branch { state_mask: 0x30 }
6805        //       ├── 0x234: Leaf { key: 0x5 }
6806        //       └── 0x235: Leaf { key: 0x6 }
6807        //
6808        // Then we create a new trie with leaves (0x1234, 0x1235, 0x1236):
6809        // Expected structure:
6810        // Upper trie:
6811        //   0x: Extension { key: 0x123 }
6812        //       └── Subtrie (0x12): pointer
6813        // Lower subtrie (0x12):
6814        //   0x123: Branch { state_mask: 0x70 } // bits 4, 5, 6 set
6815        //       ├── 0x1234: Leaf { key: 0x }
6816        //       ├── 0x1235: Leaf { key: 0x }
6817        //       └── 0x1236: Leaf { key: 0x }
6818
6819        // Create initial leaves
6820        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6821        let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6822        let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6823
6824        trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
6825        trie.update_leaf(leaf2_path, value2, DefaultTrieNodeProvider).unwrap();
6826        trie.update_leaf(leaf3_path, value3, DefaultTrieNodeProvider).unwrap();
6827
6828        // Verify initial structure has branch at root
6829        ctx.assert_upper_subtrie(&trie).has_branch(&Nibbles::default(), &[0x1, 0x2]);
6830
6831        // Now update to create a pattern where extension is more efficient
6832        // Replace leaves to all share prefix 0x123
6833        let (new_leaf1_path, new_value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 10);
6834        let (new_leaf2_path, new_value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 11);
6835        let (new_leaf3_path, new_value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 12);
6836
6837        // Clear and add new leaves
6838        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6839        trie.update_leaf(new_leaf1_path, new_value1.clone(), DefaultTrieNodeProvider).unwrap();
6840        trie.update_leaf(new_leaf2_path, new_value2.clone(), DefaultTrieNodeProvider).unwrap();
6841        trie.update_leaf(new_leaf3_path, new_value3.clone(), DefaultTrieNodeProvider).unwrap();
6842
6843        // Verify new structure has extension
6844        ctx.assert_upper_subtrie(&trie)
6845            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6846
6847        // Verify lower subtrie path was correctly updated to 0x123
6848        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);
6849
6850        // Verify lower subtrie - all three leaves should be properly inserted
6851        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6852            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6]) // All three children
6853            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &leaf_key([], 60))
6854            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
6855            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x6]), &leaf_key([], 60))
6856            .has_value(&new_leaf1_path, &new_value1)
6857            .has_value(&new_leaf2_path, &new_value2)
6858            .has_value(&new_leaf3_path, &new_value3);
6859    }
6860
6861    #[test]
6862    fn test_update_shared_prefix_patterns() {
6863        let ctx = ParallelSparseTrieTestContext;
6864        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6865
6866        // Test edge case: different patterns of shared prefixes
6867        //
6868        // Final trie structure:
6869        // Upper trie:
6870        //   0x: Branch { state_mask: 0x6 }
6871        //       ├── 0x1: Leaf { key: 0x234 }
6872        //       └── 0x2: Extension { key: 0x3 }
6873        //           └── Subtrie (0x23): pointer
6874        //
6875        // Lower subtrie (0x23):
6876        //   0x23: Branch { state_mask: 0x10 | 0x20 }
6877        //   ├── 0x234: Leaf { key: 0x5 }
6878        //   └── 0x235: Leaf { key: 0x6 }
6879
6880        // Create leaves with different shared prefix patterns
6881        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6882        let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6883        let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6884
6885        trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
6886        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6887        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();
6888
6889        // Verify upper trie structure
6890        ctx.assert_upper_subtrie(&trie)
6891            .has_branch(&Nibbles::default(), &[0x1, 0x2])
6892            .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([0x2, 0x3, 0x4], 63))
6893            .has_extension(&Nibbles::from_nibbles([0x2]), &Nibbles::from_nibbles([0x3]));
6894
6895        // Verify lower subtrie structure
6896        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x2, 0x3]))
6897            .has_branch(&Nibbles::from_nibbles([0x2, 0x3]), &[0x4, 0x5])
6898            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x4]), &leaf_key([0x5], 61))
6899            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x5]), &leaf_key([0x6], 61))
6900            .has_value(&leaf2_path, &value2)
6901            .has_value(&leaf3_path, &value3);
6902    }
6903
    #[test]
    fn test_progressive_branch_creation() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Test starting with a single leaf and progressively adding leaves
        // that create branch nodes at shorter and shorter paths
        //
        // Step 1: Add leaf at 0x12345
        // Upper trie:
        //   0x: Leaf { key: 0x12345 }
        //
        // Step 2: Add leaf at 0x12346
        // Upper trie:
        //   0x: Extension { key: 0x1234 }
        //       └── Subtrie (0x12): pointer
        // Lower subtrie (0x12):
        //   0x1234: Branch { state_mask: 0x60 }  // bits 5 and 6 set
        //       ├── 0x12345: Leaf { key: 0x }
        //       └── 0x12346: Leaf { key: 0x }
        //
        // Step 3: Add leaf at 0x1235
        // Lower subtrie (0x12) updates to:
        //   0x123: Branch { state_mask: 0x30 }  // bits 4 and 5 set
        //       ├── 0x1234: Branch { state_mask: 0x60 }
        //       │   ├── 0x12345: Leaf { key: 0x }
        //       │   └── 0x12346: Leaf { key: 0x }
        //       └── 0x1235: Leaf { key: 0x }
        //
        // Step 4: Add leaf at 0x124
        // Lower subtrie (0x12) updates to:
        //   0x12: Branch { state_mask: 0x18 }  // bits 3 and 4 set
        //       ├── 0x123: Branch { state_mask: 0x30 }
        //       │   ├── 0x1234: Branch { state_mask: 0x60 }
        //       │   │   ├── 0x12345: Leaf { key: 0x }
        //       │   │   └── 0x12346: Leaf { key: 0x }
        //       │   └── 0x1235: Leaf { key: 0x }
        //       └── 0x124: Leaf { key: 0x }
        //
        // Each step checks both the upper-trie extension (which must keep
        // shortening toward the subtrie root) and the subtrie's reported path
        // via `assert_subtrie_path`.

        // Step 1: Add first leaf - initially stored as leaf in upper trie
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x5], 1);
        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify leaf node in upper trie (optimized single-leaf case)
        ctx.assert_upper_subtrie(&trie)
            .has_leaf(
                &Nibbles::default(),
                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            )
            .has_value(&leaf1_path, &value1);

        // Step 2: Add leaf at 0x12346 - creates branch at 0x1234
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x6], 2);
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify extension now goes to 0x1234
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));

        // Verify subtrie path updated to 0x1234
        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3, 0x4]);

        // NOTE(review): `leaf_key([], 59)` appears to build the 59-nibble key
        // remainder below the 5-nibble branch path — confirm against the helper.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]), &leaf_key([], 59))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x6]), &leaf_key([], 59))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);

        // Step 3: Add leaf at 0x1235 - creates branch at 0x123
        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 3);
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify extension now goes to 0x123
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));

        // Verify subtrie path updated to 0x123
        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // Step 4: Add leaf at 0x124 - creates branch at 0x12 (subtrie root)
        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x2, 0x4], 4);
        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();

        // Verify extension now goes to 0x12
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // Verify subtrie path updated to 0x12
        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2]);

        // Verify final structure
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([], 61))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3)
            .has_value(&leaf4_path, &value4);
    }
7014
7015    #[test]
7016    fn test_update_max_depth_paths() {
7017        let ctx = ParallelSparseTrieTestContext;
7018        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
7019
7020        // Test edge case: very long paths (64 nibbles - max for addresses/storage)
7021        //
7022        // Final trie structure:
7023        // Upper trie:
7024        //   0x: Extension { key: 0xFF }
7025        //       └── Subtrie (0xFF): pointer
7026        //
7027        // Lower subtrie (0xFF):
7028        //   Has very long paths with slight differences at the end
7029
7030        // Create two 64-nibble paths that differ only in the last nibble
7031        let mut path1_nibbles = vec![0xF; 63];
7032        path1_nibbles.push(0x0);
7033        let mut path2_nibbles = vec![0xF; 63];
7034        path2_nibbles.push(0x1);
7035
7036        let (leaf1_path, value1) = ctx.create_test_leaf(&path1_nibbles, 1);
7037        let (leaf2_path, value2) = ctx.create_test_leaf(&path2_nibbles, 2);
7038
7039        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
7040        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
7041
7042        // The common prefix of 63 F's will create a very long extension
7043        let extension_key = vec![0xF; 63];
7044        ctx.assert_upper_subtrie(&trie)
7045            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles(&extension_key));
7046
7047        // Verify the subtrie has the branch at the end
7048        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
7049            .has_branch(&Nibbles::from_nibbles(&path1_nibbles[..63]), &[0x0, 0x1])
7050            .has_value(&leaf1_path, &value1)
7051            .has_value(&leaf2_path, &value2);
7052    }
7053
    /// Rebuilds a fragment of a real trie (captured from Hoodie block 1 — TODO
    /// confirm provenance) out of raw proof nodes, updates a single leaf, and
    /// checks that the recomputed root matches the known expected hash.
    #[test]
    fn test_hoodie_block_1_data() {
        // Reveal node at path Nibbles(0x) - root branch node
        // Each entry is an RLP-encoded 32-byte child hash (0xa0 prefix = 32-byte
        // RLP string).
        let root_branch_stack = vec![
            hex!("a0550b6aba4dd4582a2434d2cbdad8d3007d09f622d7a6e6eaa7a49385823c2fa2"),
            hex!("a04788a4975a9e1efd29b834fd80fdfe8a57cc1b1c5ace6d30ce5a36a15e0092b3"),
            hex!("a093aeccf87da304e6f7d09edc5d7bd3a552808866d2149dd0940507a8f9bfa910"),
            hex!("a08b5b423ba68d0dec2eca1f408076f9170678505eb4a5db2abbbd83bb37666949"),
            hex!("a08592f62216af4218098a78acad7cf472a727fb55e6c27d3cfdf2774d4518eb83"),
            hex!("a0ef02aeee845cb64c11f85edc1a3094227c26445952554b8a9248915d80c746c3"),
            hex!("a0df2529ee3a1ce4df5a758cf17e6a86d0fb5ea22ab7071cf60af6412e9b0a428a"),
            hex!("a0acaa1092db69cd5a63676685827b3484c4b80dc1d3361f6073bbb9240101e144"),
            hex!("a09c3f2bb2a729d71f246a833353ade65667716bb330e0127a3299a42d11200f93"),
            hex!("a0ce978470f4c0b1f8069570563a14d2b79d709add2db4bf22dd9b6aed3271c566"),
            hex!("a095f783cd1d464a60e3c8adcadc28c6eb9fec7306664df39553be41dccc909606"),
            hex!("a0a9083f5fb914b255e1feb5d951a4dfddacf3c8003ef1d1ec6a13bb6ba5b2ac62"),
            hex!("a0fec113d537d8577cd361e0cabf5e95ef58f1cc34318292fdecce9fae57c3e094"),
            hex!("a08b7465f5fe8b3e3c0d087cb7521310d4065ef2a0ee43bf73f68dee8a5742b3dd"),
            hex!("a0c589aa1ae3d5fd87d8640957f7d5184a4ac06f393b453a8e8ed7e8fba0d385c8"),
            hex!("a0b516d6f3352f87beab4ed6e7322f191fc7a147686500ef4de7dd290ad784ef51"),
        ];

        let root_branch_rlp_stack: Vec<RlpNode> = root_branch_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        let root_branch_node = BranchNodeV2::new(
            Default::default(),
            root_branch_rlp_stack,
            TrieMask::new(0b1111111111111111), // state_mask: all 16 children present
            None,
        );

        // All children of the root are both hashed and stored in the database.
        let root_branch_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b1111111111111111),
            tree_mask: TrieMask::new(0b1111111111111111),
        });

        let mut trie = ParallelSparseTrie::from_root(
            TrieNodeV2::Branch(root_branch_node),
            root_branch_masks,
            true,
        )
        .unwrap();

        // Reveal node at path Nibbles(0x3) - branch node
        let branch_0x3_stack = vec![
            hex!("a09da7d9755fe0c558b3c3de9fdcdf9f28ae641f38c9787b05b73ab22ae53af3e2"),
            hex!("a0d9990bf0b810d1145ecb2b011fd68c63cc85564e6724166fd4a9520180706e5f"),
            hex!("a0f60eb4b12132a40df05d9bbdb88bbde0185a3f097f3c76bf4200c23eda26cf86"),
            hex!("a0ca976997ddaf06f18992f6207e4f6a05979d07acead96568058789017cc6d06b"),
            hex!("a04d78166b48044fdc28ed22d2fd39c8df6f8aaa04cb71d3a17286856f6893ff83"),
            hex!("a021d4f90c34d3f1706e78463b6482bca77a3aa1cd059a3f326c42a1cfd30b9b60"),
            hex!("a0fc3b71c33e2e6b77c5e494c1db7fdbb447473f003daf378c7a63ba9bf3f0049d"),
            hex!("a0e33ed2be194a3d93d343e85642447c93a9d0cfc47a016c2c23d14c083be32a7c"),
            hex!("a07b8e7a21c1178d28074f157b50fca85ee25c12568ff8e9706dcbcdacb77bf854"),
            hex!("a0973274526811393ea0bf4811ca9077531db00d06b86237a2ecd683f55ba4bcb0"),
            hex!("a03a93d726d7487874e51b52d8d534c63aa2a689df18e3b307c0d6cb0a388b00f3"),
            hex!("a06aa67101d011d1c22fe739ef83b04b5214a3e2f8e1a2625d8bfdb116b447e86f"),
            hex!("a02dd545b33c62d33a183e127a08a4767fba891d9f3b94fc20a2ca02600d6d1fff"),
            hex!("a0fe6db87d00f06d53bff8169fa497571ff5af1addfb715b649b4d79dd3e394b04"),
            hex!("a0d9240a9d2d5851d05a97ff3305334dfdb0101e1e321fc279d2bb3cad6afa8fc8"),
            hex!("a01b69c6ab5173de8a8ec53a6ebba965713a4cc7feb86cb3e230def37c230ca2b2"),
        ];

        let branch_0x3_rlp_stack: Vec<RlpNode> = branch_0x3_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        let branch_0x3_node = BranchNodeV2::new(
            Default::default(),
            branch_0x3_rlp_stack,
            TrieMask::new(0b1111111111111111), // state_mask: all 16 children present
            None,
        );

        let branch_0x3_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b0100010000010101),
            tree_mask: TrieMask::new(0b0100000000000000),
        });

        // Reveal node at path Nibbles(0x37) - leaf node
        // The leaf key is the remaining 62 nibbles of the 64-nibble account path.
        let leaf_path = Nibbles::from_nibbles([0x3, 0x7]);
        let leaf_key = Nibbles::unpack(
            &hex!("d65eaa92c6bc4c13a5ec45527f0c18ea8932588728769ec7aecfe6d9f32e42")[..],
        );
        // RLP-encoded account value (nonce, balance, storage root, code hash).
        let leaf_value = hex!("f8440180a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0f57acd40259872606d76197ef052f3d35588dadf919ee1f0e3cb9b62d3f4b02c").to_vec();

        let leaf_node = LeafNode::new(leaf_key, leaf_value);
        let leaf_masks = None;

        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles([0x3]),
                node: TrieNodeV2::Branch(branch_0x3_node),
                masks: branch_0x3_masks,
            },
            ProofTrieNodeV2 {
                path: leaf_path,
                node: TrieNodeV2::Leaf(leaf_node),
                masks: leaf_masks,
            },
        ])
        .unwrap();

        // Update leaf with its new value
        // The full leaf path is the reveal path (0x37) followed by the leaf key.
        let mut leaf_full_path = leaf_path;
        leaf_full_path.extend(&leaf_key);

        // New RLP-encoded account value (same layout as `leaf_value` above,
        // given here as decimal bytes).
        let leaf_new_value = vec![
            248, 68, 1, 128, 160, 224, 163, 152, 169, 122, 160, 155, 102, 53, 41, 0, 47, 28, 205,
            190, 199, 5, 215, 108, 202, 22, 138, 70, 196, 178, 193, 208, 18, 96, 95, 63, 238, 160,
            245, 122, 205, 64, 37, 152, 114, 96, 109, 118, 25, 126, 240, 82, 243, 211, 85, 136,
            218, 223, 145, 158, 225, 240, 227, 203, 155, 98, 211, 244, 176, 44,
        ];

        trie.update_leaf(leaf_full_path, leaf_new_value.clone(), DefaultTrieNodeProvider).unwrap();

        // Sanity checks before calculating the root: the value must live in the
        // lower subtrie for 0x37, never in the upper subtrie.
        assert_eq!(
            Some(&leaf_new_value),
            trie.lower_subtrie_for_path(&leaf_path).unwrap().inner.values.get(&leaf_full_path)
        );
        assert!(trie.upper_subtrie.inner.values.is_empty());

        // Assert the root hash matches the expected value
        let expected_root =
            b256!("0x29b07de8376e9ce7b3a69e9b102199869514d3f42590b5abc6f7d48ec9b8665c");
        assert_eq!(trie.root(), expected_root);
    }
7186
7187    #[test]
7188    fn find_leaf_existing_leaf() {
7189        // Create a simple trie with one leaf
7190        let provider = DefaultTrieNodeProvider;
7191        let mut sparse = ParallelSparseTrie::default();
7192        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7193        let value = b"test_value".to_vec();
7194
7195        sparse.update_leaf(path, value.clone(), &provider).unwrap();
7196
7197        // Check that the leaf exists
7198        let result = sparse.find_leaf(&path, None);
7199        assert_matches!(result, Ok(LeafLookup::Exists));
7200
7201        // Check with expected value matching
7202        let result = sparse.find_leaf(&path, Some(&value));
7203        assert_matches!(result, Ok(LeafLookup::Exists));
7204    }
7205
7206    #[test]
7207    fn find_leaf_value_mismatch() {
7208        // Create a simple trie with one leaf
7209        let provider = DefaultTrieNodeProvider;
7210        let mut sparse = ParallelSparseTrie::default();
7211        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7212        let value = b"test_value".to_vec();
7213        let wrong_value = b"wrong_value".to_vec();
7214
7215        sparse.update_leaf(path, value, &provider).unwrap();
7216
7217        // Check with wrong expected value
7218        let result = sparse.find_leaf(&path, Some(&wrong_value));
7219        assert_matches!(
7220            result,
7221            Err(LeafLookupError::ValueMismatch { path: p, expected: Some(e), actual: _a }) if p == path && e == wrong_value
7222        );
7223    }
7224
7225    #[test]
7226    fn find_leaf_not_found_empty_trie() {
7227        // Empty trie
7228        let sparse = ParallelSparseTrie::default();
7229        let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
7230
7231        // Leaf should not exist
7232        let result = sparse.find_leaf(&path, None);
7233        assert_matches!(result, Ok(LeafLookup::NonExistent));
7234    }
7235
7236    #[test]
7237    fn find_leaf_empty_trie() {
7238        let sparse = ParallelSparseTrie::default();
7239        let path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7240
7241        let result = sparse.find_leaf(&path, None);
7242        assert_matches!(result, Ok(LeafLookup::NonExistent));
7243    }
7244
7245    #[test]
7246    fn find_leaf_exists_no_value_check() {
7247        let provider = DefaultTrieNodeProvider;
7248        let mut sparse = ParallelSparseTrie::default();
7249        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7250        sparse.update_leaf(path, encode_account_value(0), &provider).unwrap();
7251
7252        let result = sparse.find_leaf(&path, None);
7253        assert_matches!(result, Ok(LeafLookup::Exists));
7254    }
7255
7256    #[test]
7257    fn find_leaf_exists_with_value_check_ok() {
7258        let provider = DefaultTrieNodeProvider;
7259        let mut sparse = ParallelSparseTrie::default();
7260        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7261        let value = encode_account_value(0);
7262        sparse.update_leaf(path, value.clone(), &provider).unwrap();
7263
7264        let result = sparse.find_leaf(&path, Some(&value));
7265        assert_matches!(result, Ok(LeafLookup::Exists));
7266    }
7267
7268    #[test]
7269    fn find_leaf_exclusion_branch_divergence() {
7270        let provider = DefaultTrieNodeProvider;
7271        let mut sparse = ParallelSparseTrie::default();
7272        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); // Creates branch at 0x12
7273        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6])); // Belongs to same branch
7274        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8])); // Diverges at nibble 7
7275
7276        sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7277        sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7278
7279        let result = sparse.find_leaf(&search_path, None);
7280        assert_matches!(result, Ok(LeafLookup::NonExistent))
7281    }
7282
7283    #[test]
7284    fn find_leaf_exclusion_extension_divergence() {
7285        let provider = DefaultTrieNodeProvider;
7286        let mut sparse = ParallelSparseTrie::default();
7287        // This will create an extension node at root with key 0x12
7288        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7289        // This path diverges from the extension key
7290        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8]));
7291
7292        sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7293
7294        let result = sparse.find_leaf(&search_path, None);
7295        assert_matches!(result, Ok(LeafLookup::NonExistent))
7296    }
7297
7298    #[test]
7299    fn find_leaf_exclusion_leaf_divergence() {
7300        let provider = DefaultTrieNodeProvider;
7301        let mut sparse = ParallelSparseTrie::default();
7302        let existing_leaf_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7303        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7304
7305        sparse.update_leaf(existing_leaf_path, encode_account_value(0), &provider).unwrap();
7306
7307        let result = sparse.find_leaf(&search_path, None);
7308        assert_matches!(result, Ok(LeafLookup::NonExistent))
7309    }
7310
7311    #[test]
7312    fn find_leaf_exclusion_path_ends_at_branch() {
7313        let provider = DefaultTrieNodeProvider;
7314        let mut sparse = ParallelSparseTrie::default();
7315        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); // Creates branch at 0x12
7316        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6]));
7317        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2])); // Path of the branch itself
7318
7319        sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7320        sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7321
7322        let result = sparse.find_leaf(&search_path, None);
7323        assert_matches!(result, Ok(LeafLookup::NonExistent));
7324    }
7325
7326    #[test]
7327    fn find_leaf_error_blinded_node_at_leaf_path() {
7328        // Scenario: The node *at* the leaf path is blinded.
7329        let blinded_hash = B256::repeat_byte(0xBB);
7330        let leaf_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7331
7332        let sparse = new_test_trie(
7333            [
7334                (
7335                    // Ext 0x12
7336                    Nibbles::default(),
7337                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x1, 0x2])),
7338                ),
7339                (
7340                    // Ext 0x123
7341                    Nibbles::from_nibbles_unchecked([0x1, 0x2]),
7342                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x3])),
7343                ),
7344                (
7345                    // Branch at 0x123, child 4
7346                    Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3]),
7347                    SparseNode::new_branch(TrieMask::new(0b10000), &[(0x4, blinded_hash)]),
7348                ),
7349            ]
7350            .into_iter(),
7351        );
7352
7353        let result = sparse.find_leaf(&leaf_path, None);
7354
7355        // Should error because it hit the blinded node exactly at the leaf path
7356        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7357            if path == leaf_path && hash == blinded_hash
7358        );
7359    }
7360
7361    #[test]
7362    fn find_leaf_error_blinded_node() {
7363        let blinded_hash = B256::repeat_byte(0xAA);
7364        let path_to_blind = Nibbles::from_nibbles_unchecked([0x1]);
7365        let search_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7366
7367        let sparse = new_test_trie(
7368            [
7369                // Root is a branch with child 0x1 (blinded) and 0x5 (revealed leaf)
7370                // So we set Bit 1 and Bit 5 in the state_mask
7371                (
7372                    Nibbles::default(),
7373                    SparseNode::new_branch(TrieMask::new(0b100010), &[(0x1, blinded_hash)]),
7374                ),
7375                (
7376                    Nibbles::from_nibbles_unchecked([0x5]),
7377                    SparseNode::new_leaf(Nibbles::from_nibbles_unchecked([0x6, 0x7, 0x8])),
7378                ),
7379            ]
7380            .into_iter(),
7381        );
7382
7383        let result = sparse.find_leaf(&search_path, None);
7384
7385        // Should error because it hit the blinded node at path 0x1
7386        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7387            if path == path_to_blind && hash == blinded_hash
7388        );
7389    }
7390
    /// Regression test from mainnet block 24185431 (storage trie of account
    /// 0x6ba784ee...): removing a leaf must first fail with `BlindedNode` until
    /// the relevant branch is revealed, and the resulting branch-node update
    /// must match the expected compact encoding.
    #[test]
    fn test_mainnet_block_24185431_storage_0x6ba784ee() {
        reth_tracing::init_test_tracing();

        // Reveal branch at 0x3 with full state
        // `mut` because one hash is removed later when building the expectation.
        let mut branch_0x3_hashes = vec![
            B256::from(hex!("fc11ba8de4b220b8f19a09f0676c69b8e18bae1350788392640069e59b41733d")),
            B256::from(hex!("8afe085cc6685680bd8ba4bac6e65937a4babf737dc5e7413d21cdda958e8f74")),
            B256::from(hex!("c7b6f7c0fc601a27aece6ec178fd9be17cdee77c4884ecfbe1ee459731eb57da")),
            B256::from(hex!("71c1aec60db78a2deb4e10399b979a2ed5be42b4ee0c0a17c614f9ddc9f9072e")),
            B256::from(hex!("e9261302e7c0b77930eaf1851b585210906cd01e015ab6be0f7f3c0cc947c32a")),
            B256::from(hex!("38ce8f369c56bd77fabdf679b27265b1f8d0a54b09ef612c8ee8ddfc6b3fab95")),
            B256::from(hex!("7b507a8936a28c5776b647d1c4bda0bbbb3d0d227f16c5f5ebba58d02e31918d")),
            B256::from(hex!("0f456b9457a824a81e0eb555aa861461acb38674dcf36959b3b26deb24ed0af9")),
            B256::from(hex!("2145420289652722ad199ba932622e3003c779d694fa5a2acfb2f77b0782b38a")),
            B256::from(hex!("2c1a04dce1a9e2f1cfbf8806edce50a356dfa58e7e7c542c848541502613b796")),
            B256::from(hex!("dad7ca55186ac8f40d4450dc874166df8267b44abc07e684d9507260f5712df3")),
            B256::from(hex!("3a8c2a1d7d2423e92965ec29014634e7f0307ded60b1a63d28c86c3222b24236")),
            B256::from(hex!("4e9929e6728b3a7bf0db6a0750ab376045566b556c9c605e606ecb8ec25200d7")),
            B256::from(hex!("1797c36f98922f52292c161590057a1b5582d5503e3370bcfbf6fd939f3ec98b")),
            B256::from(hex!("9e514589a9c9210b783c19fa3f0b384bbfaefe98f10ea189a2bfc58c6bf000a1")),
            B256::from(hex!("85bdaabbcfa583cbd049650e41d3d19356bd833b3ed585cf225a3548557c7fa3")),
        ];
        let branch_0x3_node = create_branch_node(
            Nibbles::from_nibbles([0x3]),
            &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf],
            branch_0x3_hashes.iter().map(RlpNode::word_rlp),
        );

        // Reveal branch at 0x31 - a single child at nibble 0xc.
        let branch_0x31_hashes = vec![B256::from(hex!(
            "3ca994ba59ce70b83fee1f01731c8dac4fdd0f70ade79bf9b0695c4c53531aab"
        ))];
        let branch_0x31_node = create_branch_node_with_children(
            &[0xc],
            branch_0x31_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // Reveal leaf at 0x31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81
        let leaf_path = hex!("31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81");
        let leaf_nibbles = Nibbles::unpack(leaf_path.as_slice());
        let leaf_value = hex!("0009ae8ce8245bff").to_vec();

        // Reveal branch at 0x31c - children at nibbles 0x3, 0x7 and 0xc.
        let branch_0x31c_hashes = vec![
            B256::from(hex!("1a68fdb36b77e9332b49a977faf800c22d0199e6cecf44032bb083c78943e540")),
            B256::from(hex!("cd4622c6df6fd7172c7fed1b284ef241e0f501b4c77b675ef10c612bd0948a7a")),
            B256::from(hex!("abf3603d2f991787e21f1709ee4c7375d85dfc506995c0435839fccf3fe2add4")),
        ];
        let branch_0x31c_node = create_branch_node_with_children(
            &[0x3, 0x7, 0xc],
            branch_0x31c_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // Reveal the trie structure using ProofTrieNode
        let mut proof_nodes = vec![ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1]),
            node: branch_0x31_node,
            masks: Some(BranchNodeMasks {
                tree_mask: TrieMask::new(4096),
                hash_mask: TrieMask::new(4096),
            }),
        }];

        // Create a sparse trie and reveal nodes
        let mut trie = ParallelSparseTrie::default()
            .with_root(
                branch_0x3_node,
                Some(BranchNodeMasks {
                    tree_mask: TrieMask::new(26099),
                    hash_mask: TrieMask::new(65535),
                }),
                true,
            )
            .expect("root revealed");

        trie.reveal_nodes(&mut proof_nodes).unwrap();

        // Update the leaf in order to reveal it in the trie
        trie.update_leaf(leaf_nibbles, leaf_value, NoRevealProvider).unwrap();

        // Now try deleting the leaf. The branch at 0x31c is still blinded, so
        // the removal must fail pointing at that path.
        let Err(err) = trie.remove_leaf(&leaf_nibbles, NoRevealProvider) else {
            panic!("expected blinded node error");
        };
        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if path == &Nibbles::from_nibbles([0x3, 0x1, 0xc]));

        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1, 0xc]),
            node: branch_0x31c_node,
            masks: Some(BranchNodeMasks { tree_mask: 0.into(), hash_mask: 4096.into() }),
        }])
        .unwrap();

        // Now remove the leaf again, this should succeed
        trie.remove_leaf(&leaf_nibbles, NoRevealProvider).unwrap();

        // Compute the root to trigger updates
        let _ = trie.root();

        // Assert the resulting branch node updates
        let updates = trie.updates_ref();

        // Check that the branch at 0x3 was updated with the expected structure
        let branch_0x3_update = updates
            .updated_nodes
            .get(&Nibbles::from_nibbles([0x3]))
            .expect("Branch at 0x3 should be in updates");

        // We no longer expect to track the hash for child 1
        branch_0x3_hashes.remove(1);

        // Expected compact branch node after the removal
        // (state_mask, tree_mask, hash_mask, hashes, root hash).
        let expected_branch = BranchNodeCompact::new(
            0b1111111111111111,
            0b0110010111110011,
            0b1111111111111101,
            branch_0x3_hashes,
            None,
        );

        assert_eq!(branch_0x3_update, &expected_branch);
    }
7514
7515    #[test]
7516    fn test_get_leaf_value_lower_subtrie() {
7517        // This test demonstrates that get_leaf_value must look in the correct subtrie,
7518        // not always in upper_subtrie.
7519
7520        // Set up a root branch pointing to nibble 0x1, and a branch at [0x1] pointing to
7521        // nibble 0x2, so that the lower subtrie at [0x1, 0x2] is reachable.
7522        let root_branch =
7523            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
7524        let branch_at_1 =
7525            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
7526        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
7527        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
7528            path: Nibbles::from_nibbles([0x1]),
7529            node: branch_at_1,
7530            masks: None,
7531        }])
7532        .unwrap();
7533
7534        // Create a leaf node with path >= 2 nibbles (will go to lower subtrie)
7535        let leaf_path = Nibbles::from_nibbles([0x1, 0x2]);
7536        let leaf_key = Nibbles::from_nibbles([0x3, 0x4]);
7537        let leaf_node = create_leaf_node(leaf_key.to_vec(), 42);
7538
7539        // Reveal the leaf node
7540        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path: leaf_path, node: leaf_node, masks: None }])
7541            .unwrap();
7542
7543        // The full path is leaf_path + leaf_key
7544        let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]);
7545
7546        // Verify the value is stored in the lower subtrie, not upper
7547        let idx = path_subtrie_index_unchecked(&leaf_path);
7548        let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
7549        assert!(
7550            lower_subtrie.inner.values.contains_key(&full_path),
7551            "value should be in lower subtrie"
7552        );
7553        assert!(
7554            !trie.upper_subtrie.inner.values.contains_key(&full_path),
7555            "value should NOT be in upper subtrie"
7556        );
7557
7558        // get_leaf_value should find the value
7559        assert!(
7560            trie.get_leaf_value(&full_path).is_some(),
7561            "get_leaf_value should find the value in lower subtrie"
7562        );
7563    }
7564
7565    /// Test that `get_leaf_value` correctly returns values stored via `update_leaf`
7566    /// when the leaf node ends up in the upper subtrie (depth < 2).
7567    ///
7568    /// This can happen when the trie is sparse and the leaf is inserted at the root level.
7569    /// Previously, `get_leaf_value` only checked the lower subtrie based on the full path,
7570    /// missing values stored in `upper_subtrie.inner.values`.
7571    #[test]
7572    fn test_get_leaf_value_upper_subtrie_via_update_leaf() {
7573        let provider = NoRevealProvider;
7574
7575        // Create an empty trie with an empty root
7576        let mut trie = ParallelSparseTrie::default()
7577            .with_root(TrieNodeV2::EmptyRoot, None, false)
7578            .expect("root revealed");
7579
7580        // Create a full 64-nibble path (like a real account hash)
7581        let full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA, 0xB, 0xC]));
7582        let value = encode_account_value(42);
7583
7584        // Insert the leaf - since the trie is empty, the leaf node will be created
7585        // at the root level (depth 0), which is in the upper subtrie
7586        trie.update_leaf(full_path, value.clone(), provider).unwrap();
7587
7588        // Verify the value is stored in upper_subtrie (where update_leaf puts it)
7589        assert!(
7590            trie.upper_subtrie.inner.values.contains_key(&full_path),
7591            "value should be in upper subtrie after update_leaf"
7592        );
7593
7594        // Verify the value can be retrieved via get_leaf_value
7595        // Before the fix, this would return None because get_leaf_value only
7596        // checked the lower subtrie based on the path length
7597        let retrieved = trie.get_leaf_value(&full_path);
7598        assert_eq!(retrieved, Some(&value));
7599    }
7600
7601    /// Test that `get_leaf_value` works for values in both upper and lower subtries.
7602    #[test]
7603    fn test_get_leaf_value_upper_and_lower_subtries() {
7604        let provider = NoRevealProvider;
7605
7606        // Create an empty trie
7607        let mut trie = ParallelSparseTrie::default()
7608            .with_root(TrieNodeV2::EmptyRoot, None, false)
7609            .expect("root revealed");
7610
7611        // Insert first leaf - will be at root level (upper subtrie)
7612        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA]));
7613        let value1 = encode_account_value(1);
7614        trie.update_leaf(path1, value1.clone(), provider).unwrap();
7615
7616        // Insert second leaf with different prefix - creates a branch
7617        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0xB]));
7618        let value2 = encode_account_value(2);
7619        trie.update_leaf(path2, value2.clone(), provider).unwrap();
7620
7621        // Both values should be retrievable
7622        assert_eq!(trie.get_leaf_value(&path1), Some(&value1));
7623        assert_eq!(trie.get_leaf_value(&path2), Some(&value2));
7624    }
7625
7626    /// Test that `get_leaf_value` works for storage tries which are often very sparse.
7627    #[test]
7628    fn test_get_leaf_value_sparse_storage_trie() {
7629        let provider = NoRevealProvider;
7630
7631        // Simulate a storage trie with a single slot
7632        let mut trie = ParallelSparseTrie::default()
7633            .with_root(TrieNodeV2::EmptyRoot, None, false)
7634            .expect("root revealed");
7635
7636        // Single storage slot - leaf will be at root (depth 0)
7637        let slot_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x9]));
7638        let slot_value = alloy_rlp::encode(U256::from(12345));
7639        trie.update_leaf(slot_path, slot_value.clone(), provider).unwrap();
7640
7641        // Value should be retrievable
7642        assert_eq!(trie.get_leaf_value(&slot_path), Some(&slot_value));
7643    }
7644
7645    #[test]
7646    fn test_prune_empty_suffix_key_regression() {
7647        // Regression test: when a leaf has an empty suffix key (full path == node path),
7648        // the value must be removed when that path becomes a pruned root.
7649        // This catches the bug where is_strict_descendant fails to remove p == pruned_root.
7650
7651        use crate::provider::DefaultTrieNodeProvider;
7652
7653        let provider = DefaultTrieNodeProvider;
7654        let mut parallel = ParallelSparseTrie::default();
7655
7656        // Large value to ensure nodes have hashes (RLP >= 32 bytes)
7657        let value = {
7658            let account = Account {
7659                nonce: 0x123456789abcdef,
7660                balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
7661                ..Default::default()
7662            };
7663            let mut buf = Vec::new();
7664            account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
7665            buf
7666        };
7667
7668        // Create a trie with multiple leaves to force a branch at root
7669        for i in 0..16u8 {
7670            parallel
7671                .update_leaf(
7672                    pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5])),
7673                    value.clone(),
7674                    &provider,
7675                )
7676                .unwrap();
7677        }
7678
7679        // Compute root to get hashes
7680        let root_before = parallel.root();
7681
7682        // Prune with no retained leaves: all children of root become pruned roots
7683        parallel.prune(&[]);
7684
7685        let root_after = parallel.root();
7686        assert_eq!(root_before, root_after, "root hash must be preserved");
7687
7688        // Key assertion: values under pruned paths must be removed
7689        // With the bug, values at pruned_root paths (not strict descendants) would remain
7690        for i in 0..16u8 {
7691            let path = pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5]));
7692            assert!(
7693                parallel.get_leaf_value(&path).is_none(),
7694                "value at {:?} should be removed after prune",
7695                path
7696            );
7697        }
7698    }
7699
7700    #[test]
7701    fn test_prune_empty_trie() {
7702        let mut trie = ParallelSparseTrie::default();
7703        trie.prune(&[]);
7704        let root = trie.root();
7705        assert_eq!(root, EMPTY_ROOT_HASH, "empty trie should have empty root hash");
7706    }
7707
7708    #[test]
7709    fn test_prune_preserves_root_hash() {
7710        let provider = DefaultTrieNodeProvider;
7711        let mut trie = ParallelSparseTrie::default();
7712
7713        let value = large_account_value();
7714
7715        for i in 0..8u8 {
7716            for j in 0..4u8 {
7717                trie.update_leaf(
7718                    pad_nibbles_right(Nibbles::from_nibbles([i, j, 0x3, 0x4, 0x5, 0x6])),
7719                    value.clone(),
7720                    &provider,
7721                )
7722                .unwrap();
7723            }
7724        }
7725
7726        let root_before = trie.root();
7727        trie.prune(&[]);
7728        let root_after = trie.root();
7729        assert_eq!(root_before, root_after, "root hash must be preserved after prune");
7730    }
7731
7732    #[test]
7733    fn test_prune_single_leaf_trie() {
7734        let provider = DefaultTrieNodeProvider;
7735        let mut trie = ParallelSparseTrie::default();
7736
7737        let value = large_account_value();
7738        trie.update_leaf(
7739            pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])),
7740            value,
7741            &provider,
7742        )
7743        .unwrap();
7744
7745        let root_before = trie.root();
7746        let nodes_before = trie.size_hint();
7747
7748        trie.prune(&[]);
7749
7750        let root_after = trie.root();
7751        assert_eq!(root_before, root_after, "root hash should be preserved");
7752        assert_eq!(trie.size_hint(), nodes_before, "single leaf trie should not change");
7753    }
7754
7755    #[test]
7756    fn test_prune_root_hash_preserved() {
7757        let provider = DefaultTrieNodeProvider;
7758        let mut trie = ParallelSparseTrie::default();
7759
7760        // Create two 64-nibble paths that differ only in the first nibble
7761        let key1 = Nibbles::unpack(B256::repeat_byte(0x00));
7762        let key2 = Nibbles::unpack(B256::repeat_byte(0x11));
7763
7764        let large_value = large_account_value();
7765        trie.update_leaf(key1, large_value.clone(), &provider).unwrap();
7766        trie.update_leaf(key2, large_value, &provider).unwrap();
7767
7768        let root_before = trie.root();
7769
7770        trie.prune(&[]);
7771
7772        assert_eq!(root_before, trie.root(), "root hash must be preserved after pruning");
7773    }
7774
7775    #[test]
7776    fn test_prune_mixed_embedded_and_hashed() {
7777        let provider = DefaultTrieNodeProvider;
7778        let mut trie = ParallelSparseTrie::default();
7779
7780        let large_value = large_account_value();
7781        let small_value = vec![0x80];
7782
7783        for i in 0..8u8 {
7784            let value = if i < 4 { large_value.clone() } else { small_value.clone() };
7785            trie.update_leaf(
7786                pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3])),
7787                value,
7788                &provider,
7789            )
7790            .unwrap();
7791        }
7792
7793        let root_before = trie.root();
7794        trie.prune(&[]);
7795        assert_eq!(root_before, trie.root(), "root hash must be preserved");
7796    }
7797
7798    #[test]
7799    fn test_prune_all_lower_subtries() {
7800        let provider = DefaultTrieNodeProvider;
7801
7802        let large_value = large_account_value();
7803
7804        let mut keys = Vec::new();
7805        for first in 0..16u8 {
7806            for second in 0..16u8 {
7807                keys.push(pad_nibbles_right(Nibbles::from_nibbles([
7808                    first, second, 0x1, 0x2, 0x3, 0x4,
7809                ])));
7810            }
7811        }
7812
7813        let mut trie = ParallelSparseTrie::default();
7814
7815        for key in &keys {
7816            trie.update_leaf(*key, large_value.clone(), &provider).unwrap();
7817        }
7818
7819        let root_before = trie.root();
7820
7821        let total_pruned = trie.prune(&[]);
7822
7823        assert!(total_pruned > 0, "should have pruned some nodes");
7824        assert_eq!(root_before, trie.root(), "root hash should be preserved");
7825
7826        for key in &keys {
7827            assert!(trie.get_leaf_value(key).is_none(), "value should be pruned");
7828        }
7829    }
7830
7831    #[test]
7832    fn test_prune_keeps_only_hot_paths() {
7833        let provider = DefaultTrieNodeProvider;
7834        let mut trie = ParallelSparseTrie::default();
7835
7836        let key_keep = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7837        let key_drop_1 = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x3, 0x4]));
7838        let key_drop_2 = pad_nibbles_right(Nibbles::from_nibbles([0x9, 0x2, 0x3, 0x4]));
7839
7840        let value = large_account_value();
7841        trie.update_leaf(key_keep, value.clone(), &provider).unwrap();
7842        trie.update_leaf(key_drop_1, value.clone(), &provider).unwrap();
7843        trie.update_leaf(key_drop_2, value, &provider).unwrap();
7844
7845        let root_before = trie.root();
7846
7847        let pruned = trie.prune(&[key_keep]);
7848        assert!(pruned > 0, "expected some nodes to be pruned");
7849        assert_eq!(root_before, trie.root(), "root hash should be preserved after LFU prune");
7850
7851        assert!(trie.get_leaf_value(&key_keep).is_some(), "retained key must remain revealed");
7852        assert!(trie.get_leaf_value(&key_drop_1).is_none(), "non-retained key should be pruned");
7853        assert!(trie.get_leaf_value(&key_drop_2).is_none(), "non-retained key should be pruned");
7854    }
7855
7856    #[test]
7857    fn test_prune_update_after() {
7858        // After pruning, we should be able to update leaves without panic.
7859        let provider = DefaultTrieNodeProvider;
7860        let mut trie = ParallelSparseTrie::default();
7861
7862        let value = large_account_value();
7863
7864        // Create keys that span into lower subtries (path.len() >= UPPER_TRIE_MAX_DEPTH)
7865        for first in 0..4u8 {
7866            for second in 0..4u8 {
7867                trie.update_leaf(
7868                    pad_nibbles_right(Nibbles::from_nibbles([
7869                        first, second, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6,
7870                    ])),
7871                    value.clone(),
7872                    &provider,
7873                )
7874                .unwrap();
7875            }
7876        }
7877
7878        let root_before = trie.root();
7879
7880        trie.prune(&[]);
7881
7882        let root_after = trie.root();
7883        assert_eq!(root_before, root_after, "root hash should be preserved");
7884
7885        // Now try to update a leaf - this should not panic even though lower subtries
7886        // were replaced with Blind(None)
7887        let new_path =
7888            pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x5, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7889        trie.update_leaf(new_path, value, &provider).unwrap();
7890
7891        // The trie should still be functional
7892        let _ = trie.root();
7893    }
7894
7895    // update_leaves tests
7896
7897    #[test]
7898    fn test_update_leaves_successful_update() {
7899        use crate::LeafUpdate;
7900        use alloy_primitives::map::B256Map;
7901        use std::cell::RefCell;
7902
7903        let provider = DefaultTrieNodeProvider;
7904        let mut trie = ParallelSparseTrie::default();
7905
7906        // Create a leaf in the trie using a full-length key
7907        let b256_key = B256::with_last_byte(42);
7908        let key = Nibbles::unpack(b256_key);
7909        let value = encode_account_value(1);
7910        trie.update_leaf(key, value, &provider).unwrap();
7911
7912        // Create update map with a new value for the same key
7913        let new_value = encode_account_value(2);
7914
7915        let mut updates: B256Map<LeafUpdate> = B256Map::default();
7916        updates.insert(b256_key, LeafUpdate::Changed(new_value));
7917
7918        let proof_targets = RefCell::new(Vec::new());
7919        trie.update_leaves(&mut updates, |path, min_len| {
7920            proof_targets.borrow_mut().push((path, min_len));
7921        })
7922        .unwrap();
7923
7924        // Update should succeed: map empty, callback not invoked
7925        assert!(updates.is_empty(), "Update map should be empty after successful update");
7926        assert!(
7927            proof_targets.borrow().is_empty(),
7928            "Callback should not be invoked for revealed paths"
7929        );
7930    }
7931
7932    #[test]
7933    fn test_update_leaves_insert_new_leaf() {
7934        use crate::LeafUpdate;
7935        use alloy_primitives::map::B256Map;
7936        use std::cell::RefCell;
7937
7938        let mut trie = ParallelSparseTrie::default();
7939
7940        // Insert a NEW leaf (key doesn't exist yet) via update_leaves
7941        let b256_key = B256::with_last_byte(99);
7942        let new_value = encode_account_value(42);
7943
7944        let mut updates: B256Map<LeafUpdate> = B256Map::default();
7945        updates.insert(b256_key, LeafUpdate::Changed(new_value.clone()));
7946
7947        let proof_targets = RefCell::new(Vec::new());
7948        trie.update_leaves(&mut updates, |path, min_len| {
7949            proof_targets.borrow_mut().push((path, min_len));
7950        })
7951        .unwrap();
7952
7953        // Insert should succeed: map empty, callback not invoked
7954        assert!(updates.is_empty(), "Update map should be empty after successful insert");
7955        assert!(
7956            proof_targets.borrow().is_empty(),
7957            "Callback should not be invoked for new leaf insert"
7958        );
7959
7960        // Verify the leaf was actually inserted
7961        let full_path = Nibbles::unpack(b256_key);
7962        assert_eq!(
7963            trie.get_leaf_value(&full_path),
7964            Some(&new_value),
7965            "New leaf value should be retrievable"
7966        );
7967    }
7968
    #[test]
    fn test_update_leaves_blinded_node() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Build a root branch with two children:
        // - nibble 0: a blinded (hash-only) child
        // - nibble 1: a leaf that gets revealed below
        // Use a small value so the leaf's RLP embeds inline in the branch.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal only the branch and one child, leaving child 0 as a Hash node.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // The path 0x0... is blinded (Hash node).
        // Create an update targeting the blinded path using a full B256 key.
        let b256_key = B256::ZERO; // starts with 0x0...

        let new_value = encode_account_value(42);
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(new_value));

        // Collect every (path, min_len) proof target the trie requests.
        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The update must remain queued: it hit a blinded node and cannot apply yet.
        assert!(!updates.is_empty(), "Update should remain in map when hitting blinded node");

        // The failed update must not leave residue in the prefix set (atomicity).
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed update on blinded node"
        );

        // The callback must have been asked for a proof of the blinded path.
        let targets = proof_targets.borrow();
        assert!(!targets.is_empty(), "Callback should be invoked for blinded path");

        // min_len should equal the blinded node's path length (1 nibble).
        assert_eq!(targets[0].1, 1, "min_len should equal blinded node path length");
    }
8046
8047    #[test]
8048    fn test_update_leaves_removal() {
8049        use crate::LeafUpdate;
8050        use alloy_primitives::map::B256Map;
8051        use std::cell::RefCell;
8052
8053        let provider = DefaultTrieNodeProvider;
8054        let mut trie = ParallelSparseTrie::default();
8055
8056        // Create two leaves so removal doesn't result in empty trie issues
8057        // Use full-length keys
8058        let b256_key1 = B256::with_last_byte(1);
8059        let b256_key2 = B256::with_last_byte(2);
8060        let key1 = Nibbles::unpack(b256_key1);
8061        let key2 = Nibbles::unpack(b256_key2);
8062        let value = encode_account_value(1);
8063        trie.update_leaf(key1, value.clone(), &provider).unwrap();
8064        trie.update_leaf(key2, value, &provider).unwrap();
8065
8066        // Create an update to remove key1 (empty value = removal)
8067        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8068        updates.insert(b256_key1, LeafUpdate::Changed(vec![])); // empty = removal
8069
8070        let proof_targets = RefCell::new(Vec::new());
8071        trie.update_leaves(&mut updates, |path, min_len| {
8072            proof_targets.borrow_mut().push((path, min_len));
8073        })
8074        .unwrap();
8075
8076        // Removal should succeed: map empty
8077        assert!(updates.is_empty(), "Update map should be empty after successful removal");
8078    }
8079
    #[test]
    fn test_update_leaves_removal_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch with a blinded child at nibble 0 and a revealed leaf at
        // nibble 1. Use a small value so the leaf's RLP embeds inline.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the branch and the leaf at nibble 1; child 0 stays a Hash node.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Simulate having a known value behind the blinded node.
        let b256_key = B256::ZERO; // starts with 0x0...
        let full_path = Nibbles::unpack(b256_key);

        // Insert the value directly into the trie's values map (simulating we
        // know about it even though the covering node is blinded).
        let old_value = encode_account_value(99);
        trie.upper_subtrie.inner.values.insert(full_path, old_value.clone());

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); // empty = removal

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The trie must request a proof for the blinded path it could not descend.
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked when removal hits blinded node"
        );

        // The removal stays queued until the blinded node is revealed.
        assert!(!updates.is_empty(), "Update should remain in map when removal hits blinded node");

        // The failed removal must be atomic: the original value is reverted.
        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&old_value),
            "Original value should be preserved after failed removal"
        );

        // Likewise the prefix set must carry no residue from the failed attempt.
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed removal on blinded node"
        );
    }
8165
    #[test]
    fn test_update_leaves_removal_branch_collapse_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Create a branch node at root with two children:
        // - Child at nibble 0: a blinded Hash node
        // - Child at nibble 1: a revealed Leaf node
        // Removing the leaf would collapse the branch into its remaining child,
        // which is blinded — the removal must fail atomically and ask for a proof.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(Nibbles::default(), small_value);
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at nibble 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), /* leaf at nibble 1 */
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01), // nibble 0 is hashed
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the branch and the leaf at nibble 1, leaving nibble 0 as Hash node.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Insert the leaf's value into the values map for the revealed leaf.
        // Use a B256 key that starts with nibble 1 (0x10 has first nibble = 1).
        let b256_key = B256::with_last_byte(0x10);
        let full_path = Nibbles::unpack(b256_key);
        let leaf_value = encode_account_value(42);
        trie.upper_subtrie.inner.values.insert(full_path, leaf_value.clone());

        // Record state before update_leaves so atomicity can be verified below.
        let prefix_set_len_before = trie.prefix_set.len();
        let node_count_before = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); // removal

        let proof_targets = RefCell::new(Vec::new());
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Assert: update remains in map (removal blocked by blinded sibling).
        assert!(
            !updates.is_empty(),
            "Update should remain in map when removal would collapse branch with blinded sibling"
        );

        // Assert: callback was invoked for the blinded path.
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked for blinded sibling path"
        );

        // Assert: prefix_set unchanged (atomic failure).
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after atomic failure"
        );

        // Assert: node count unchanged — no partial structural mutation survived.
        let node_count_after = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();
        assert_eq!(
            node_count_before, node_count_after,
            "Node count should be unchanged after atomic failure"
        );

        // Assert: the leaf value still exists (not removed).
        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&leaf_value),
            "Leaf value should still exist after failed removal"
        );
    }
8272
8273    #[test]
8274    fn test_update_leaves_touched() {
8275        use crate::LeafUpdate;
8276        use alloy_primitives::map::B256Map;
8277        use std::cell::RefCell;
8278
8279        let provider = DefaultTrieNodeProvider;
8280        let mut trie = ParallelSparseTrie::default();
8281
8282        // Create a leaf in the trie using a full-length key
8283        let b256_key = B256::with_last_byte(42);
8284        let key = Nibbles::unpack(b256_key);
8285        let value = encode_account_value(1);
8286        trie.update_leaf(key, value, &provider).unwrap();
8287
8288        // Create a Touched update for the existing key
8289        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8290        updates.insert(b256_key, LeafUpdate::Touched);
8291
8292        let proof_targets = RefCell::new(Vec::new());
8293        let prefix_set_len_before = trie.prefix_set.len();
8294
8295        trie.update_leaves(&mut updates, |path, min_len| {
8296            proof_targets.borrow_mut().push((path, min_len));
8297        })
8298        .unwrap();
8299
8300        // Update should be removed (path is accessible)
8301        assert!(updates.is_empty(), "Touched update should be removed for accessible path");
8302
8303        // No callback
8304        assert!(
8305            proof_targets.borrow().is_empty(),
8306            "Callback should not be invoked for accessible path"
8307        );
8308
8309        // prefix_set should be unchanged since Touched is read-only
8310        assert_eq!(
8311            trie.prefix_set.len(),
8312            prefix_set_len_before,
8313            "prefix_set should be unchanged for Touched update (read-only)"
8314        );
8315    }
8316
8317    #[test]
8318    fn test_update_leaves_touched_nonexistent() {
8319        use crate::LeafUpdate;
8320        use alloy_primitives::map::B256Map;
8321        use std::cell::RefCell;
8322
8323        let mut trie = ParallelSparseTrie::default();
8324
8325        // Create a Touched update for a key that doesn't exist
8326        let b256_key = B256::with_last_byte(99);
8327        let full_path = Nibbles::unpack(b256_key);
8328
8329        let prefix_set_len_before = trie.prefix_set.len();
8330
8331        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8332        updates.insert(b256_key, LeafUpdate::Touched);
8333
8334        let proof_targets = RefCell::new(Vec::new());
8335        trie.update_leaves(&mut updates, |path, min_len| {
8336            proof_targets.borrow_mut().push((path, min_len));
8337        })
8338        .unwrap();
8339
8340        // Update should be removed (path IS accessible - it's just empty)
8341        assert!(updates.is_empty(), "Touched update should be removed for accessible (empty) path");
8342
8343        // No callback should be invoked (path is revealed, just empty)
8344        assert!(
8345            proof_targets.borrow().is_empty(),
8346            "Callback should not be invoked for accessible path"
8347        );
8348
8349        // prefix_set should NOT be modified (Touched is read-only)
8350        assert_eq!(
8351            trie.prefix_set.len(),
8352            prefix_set_len_before,
8353            "prefix_set should not be modified by Touched update"
8354        );
8355
8356        // No value should be inserted
8357        assert!(
8358            trie.get_leaf_value(&full_path).is_none(),
8359            "No value should exist for non-existent key after Touched update"
8360        );
8361    }
8362
8363    #[test]
8364    fn test_update_leaves_touched_blinded() {
8365        use crate::LeafUpdate;
8366        use alloy_primitives::map::B256Map;
8367        use std::cell::RefCell;
8368
8369        // Create a trie with a blinded node
8370        // Use a small value that fits in RLP encoding
8371        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
8372        let leaf = LeafNode::new(
8373            Nibbles::default(), // short key for RLP encoding
8374            small_value,
8375        );
8376        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
8377            Nibbles::default(),
8378            vec![
8379                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
8380                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
8381            ],
8382            TrieMask::new(0b11),
8383            None,
8384        ));
8385
8386        let mut trie = ParallelSparseTrie::from_root(
8387            branch.clone(),
8388            Some(BranchNodeMasks {
8389                hash_mask: TrieMask::new(0b01),
8390                tree_mask: TrieMask::default(),
8391            }),
8392            false,
8393        )
8394        .unwrap();
8395
8396        trie.reveal_node(
8397            Nibbles::default(),
8398            branch,
8399            Some(BranchNodeMasks {
8400                hash_mask: TrieMask::default(),
8401                tree_mask: TrieMask::new(0b01),
8402            }),
8403        )
8404        .unwrap();
8405        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();
8406
8407        // Create a Touched update targeting the blinded path using full B256 key
8408        let b256_key = B256::ZERO; // starts with 0x0...
8409
8410        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8411        updates.insert(b256_key, LeafUpdate::Touched);
8412
8413        let proof_targets = RefCell::new(Vec::new());
8414        let prefix_set_len_before = trie.prefix_set.len();
8415        trie.update_leaves(&mut updates, |path, min_len| {
8416            proof_targets.borrow_mut().push((path, min_len));
8417        })
8418        .unwrap();
8419
8420        // Callback should be invoked
8421        assert!(!proof_targets.borrow().is_empty(), "Callback should be invoked for blinded path");
8422
8423        // Update should remain in map
8424        assert!(!updates.is_empty(), "Touched update should remain in map for blinded path");
8425
8426        // prefix_set should be unchanged since Touched is read-only
8427        assert_eq!(
8428            trie.prefix_set.len(),
8429            prefix_set_len_before,
8430            "prefix_set should be unchanged for Touched update on blinded path"
8431        );
8432    }
8433
    #[test]
    fn test_update_leaves_deduplication() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Build a trie whose root branch has a blinded child at nibble 0 and a
        // revealed leaf at nibble 1. The value is small enough that the leaf's
        // RLP encoding is embedded directly in the branch (< 32 bytes).
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the root (with different masks) and the leaf at [0x1] so
        // only the child at nibble 0 remains blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Create multiple updates that would all hit the same blinded node at path 0x0.
        // Use full B256 keys that all start with nibble 0x0 (only the last byte differs).
        let b256_key1 = B256::ZERO;
        let b256_key2 = B256::with_last_byte(1); // still starts with 0x0
        let b256_key3 = B256::with_last_byte(2); // still starts with 0x0

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        let value = encode_account_value(42);

        updates.insert(b256_key1, LeafUpdate::Changed(value.clone()));
        updates.insert(b256_key2, LeafUpdate::Changed(value.clone()));
        updates.insert(b256_key3, LeafUpdate::Changed(value));

        let proof_targets = RefCell::new(Vec::new());
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The callback should be invoked 3 times - once for each unique full_path.
        // Deduplication is keyed on (full_path, min_len), not on the blinded node,
        // so three distinct keys yield three requests even through one blinded node.
        let targets = proof_targets.borrow();
        assert_eq!(targets.len(), 3, "Callback should be invoked for each unique key");

        // All should have the same min_len (1) since they all hit the blinded node
        // one nibble deep, at path 0x0.
        for (_, min_len) in targets.iter() {
            assert_eq!(*min_len, 1, "All should have min_len 1 from blinded node at 0x0");
        }
    }
8507
8508    #[test]
8509    fn test_nibbles_to_padded_b256() {
8510        // Empty nibbles should produce all zeros
8511        let empty = Nibbles::default();
8512        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&empty), B256::ZERO);
8513
8514        // Full 64-nibble path should round-trip through B256
8515        let full_key = b256!("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef");
8516        let full_nibbles = Nibbles::unpack(full_key);
8517        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&full_nibbles), full_key);
8518
8519        // Partial nibbles should be left-aligned with zero padding on the right
8520        // 4 nibbles [0x1, 0x2, 0x3, 0x4] should pack to 0x1234...00
8521        let partial = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
8522        let expected = b256!("1234000000000000000000000000000000000000000000000000000000000000");
8523        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&partial), expected);
8524
8525        // Single nibble
8526        let single = Nibbles::from_nibbles_unchecked([0xf]);
8527        let expected_single =
8528            b256!("f000000000000000000000000000000000000000000000000000000000000000");
8529        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&single), expected_single);
8530    }
8531
8532    #[test]
8533    fn test_memory_size() {
8534        // Test that memory_size returns a reasonable value for an empty trie
8535        let trie = ParallelSparseTrie::default();
8536        let empty_size = trie.memory_size();
8537
8538        // Should at least be the size of the struct itself
8539        assert!(empty_size >= core::mem::size_of::<ParallelSparseTrie>());
8540
8541        // Create a trie with some data. Set up a root branch with children at 0x1 and
8542        // 0x5, and branches at [0x1] and [0x5] pointing to 0x2 and 0x6 respectively,
8543        // so the lower subtries at [0x1, 0x2] and [0x5, 0x6] are reachable.
8544        let root_branch = create_branch_node_with_children(
8545            &[0x1, 0x5],
8546            [
8547                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
8548                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
8549            ],
8550        );
8551        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
8552
8553        let branch_at_1 =
8554            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xCC))]);
8555        let branch_at_5 =
8556            create_branch_node_with_children(&[0x6], [RlpNode::word_rlp(&B256::repeat_byte(0xDD))]);
8557        trie.reveal_nodes(&mut [
8558            ProofTrieNodeV2 {
8559                path: Nibbles::from_nibbles_unchecked([0x1]),
8560                node: branch_at_1,
8561                masks: None,
8562            },
8563            ProofTrieNodeV2 {
8564                path: Nibbles::from_nibbles_unchecked([0x5]),
8565                node: branch_at_5,
8566                masks: None,
8567            },
8568        ])
8569        .unwrap();
8570
8571        let mut nodes = vec![
8572            ProofTrieNodeV2 {
8573                path: Nibbles::from_nibbles_unchecked([0x1, 0x2]),
8574                node: TrieNodeV2::Leaf(LeafNode {
8575                    key: Nibbles::from_nibbles_unchecked([0x3, 0x4]),
8576                    value: vec![1, 2, 3],
8577                }),
8578                masks: None,
8579            },
8580            ProofTrieNodeV2 {
8581                path: Nibbles::from_nibbles_unchecked([0x5, 0x6]),
8582                node: TrieNodeV2::Leaf(LeafNode {
8583                    key: Nibbles::from_nibbles_unchecked([0x7, 0x8]),
8584                    value: vec![4, 5, 6],
8585                }),
8586                masks: None,
8587            },
8588        ];
8589        trie.reveal_nodes(&mut nodes).unwrap();
8590
8591        let populated_size = trie.memory_size();
8592
8593        // Populated trie should use more memory than an empty one
8594        assert!(populated_size > empty_size);
8595    }
8596
    #[test]
    fn test_reveal_extension_branch_leaves_then_root() {
        // Test structure:
        // - 0x (root): extension node with key of 63 zeroes
        // - 0x000...000 (63 zeroes): branch node with children at 1 and 2
        // - 0x000...0001 (62 zeroes + 01): leaf with value 1
        // - 0x000...0002 (62 zeroes + 02): leaf with value 2
        //
        // The leaves and branch are small enough to be embedded (< 32 bytes),
        // so we manually RLP encode them and use those encodings in parent nodes.
        //
        // The test only checks that reveal + root() do not error on this shape;
        // the resulting hash value itself is not asserted.

        // Create the extension key (63 zero nibbles)
        let ext_key: [u8; 63] = [0; 63];

        // The branch is at the end of the extension (63 zeroes)
        let branch_path = Nibbles::from_nibbles(ext_key);

        // Leaf paths: 63 zeroes + 1, 63 zeroes + 2
        let mut leaf1_path_bytes = [0u8; 64];
        leaf1_path_bytes[63] = 1;
        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);

        let mut leaf2_path_bytes = [0u8; 64];
        leaf2_path_bytes[63] = 2;
        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);

        // Create leaves with empty keys (full path consumed by extension + branch)
        // and simple values
        let leaf1_node = LeafNode::new(Nibbles::default(), vec![0x1]);
        let leaf2_node = LeafNode::new(Nibbles::default(), vec![0x2]);

        // RLP encode the leaves to get their RlpNode representations
        let leaf1_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf1_node.clone())));
        let leaf2_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf2_node.clone())));

        // Create the branch node with children at indices 1 and 2, using the RLP-encoded leaves.
        // In V2, branch and extension are combined: the key holds the extension prefix.
        let state_mask = TrieMask::new(0b0000_0110); // bits 1 and 2 set
        let stack = vec![leaf1_rlp, leaf2_rlp];

        // First encode the bare branch (empty key) to get its RlpNode
        let bare_branch = BranchNodeV2::new(Nibbles::new(), stack.clone(), state_mask, None);
        let branch_rlp = RlpNode::from_rlp(&alloy_rlp::encode(&bare_branch));

        // Create the combined extension+branch node as the root. The bare
        // branch's RlpNode is passed so the node carries its child's encoding.
        let root_node = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::from_nibbles(ext_key),
            stack.clone(),
            state_mask,
            Some(branch_rlp),
        ));

        // Initialize trie with the extension+branch as root
        let mut trie = ParallelSparseTrie::from_root(root_node, None, false).unwrap();

        // Reveal the branch and leaves (the branch again at its own path, this
        // time as a bare branch with an empty key, plus both leaf nodes)
        let mut nodes = vec![
            ProofTrieNodeV2 {
                path: branch_path,
                node: TrieNodeV2::Branch(BranchNodeV2::new(
                    Nibbles::new(),
                    stack,
                    state_mask,
                    None,
                )),
                masks: None,
            },
            ProofTrieNodeV2 { path: leaf1_path, node: TrieNodeV2::Leaf(leaf1_node), masks: None },
            ProofTrieNodeV2 { path: leaf2_path, node: TrieNodeV2::Leaf(leaf2_node), masks: None },
        ];
        trie.reveal_nodes(&mut nodes).unwrap();

        // Add the leaf paths to prefix_set so that root() will update their hashes
        trie.prefix_set.insert(leaf1_path);
        trie.prefix_set.insert(leaf2_path);

        // Call root() to compute the trie root hash; success (no panic/error) is
        // the assertion here.
        let _root = trie.root();
    }
8676
8677    #[test]
8678    fn test_update_leaf_creates_embedded_nodes_then_root() {
8679        // Similar structure to test_reveal_extension_branch_leaves_then_root, but created
8680        // via update_leaf calls on an empty trie instead of revealing pre-built nodes.
8681        //
8682        // Two leaves with paths that share a long common prefix will create:
8683        // - Extension node at root with the shared prefix
8684        // - Branch node where the paths diverge
8685        // - Two leaf nodes (embedded in the branch since they're small)
8686
8687        // Create two paths that share 63 nibbles and differ only at the 64th
8688        let mut leaf1_path_bytes = [0u8; 64];
8689        leaf1_path_bytes[63] = 1;
8690        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);
8691
8692        let mut leaf2_path_bytes = [0u8; 64];
8693        leaf2_path_bytes[63] = 2;
8694        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);
8695
8696        // Create an empty trie and update with two leaves
8697        let mut trie = ParallelSparseTrie::default();
8698        trie.update_leaf(leaf1_path, vec![0x1], DefaultTrieNodeProvider).unwrap();
8699        trie.update_leaf(leaf2_path, vec![0x2], DefaultTrieNodeProvider).unwrap();
8700
8701        // Call root() to compute the trie root hash
8702        let _root = trie.root();
8703    }
8704}