// reth_trie_sparse/parallel.rs

1#[cfg(feature = "trie-debug")]
2use crate::debug_recorder::{LeafUpdateRecord, ProofTrieNodeRecord, RecordedOp, TrieDebugRecorder};
3use crate::{
4    lower::LowerSparseSubtrie, LeafLookup, LeafLookupError, RlpNodeStackItem, SparseNode,
5    SparseNodeState, SparseNodeType, SparseTrie, SparseTrieUpdates,
6};
7use alloc::{borrow::Cow, boxed::Box, vec, vec::Vec};
8use alloy_primitives::{
9    map::{Entry, HashMap, HashSet},
10    B256, U256,
11};
12use alloy_rlp::Decodable;
13use alloy_trie::{BranchNodeCompact, TrieMask, EMPTY_ROOT_HASH};
14use core::cmp::{Ord, Ordering, PartialOrd};
15use reth_execution_errors::{SparseTrieError, SparseTrieErrorKind, SparseTrieResult};
16#[cfg(feature = "metrics")]
17use reth_primitives_traits::FastInstant as Instant;
18use reth_trie_common::{
19    prefix_set::{PrefixSet, PrefixSetMut},
20    BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles,
21    ProofTrieNodeV2, RlpNode, TrieNodeV2,
22};
23use smallvec::SmallVec;
24use tracing::{instrument, trace};
25
/// The maximum length of a path, in nibbles, which belongs to the upper subtrie of a
/// [`ParallelSparseTrie`]. All longer paths belong to a lower subtrie.
pub const UPPER_TRIE_MAX_DEPTH: usize = 2;

/// Number of lower subtries which are managed by the [`ParallelSparseTrie`].
///
/// One lower subtrie exists per possible [`UPPER_TRIE_MAX_DEPTH`]-nibble prefix (16 values per
/// nibble), i.e. `16^UPPER_TRIE_MAX_DEPTH`.
pub const NUM_LOWER_SUBTRIES: usize = 16usize.pow(UPPER_TRIE_MAX_DEPTH as u32);
32
/// Configuration for controlling when parallelism is enabled in [`ParallelSparseTrie`] operations.
///
/// The derived [`Default`] sets both thresholds to zero, which presumably means parallelism is
/// never disabled by these thresholds alone — TODO confirm against the
/// `is_reveal_parallelism_enabled`/`is_update_parallelism_enabled` checks (not visible here),
/// which may also gate on the `std` feature.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct ParallelismThresholds {
    /// Minimum number of nodes to reveal before parallel processing is enabled.
    /// When `reveal_nodes` has fewer nodes than this threshold, they will be processed serially.
    pub min_revealed_nodes: usize,
    /// Minimum number of changed keys (prefix set length) before parallel processing is enabled
    /// for hash updates. When updating subtrie hashes with fewer changed keys than this threshold,
    /// the updates will be processed serially.
    pub min_updated_nodes: usize,
}
44
/// A revealed sparse trie with subtries that can be updated in parallel.
///
/// ## Structure
///
/// The trie is divided into two tiers for efficient parallel processing:
/// - **Upper subtrie**: Contains nodes with paths shorter than [`UPPER_TRIE_MAX_DEPTH`]
/// - **Lower subtries**: An array of [`NUM_LOWER_SUBTRIES`] subtries, each handling nodes with
///   paths of at least [`UPPER_TRIE_MAX_DEPTH`] nibbles
///
/// Node placement is determined by path depth:
/// - Paths with < [`UPPER_TRIE_MAX_DEPTH`] nibbles go to the upper subtrie
/// - Paths with >= [`UPPER_TRIE_MAX_DEPTH`] nibbles go to lower subtries, indexed by their first
///   [`UPPER_TRIE_MAX_DEPTH`] nibbles.
///
/// Each lower subtrie tracks its root via the `path` field, which represents the shortest path
/// in that subtrie. This path will have at least [`UPPER_TRIE_MAX_DEPTH`] nibbles, but may be
/// longer when an extension node in the upper trie "reaches into" the lower subtrie. For example,
/// if the upper trie has an extension from `0x1` to `0x12345`, then the lower subtrie for prefix
/// `0x12` will have its root at path `0x12345` rather than at `0x12`.
///
/// ## Node Revealing
///
/// The trie uses lazy loading to efficiently handle large state tries. Nodes can be:
/// - **Blind nodes**: Stored as hashes on [`SparseNode::Branch::blinded_hashes`]
/// - **Revealed nodes**: Fully loaded nodes (Branch, Extension, Leaf) with complete structure
///
/// Note: An empty trie contains an `EmptyRoot` node at the root path, rather than no nodes at all.
/// A trie with no nodes is blinded, its root may be `EmptyRoot` or some other node type.
///
/// Revealing is generally done using pre-loaded node data provided via `reveal_nodes`. In
/// certain cases, such as edge-cases when updating/removing leaves, nodes are revealed on-demand.
///
/// ## Leaf Operations
///
/// **Update**: When updating a leaf, the new value is stored in the appropriate subtrie's values
/// map. If the leaf is new, the trie structure is updated by walking to the leaf from the root,
/// creating necessary intermediate branch nodes.
///
/// **Removal**: Leaf removal may require parent node modifications. The algorithm walks up the
/// trie, removing nodes that become empty and converting single-child branches to extensions.
///
/// During leaf operations the overall structure of the trie may change, causing nodes to be moved
/// from the upper to lower trie or vice-versa.
///
/// The `prefix_set` is modified during both leaf updates and removals to track changed leaf paths.
///
/// ## Root Hash Calculation
///
/// Root hash computation follows a bottom-up approach:
/// 1. Update hashes for all modified lower subtries (can be done in parallel)
/// 2. Update hashes for the upper subtrie (which may reference lower subtrie hashes)
/// 3. Calculate the final root hash from the upper subtrie's root node
///
/// The `prefix_set` tracks which paths have been modified, enabling incremental updates instead of
/// recalculating the entire trie.
///
/// ## Invariants
///
/// - Each leaf entry in the `subtries` and `upper_trie` collection must have a corresponding entry
///   in `values` collection. If the root node is a leaf, it must also have an entry in `values`.
/// - All keys in `values` collection are full leaf paths.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ParallelSparseTrie {
    /// This contains the trie nodes for the upper part of the trie.
    upper_subtrie: Box<SparseSubtrie>,
    /// An array containing the subtries at the second level of the trie.
    lower_subtries: Box<[LowerSparseSubtrie; NUM_LOWER_SUBTRIES]>,
    /// Set of prefixes (key paths) that have been marked as updated.
    /// This is used to track which parts of the trie need to be recalculated.
    prefix_set: PrefixSetMut,
    /// Optional tracking of trie updates for later use. `None` disables tracking.
    updates: Option<SparseTrieUpdates>,
    /// Branch node masks containing `tree_mask` and `hash_mask` for each path.
    /// - `tree_mask`: When a bit is set, the corresponding child subtree is stored in the
    ///   database.
    /// - `hash_mask`: When a bit is set, the corresponding child is stored as a hash in the
    ///   database.
    branch_node_masks: BranchNodeMasksMap,
    /// Reusable buffer pool used for collecting [`SparseTrieUpdatesAction`]s during hash
    /// computations.
    update_actions_buffers: Vec<Vec<SparseTrieUpdatesAction>>,
    /// Thresholds controlling when parallelism is enabled for different operations.
    parallelism_thresholds: ParallelismThresholds,
    /// Metrics for the parallel sparse trie.
    #[cfg(feature = "metrics")]
    metrics: crate::metrics::ParallelSparseTrieMetrics,
    /// Debug recorder for tracking mutating operations.
    #[cfg(feature = "trie-debug")]
    debug_recorder: TrieDebugRecorder,
}
135
136impl Default for ParallelSparseTrie {
137    fn default() -> Self {
138        Self {
139            upper_subtrie: Box::new(SparseSubtrie {
140                nodes: HashMap::from_iter([(Nibbles::default(), SparseNode::Empty)]),
141                ..Default::default()
142            }),
143            lower_subtries: Box::new(
144                [const { LowerSparseSubtrie::Blind(None) }; NUM_LOWER_SUBTRIES],
145            ),
146            prefix_set: PrefixSetMut::default(),
147            updates: None,
148            branch_node_masks: BranchNodeMasksMap::default(),
149            update_actions_buffers: Vec::default(),
150            parallelism_thresholds: Default::default(),
151            #[cfg(feature = "metrics")]
152            metrics: Default::default(),
153            #[cfg(feature = "trie-debug")]
154            debug_recorder: Default::default(),
155        }
156    }
157}
158
159impl SparseTrie for ParallelSparseTrie {
160    fn set_root(
161        &mut self,
162        root: TrieNodeV2,
163        masks: Option<BranchNodeMasks>,
164        retain_updates: bool,
165    ) -> SparseTrieResult<()> {
166        #[cfg(feature = "trie-debug")]
167        self.debug_recorder.record(RecordedOp::SetRoot {
168            node: ProofTrieNodeRecord::from_proof_trie_node_v2(&ProofTrieNodeV2 {
169                path: Nibbles::default(),
170                node: root.clone(),
171                masks,
172            }),
173        });
174
175        // A fresh/cleared `ParallelSparseTrie` has a `SparseNode::Empty` at its root in the upper
176        // subtrie. Delete that so we can reveal the new root node.
177        let path = Nibbles::default();
178        let _removed_root = self.upper_subtrie.nodes.remove(&path).expect("root node should exist");
179        debug_assert_eq!(_removed_root, SparseNode::Empty);
180
181        self.set_updates(retain_updates);
182
183        if let Some(masks) = masks {
184            let branch_path = if let TrieNodeV2::Branch(branch) = &root {
185                branch.key
186            } else {
187                Nibbles::default()
188            };
189
190            self.branch_node_masks.insert(branch_path, masks);
191        }
192
193        self.reveal_upper_node(Nibbles::default(), &root, masks)
194    }
195
196    fn set_updates(&mut self, retain_updates: bool) {
197        self.updates = retain_updates.then(Default::default);
198    }
199
    /// Reveals a batch of pre-loaded proof nodes into the trie.
    ///
    /// Nodes are sorted by subtrie (upper subtrie first) and secondarily by path. Upper-subtrie
    /// nodes are revealed serially via `reveal_upper_node`; lower-subtrie nodes are revealed
    /// either serially or in parallel on the rayon pool, depending on
    /// `is_reveal_parallelism_enabled`. Nodes in unreachable lower subtries, and boundary
    /// leaves not reachable from the upper subtrie, are skipped rather than treated as errors.
    ///
    /// Returns the last error encountered while revealing (if any); even on error, subtries
    /// processed on the rayon pool are put back into `lower_subtries`.
    fn reveal_nodes(&mut self, nodes: &mut [ProofTrieNodeV2]) -> SparseTrieResult<()> {
        if nodes.is_empty() {
            return Ok(())
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::RevealNodes {
            nodes: nodes.iter().map(ProofTrieNodeRecord::from_proof_trie_node_v2).collect(),
        });

        // Sort nodes first by their subtrie, and secondarily by their path. This allows for
        // grouping nodes by their subtrie using `chunk_by`.
        nodes.sort_unstable_by(
            |ProofTrieNodeV2 { path: path_a, .. }, ProofTrieNodeV2 { path: path_b, .. }| {
                let subtrie_type_a = SparseSubtrieType::from_path(path_a);
                let subtrie_type_b = SparseSubtrieType::from_path(path_b);
                subtrie_type_a.cmp(&subtrie_type_b).then_with(|| path_a.cmp(path_b))
            },
        );

        // Update the top-level branch node masks. This is simple and can't be done in parallel.
        self.branch_node_masks.reserve(nodes.len());
        for ProofTrieNodeV2 { path, masks, node } in nodes.iter() {
            if let Some(branch_masks) = masks {
                // Use proper path for branch nodes by combining path and extension key.
                let path = if let TrieNodeV2::Branch(branch) = node &&
                    !branch.key.is_empty()
                {
                    let mut path = *path;
                    path.extend(&branch.key);
                    path
                } else {
                    *path
                };
                self.branch_node_masks.insert(path, *branch_masks);
            }
        }

        // Due to the sorting all upper subtrie nodes will be at the front of the slice. We split
        // them off from the rest to be handled specially by
        // `ParallelSparseTrie::reveal_upper_node`.
        let num_upper_nodes = nodes
            .iter()
            .position(|n| !SparseSubtrieType::path_len_is_upper(n.path.len()))
            .unwrap_or(nodes.len());
        let (upper_nodes, lower_nodes) = nodes.split_at(num_upper_nodes);

        // Reserve the capacity of the upper subtrie's `nodes` HashMap before iterating, so we don't
        // end up making many small capacity changes as we loop.
        self.upper_subtrie.nodes.reserve(upper_nodes.len());
        for node in upper_nodes {
            self.reveal_upper_node(node.path, &node.node, node.masks)?;
        }

        // Bitset of lower subtries reachable from the (now updated) upper subtrie.
        let reachable_subtries = self.reachable_subtries();

        // Best-effort for boundary nodes: if the parent upper node exists as a branch and the
        // boundary child is still blinded, unset that blinded bit and carry the hash into
        // `reveal_node`. If the parent path is absent/non-branch (for example upper extension
        // crossing the boundary), skip without failing.
        let hashes_from_upper = nodes
            .iter()
            .filter_map(|node| {
                if node.path.len() != UPPER_TRIE_MAX_DEPTH ||
                    !reachable_subtries.get(path_subtrie_index_unchecked(&node.path))
                {
                    return None;
                }

                let parent_path = node.path.slice(0..UPPER_TRIE_MAX_DEPTH - 1);
                let Some(SparseNode::Branch { blinded_mask, blinded_hashes, .. }) =
                    self.upper_subtrie.nodes.get_mut(&parent_path)
                else {
                    return None;
                };

                let nibble = node.path.last().unwrap();
                blinded_mask.is_bit_set(nibble).then(|| {
                    blinded_mask.unset_bit(nibble);
                    (node.path, blinded_hashes[nibble as usize])
                })
            })
            .collect::<HashMap<_, _>>();

        // Serial path: reveal lower-subtrie nodes one at a time on the current thread.
        if !self.is_reveal_parallelism_enabled(lower_nodes.len()) {
            for node in lower_nodes {
                let idx = path_subtrie_index_unchecked(&node.path);
                if !reachable_subtries.get(idx) {
                    trace!(
                        target: "trie::parallel_sparse",
                        reveal_path = ?node.path,
                        "Node's lower subtrie is not reachable, skipping",
                    );
                    continue;
                }
                // For boundary leaves, check reachability from upper subtrie's parent branch
                if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                    !Self::is_boundary_leaf_reachable(
                        &self.upper_subtrie.nodes,
                        &node.path,
                        &node.node,
                    )
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        path = ?node.path,
                        "Boundary leaf not reachable from upper subtrie, skipping",
                    );
                    continue;
                }
                self.lower_subtries[idx].reveal(&node.path);
                self.lower_subtries[idx].as_revealed_mut().expect("just revealed").reveal_node(
                    node.path,
                    &node.node,
                    node.masks,
                    hashes_from_upper.get(&node.path).copied(),
                )?;
            }
            return Ok(())
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_reveal_parallelism_enabled");

        #[cfg(feature = "std")]
        // Reveal lower subtrie nodes in parallel
        {
            use rayon::iter::{IntoParallelIterator, ParallelIterator};
            use tracing::Span;

            // Capture the current span so it can be propagated to rayon worker threads
            let parent_span = Span::current();

            // Capture reference to upper subtrie nodes for boundary leaf reachability checks
            let upper_nodes = &self.upper_subtrie.nodes;

            // Group the nodes by lower subtrie.
            let results = lower_nodes
                .chunk_by(|node_a, node_b| {
                    SparseSubtrieType::from_path(&node_a.path) ==
                        SparseSubtrieType::from_path(&node_b.path)
                })
                // Filter out chunks for unreachable subtries.
                .filter_map(|nodes| {
                    let mut nodes = nodes
                        .iter()
                        .filter(|node| {
                            // For boundary leaves, check reachability from upper subtrie's parent
                            // branch.
                            if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                                !Self::is_boundary_leaf_reachable(
                                    upper_nodes,
                                    &node.path,
                                    &node.node,
                                )
                            {
                                trace!(
                                    target: "trie::parallel_sparse",
                                    path = ?node.path,
                                    "Boundary leaf not reachable from upper subtrie, skipping",
                                );
                                false
                            } else {
                                true
                            }
                        })
                        .peekable();

                    let node = nodes.peek()?;
                    let idx =
                        SparseSubtrieType::from_path(&node.path).lower_index().unwrap_or_else(
                            || panic!("upper subtrie node {node:?} found amongst lower nodes"),
                        );

                    if !reachable_subtries.get(idx) {
                        trace!(
                            target: "trie::parallel_sparse",
                            nodes = ?nodes,
                            "Lower subtrie is not reachable, skipping reveal",
                        );
                        return None;
                    }

                    // due to the nodes being sorted secondarily on their path, and chunk_by keeping
                    // the first element of each group, the `path` here will necessarily be the
                    // shortest path being revealed for each subtrie. Therefore we can reveal the
                    // subtrie itself using this path and retain correct behavior.
                    self.lower_subtries[idx].reveal(&node.path);
                    // Take ownership of the subtrie so it can be moved onto the rayon pool; it
                    // is re-inserted below after processing.
                    Some((
                        idx,
                        self.lower_subtries[idx].take_revealed().expect("just revealed"),
                        nodes,
                    ))
                })
                .collect::<Vec<_>>()
                .into_par_iter()
                .map(|(subtrie_idx, mut subtrie, nodes)| {
                    // Enter the parent span to propagate context (e.g., hashed_address for storage
                    // tries) to the worker thread
                    let _guard = parent_span.enter();

                    // reserve space in the HashMap ahead of time; doing it on a node-by-node basis
                    // can cause multiple re-allocations as the hashmap grows.
                    subtrie.nodes.reserve(nodes.size_hint().1.unwrap_or(0));

                    for node in nodes {
                        // Reveal each node in the subtrie, returning early on any errors
                        let res = subtrie.reveal_node(
                            node.path,
                            &node.node,
                            node.masks,
                            hashes_from_upper.get(&node.path).copied(),
                        );
                        if res.is_err() {
                            return (subtrie_idx, subtrie, res.map(|_| ()))
                        }
                    }
                    (subtrie_idx, subtrie, Ok(()))
                })
                .collect::<Vec<_>>();

            // Put subtries back which were processed in the rayon pool, collecting the last
            // seen error in the process and returning that.
            let mut any_err = Ok(());
            for (subtrie_idx, subtrie, res) in results {
                self.lower_subtries[subtrie_idx] = LowerSparseSubtrie::Revealed(subtrie);
                if res.is_err() {
                    any_err = res;
                }
            }

            any_err
        }
    }
434
435    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
436    fn root(&mut self) -> B256 {
437        trace!(target: "trie::parallel_sparse", "Calculating trie root hash");
438
439        #[cfg(feature = "trie-debug")]
440        self.debug_recorder.record(RecordedOp::Root);
441
442        if self.prefix_set.is_empty() &&
443            let Some(rlp_node) = self
444                .upper_subtrie
445                .nodes
446                .get(&Nibbles::default())
447                .and_then(|node| node.cached_rlp_node())
448        {
449            return rlp_node
450                .as_hash()
451                .expect("RLP-encoding of the root node cannot be less than 32 bytes")
452        }
453
454        // Update all lower subtrie hashes
455        self.update_subtrie_hashes();
456
457        // Update hashes for the upper subtrie using our specialized function
458        // that can access both upper and lower subtrie nodes
459        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
460        let root_rlp = self.update_upper_subtrie_hashes(&mut prefix_set);
461
462        // Return the root hash
463        root_rlp.as_hash().unwrap_or(EMPTY_ROOT_HASH)
464    }
465
466    fn is_root_cached(&self) -> bool {
467        self.prefix_set.is_empty() &&
468            self.upper_subtrie
469                .nodes
470                .get(&Nibbles::default())
471                .is_some_and(|node| node.cached_rlp_node().is_some())
472    }
473
    /// Recomputes hashes for all lower subtries whose paths appear in the prefix set.
    ///
    /// Changed subtries are taken out of `lower_subtries` together with per-subtrie prefix sets;
    /// prefix-set keys with no matching subtrie are retained in `self.prefix_set` for the upper
    /// subtrie pass. Hashing runs serially or on the rayon pool depending on
    /// `is_update_parallelism_enabled`, after which the subtries are re-inserted.
    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
    fn update_subtrie_hashes(&mut self) {
        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateSubtrieHashes);

        // Take changed subtries according to the prefix set
        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
        let num_changed_keys = prefix_set.len();
        let (mut changed_subtries, unchanged_prefix_set) =
            self.take_changed_lower_subtries(&mut prefix_set);

        // update metrics
        #[cfg(feature = "metrics")]
        self.metrics.subtries_updated.record(changed_subtries.len() as f64);

        // Update the prefix set with the keys that didn't have matching subtries
        self.prefix_set = unchanged_prefix_set;

        // Update subtrie hashes serially if parallelism is not enabled
        if !self.is_update_parallelism_enabled(num_changed_keys) {
            for changed_subtrie in &mut changed_subtries {
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
            }

            self.insert_changed_subtries(changed_subtries);
            return
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_update_parallelism_enabled");

        #[cfg(feature = "std")]
        // Update subtrie hashes in parallel
        {
            use rayon::prelude::*;

            // Each closure only mutates its own `changed_subtrie`; `self` is borrowed shared
            // for the masks map and (presumably interior-mutable) metrics recorder.
            changed_subtries.par_iter_mut().for_each(|changed_subtrie| {
                #[cfg(feature = "metrics")]
                let start = Instant::now();
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
                #[cfg(feature = "metrics")]
                self.metrics.subtrie_hash_update_latency.record(start.elapsed());
            });

            self.insert_changed_subtries(changed_subtries);
        }
    }
531
532    fn get_leaf_value(&self, full_path: &Nibbles) -> Option<&Vec<u8>> {
533        // `subtrie_for_path` is intended for a node path, but here we are using a full key path. So
534        // we need to check if the subtrie that the key might belong to has any nodes; if not then
535        // the key's portion of the trie doesn't have enough depth to reach into the subtrie, and
536        // the key will be in the upper subtrie
537        if let Some(subtrie) = self.subtrie_for_path(full_path) &&
538            !subtrie.is_empty()
539        {
540            return subtrie.inner.values.get(full_path);
541        }
542
543        self.upper_subtrie.inner.values.get(full_path)
544    }
545
546    fn updates_ref(&self) -> Cow<'_, SparseTrieUpdates> {
547        self.updates.as_ref().map_or(Cow::Owned(SparseTrieUpdates::default()), Cow::Borrowed)
548    }
549
550    fn take_updates(&mut self) -> SparseTrieUpdates {
551        match self.updates.take() {
552            Some(updates) => {
553                // NOTE: we need to preserve Some case
554                self.updates = Some(SparseTrieUpdates::with_capacity(
555                    updates.updated_nodes.len(),
556                    updates.removed_nodes.len(),
557                ));
558                updates
559            }
560            None => SparseTrieUpdates::default(),
561        }
562    }
563
564    fn wipe(&mut self) {
565        self.upper_subtrie.wipe();
566        for trie in &mut *self.lower_subtries {
567            trie.wipe();
568        }
569        self.prefix_set = PrefixSetMut::all();
570        self.updates = self.updates.is_some().then(SparseTrieUpdates::wiped);
571    }
572
573    fn clear(&mut self) {
574        self.upper_subtrie.clear();
575        self.upper_subtrie.nodes.insert(Nibbles::default(), SparseNode::Empty);
576        for subtrie in &mut *self.lower_subtries {
577            subtrie.clear();
578        }
579        self.prefix_set.clear();
580        self.updates = None;
581        self.branch_node_masks.clear();
582        #[cfg(feature = "trie-debug")]
583        self.debug_recorder.reset();
584        // `update_actions_buffers` doesn't need to be cleared; we want to reuse the Vecs it has
585        // buffered, and all of those are already inherently cleared when they get used.
586    }
587
    /// Checks whether a leaf at `full_path` exists, producing an inclusion/exclusion result or
    /// an error if the proof is incomplete or the value mismatches.
    fn find_leaf(
        &self,
        full_path: &Nibbles,
        expected_value: Option<&Vec<u8>>,
    ) -> Result<LeafLookup, LeafLookupError> {
        // Inclusion proof
        //
        // First, do a quick check if the value exists in either the upper or lower subtrie's values
        // map. We assume that if there exists a leaf node, then its value will be in the `values`
        // map.
        if let Some(actual_value) = core::iter::once(self.upper_subtrie.as_ref())
            .chain(self.lower_subtrie_for_path(full_path))
            .filter_map(|subtrie| subtrie.inner.values.get(full_path))
            .next()
        {
            // We found the leaf, check if the value matches (if expected value was provided)
            return expected_value
                .is_none_or(|v| v == actual_value)
                .then_some(LeafLookup::Exists)
                .ok_or_else(|| LeafLookupError::ValueMismatch {
                    path: *full_path,
                    expected: expected_value.cloned(),
                    actual: actual_value.clone(),
                })
        }

        // If the value does not exist in the `values` map, then this means that the leaf either:
        // - Does not exist in the trie
        // - Is missing from the witness
        // We traverse the trie to find the location where this leaf would have been, showing
        // that it is not in the trie. Or we find a blinded node, showing that the witness is
        // not complete.
        let mut curr_path = Nibbles::new(); // start traversal from root
        let mut curr_subtrie = self.upper_subtrie.as_ref();
        let mut curr_subtrie_is_upper = true;

        loop {
            // NOTE(review): the unwrap assumes every path reached by this walk has a node in
            // the current subtrie's `nodes` map — TODO confirm this invariant holds for all
            // reveal/update paths.
            match curr_subtrie.nodes.get(&curr_path).unwrap() {
                SparseNode::Empty => return Ok(LeafLookup::NonExistent),
                SparseNode::Leaf { key, .. } => {
                    // A leaf on the walk that isn't the target proves the target is absent.
                    let mut found_full_path = curr_path;
                    found_full_path.extend(key);
                    assert!(&found_full_path != full_path, "target leaf {full_path:?} found, even though value wasn't in values hashmap");
                    return Ok(LeafLookup::NonExistent)
                }
                SparseNode::Extension { key, .. } => {
                    // If the target path ends here, or diverges from the extension's key, the
                    // leaf cannot exist below this extension.
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.extend(key);
                    if !full_path.starts_with(&curr_path) {
                        return Ok(LeafLookup::NonExistent)
                    }
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    // Descend into the child selected by the next nibble of the target path.
                    let nibble = full_path.get_unchecked(curr_path.len());
                    if !state_mask.is_bit_set(nibble) {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.push_unchecked(nibble);
                    // A blinded child means the witness is incomplete along this path.
                    if blinded_mask.is_bit_set(nibble) {
                        return Err(LeafLookupError::BlindedNode {
                            path: curr_path,
                            hash: blinded_hashes[nibble as usize],
                        })
                    }
                }
            }

            // If we were previously looking at the upper trie, and the new path is in the
            // lower trie, we need to pull out a ref to the lower trie.
            if curr_subtrie_is_upper &&
                let Some(lower_subtrie) = self.lower_subtrie_for_path(&curr_path)
            {
                curr_subtrie = lower_subtrie;
                curr_subtrie_is_upper = false;
            }
        }
    }
670
671    fn shrink_nodes_to(&mut self, size: usize) {
672        // Distribute the capacity across upper and lower subtries
673        //
674        // Always include upper subtrie, plus any lower subtries
675        let total_subtries = 1 + NUM_LOWER_SUBTRIES;
676        let size_per_subtrie = size / total_subtries;
677
678        // Shrink the upper subtrie
679        self.upper_subtrie.shrink_nodes_to(size_per_subtrie);
680
681        // Shrink lower subtries (works for both revealed and blind with allocation)
682        for subtrie in &mut *self.lower_subtries {
683            subtrie.shrink_nodes_to(size_per_subtrie);
684        }
685
686        // shrink masks map
687        self.branch_node_masks.shrink_to(size);
688    }
689
690    fn shrink_values_to(&mut self, size: usize) {
691        // Distribute the capacity across upper and lower subtries
692        //
693        // Always include upper subtrie, plus any lower subtries
694        let total_subtries = 1 + NUM_LOWER_SUBTRIES;
695        let size_per_subtrie = size / total_subtries;
696
697        // Shrink the upper subtrie
698        self.upper_subtrie.shrink_values_to(size_per_subtrie);
699
700        // Shrink lower subtries (works for both revealed and blind with allocation)
701        for subtrie in &mut *self.lower_subtries {
702            subtrie.shrink_values_to(size_per_subtrie);
703        }
704    }
705
706    /// O(1) size hint based on total node count (including hash stubs).
707    fn size_hint(&self) -> usize {
708        let upper_count = self.upper_subtrie.nodes.len();
709        let lower_count: usize = self
710            .lower_subtries
711            .iter()
712            .filter_map(|s| s.as_revealed_ref())
713            .map(|s| s.nodes.len())
714            .sum();
715        upper_count + lower_count
716    }
717
    fn memory_size(&self) -> usize {
        // Delegates to the type's *inherent* `memory_size` method: Rust resolves inherent
        // methods before trait methods, so this call is not self-recursive.
        // NOTE(review): assumes an inherent `ParallelSparseTrie::memory_size` exists
        // elsewhere in this file — confirm, otherwise this would recurse forever.
        self.memory_size()
    }
721
    fn prune(&mut self, retained_leaves: &[Nibbles]) -> usize {
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.reset();

        // A sorted copy lets the prefix/descendant helpers below use ordered searches.
        let mut retained_leaves = retained_leaves.to_vec();
        retained_leaves.sort_unstable();

        // Roots of subtrees that were converted into blinded hashes; their descendants
        // are dropped in `finalize_pruned_roots`.
        let mut effective_pruned_roots = Vec::<Nibbles>::new();
        // Iterative DFS over the trie starting at the root path.
        let mut stack: SmallVec<[Nibbles; 32]> = SmallVec::new();
        stack.push(Nibbles::default());

        while let Some(path) = stack.pop() {
            // The node may live in an unrevealed subtrie or have been removed already;
            // skip silently in that case.
            let Some(node) =
                self.subtrie_for_path(&path).and_then(|subtrie| subtrie.nodes.get(&path).cloned())
            else {
                continue;
            };

            match node {
                // Nothing below these to prune.
                SparseNode::Empty | SparseNode::Leaf { .. } => {}
                SparseNode::Extension { key, state, .. } => {
                    let mut child = path;
                    child.extend(&key);

                    // A retained leaf lives under the child: keep descending instead of
                    // pruning here.
                    if has_retained_descendant(&retained_leaves, &child) {
                        stack.push(child);
                        continue;
                    }

                    // Root extension has no parent branch edge to blind; keep it as-is.
                    if path.is_empty() {
                        continue;
                    }

                    // Without a cached hash the edge cannot be blinded; keep the node.
                    let Some(hash) = state.cached_hash() else { continue };
                    self.subtrie_for_path_mut_untracked(&path)
                        .expect("node subtrie exists")
                        .nodes
                        .remove(&path);

                    let parent_path = path.slice(0..path.len() - 1);
                    // Parent can live in a different subtrie when `path` is the root of a lower
                    // subtrie, so resolve it by `parent_path` rather than reusing `path`'s subtrie.
                    let SparseNode::Branch { blinded_mask, blinded_hashes, .. } = self
                        .subtrie_for_path_mut_untracked(&parent_path)
                        .expect("parent subtrie exists")
                        .nodes
                        .get_mut(&parent_path)
                        .expect("expected parent branch node")
                    else {
                        panic!("expected branch node at path {parent_path:?}");
                    };

                    // Record the pruned extension as a blinded child of its parent branch.
                    let nibble = path.last().unwrap();
                    blinded_mask.set_bit(nibble);
                    blinded_hashes[nibble as usize] = hash;
                    effective_pruned_roots.push(path);
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    // Work on local copies of the blinded bookkeeping; written back to the
                    // branch node after the child loop.
                    let mut blinded_mask = blinded_mask;
                    let mut blinded_hashes = blinded_hashes;
                    for nibble in state_mask.iter() {
                        // Already-blinded children need no work.
                        if blinded_mask.is_bit_set(nibble) {
                            continue;
                        }

                        let mut child = path;
                        child.push_unchecked(nibble);
                        if has_retained_descendant(&retained_leaves, &child) {
                            stack.push(child);
                            continue;
                        }

                        let Entry::Occupied(entry) =
                            self.subtrie_for_path_mut_untracked(&child).unwrap().nodes.entry(child)
                        else {
                            panic!("expected node at path {child:?}");
                        };

                        // Children without a cached hash cannot be blinded; keep them.
                        let Some(hash) = entry.get().cached_hash() else {
                            continue;
                        };
                        entry.remove();
                        blinded_mask.set_bit(nibble);
                        blinded_hashes[nibble as usize] = hash;
                        effective_pruned_roots.push(child);
                    }

                    // Write the updated blinded bookkeeping back into the branch node.
                    let SparseNode::Branch {
                        blinded_mask: old_blinded_mask,
                        blinded_hashes: old_blinded_hashes,
                        ..
                    } = self
                        .subtrie_for_path_mut_untracked(&path)
                        .unwrap()
                        .nodes
                        .get_mut(&path)
                        .unwrap()
                    else {
                        unreachable!("expected branch node at path {path:?}");
                    };
                    *old_blinded_mask = blinded_mask;
                    *old_blinded_hashes = blinded_hashes;
                }
            }
        }

        Self::finalize_pruned_roots(self, effective_pruned_roots)
    }
831
    fn update_leaves(
        &mut self,
        updates: &mut alloy_primitives::map::B256Map<crate::LeafUpdate>,
        mut proof_required_fn: impl FnMut(B256, u8),
    ) -> SparseTrieResult<()> {
        use crate::LeafUpdate;

        // Snapshot the incoming updates for the debug recorder before draining them.
        #[cfg(feature = "trie-debug")]
        let recorded_updates: Vec<_> =
            updates.iter().map(|(k, v)| (*k, LeafUpdateRecord::from(v))).collect();
        #[cfg(feature = "trie-debug")]
        let mut recorded_proof_targets: Vec<(B256, u8)> = Vec::new();

        // Drain updates to avoid cloning keys while preserving the map's allocation.
        // On success, entries remain removed; on blinded node failure, they're re-inserted.
        let drained: Vec<_> = updates.drain().collect();

        for (key, update) in drained {
            let full_path = Nibbles::unpack(key);

            match update {
                LeafUpdate::Changed(value) => {
                    // An empty value signals deletion of the leaf.
                    if value.is_empty() {
                        // Removal is atomic - returns a retriable error before any mutations (via
                        // pre_validate_reveal_chain).
                        match self.remove_leaf(&full_path) {
                            Ok(()) => {}
                            Err(e) => {
                                if let Some(path) = Self::get_retriable_path(&e) {
                                    // A node on the way is blinded: request a proof for it
                                    // and queue this update to be retried after the reveal.
                                    let (target_key, min_len) =
                                        Self::proof_target_for_path(key, &full_path, &path);
                                    proof_required_fn(target_key, min_len);
                                    #[cfg(feature = "trie-debug")]
                                    recorded_proof_targets.push((target_key, min_len));
                                    updates.insert(key, LeafUpdate::Changed(value));
                                } else {
                                    return Err(e);
                                }
                            }
                        }
                    } else {
                        // Update/insert: update_leaf is atomic - cleans up on error.
                        if let Err(e) = self.update_leaf(full_path, value.clone()) {
                            if let Some(path) = Self::get_retriable_path(&e) {
                                // Same retry protocol as the removal case above.
                                let (target_key, min_len) =
                                    Self::proof_target_for_path(key, &full_path, &path);
                                proof_required_fn(target_key, min_len);
                                #[cfg(feature = "trie-debug")]
                                recorded_proof_targets.push((target_key, min_len));
                                updates.insert(key, LeafUpdate::Changed(value));
                            } else {
                                return Err(e);
                            }
                        }
                    }
                }
                LeafUpdate::Touched => {
                    // Touched is read-only: check if path is accessible, request proof if blinded.
                    match self.find_leaf(&full_path, None) {
                        Err(LeafLookupError::BlindedNode { path, .. }) => {
                            let (target_key, min_len) =
                                Self::proof_target_for_path(key, &full_path, &path);
                            proof_required_fn(target_key, min_len);
                            #[cfg(feature = "trie-debug")]
                            recorded_proof_targets.push((target_key, min_len));
                            updates.insert(key, LeafUpdate::Touched);
                        }
                        // Path is fully revealed (exists or proven non-existent), no action needed.
                        Ok(_) | Err(LeafLookupError::ValueMismatch { .. }) => {}
                    }
                }
            }
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateLeaves {
            updates: recorded_updates,
            proof_targets: recorded_proof_targets,
        });

        Ok(())
    }
914
915    #[cfg(feature = "trie-debug")]
916    fn take_debug_recorder(&mut self) -> TrieDebugRecorder {
917        core::mem::take(&mut self.debug_recorder)
918    }
919
920    fn commit_updates(
921        &mut self,
922        updated: &HashMap<Nibbles, BranchNodeCompact>,
923        removed: &HashSet<Nibbles>,
924    ) {
925        // Sync branch_node_masks with what's being committed to DB.
926        // This ensures that on subsequent root() calls, the masks reflect the actual
927        // DB state, which is needed for correct removal detection.
928        self.branch_node_masks.reserve(updated.len());
929        for (path, node) in updated {
930            self.branch_node_masks.insert(
931                *path,
932                BranchNodeMasks { tree_mask: node.tree_mask, hash_mask: node.hash_mask },
933            );
934        }
935        for path in removed {
936            self.branch_node_masks.remove(path);
937        }
938    }
939}
940
941impl ParallelSparseTrie {
    /// Sets the thresholds that control when parallelism is used during operations.
    ///
    /// Builder-style: consumes and returns `self` so it can be chained at construction.
    pub const fn with_parallelism_thresholds(mut self, thresholds: ParallelismThresholds) -> Self {
        self.parallelism_thresholds = thresholds;
        self
    }
947
    /// Returns true if retaining updates is enabled for the overall trie.
    ///
    /// Update retention is on exactly when an updates accumulator is present.
    const fn updates_enabled(&self) -> bool {
        self.updates.is_some()
    }
952
953    /// Returns true if parallelism should be enabled for revealing the given number of nodes.
954    /// Will always return false in nostd builds.
955    const fn is_reveal_parallelism_enabled(&self, num_nodes: usize) -> bool {
956        #[cfg(not(feature = "std"))]
957        {
958            let _ = num_nodes;
959            false
960        }
961
962        #[cfg(feature = "std")]
963        {
964            num_nodes >= self.parallelism_thresholds.min_revealed_nodes
965        }
966    }
967
968    /// Returns true if parallelism should be enabled for updating hashes with the given number
969    /// of changed keys. Will always return false in nostd builds.
970    const fn is_update_parallelism_enabled(&self, num_changed_keys: usize) -> bool {
971        #[cfg(not(feature = "std"))]
972        {
973            let _ = num_changed_keys;
974            false
975        }
976
977        #[cfg(feature = "std")]
978        {
979            num_changed_keys >= self.parallelism_thresholds.min_updated_nodes
980        }
981    }
982
983    /// Checks if an error is retriable (`BlindedNode` or `NodeNotFoundInProvider`) and extracts
984    /// the path if so.
985    ///
986    /// Both error types indicate that a node needs to be revealed before the operation can
987    /// succeed. `BlindedNode` occurs when traversing to a Hash node, while `NodeNotFoundInProvider`
988    /// occurs when `retain_updates` is enabled and an extension node's child needs revealing.
989    const fn get_retriable_path(e: &SparseTrieError) -> Option<Nibbles> {
990        match e.kind() {
991            SparseTrieErrorKind::BlindedNode(path) |
992            SparseTrieErrorKind::NodeNotFoundInProvider { path } => Some(*path),
993            _ => None,
994        }
995    }
996
997    /// Converts a nibbles path to a B256, right-padding with zeros to 64 nibbles.
998    fn nibbles_to_padded_b256(path: &Nibbles) -> B256 {
999        let mut bytes = [0u8; 32];
1000        path.pack_to(&mut bytes);
1001        B256::from(bytes)
1002    }
1003
1004    /// Computes the proof target key and `min_len` for a blinded node error.
1005    ///
1006    /// Returns `(target_key, min_len)` where:
1007    /// - `target_key` is `full_key` if `path` is a prefix of `full_path`, otherwise the padded path
1008    /// - `min_len` is always based on `path.len()`
1009    fn proof_target_for_path(full_key: B256, full_path: &Nibbles, path: &Nibbles) -> (B256, u8) {
1010        let min_len = (path.len() as u8).min(64);
1011        let target_key =
1012            if full_path.starts_with(path) { full_key } else { Self::nibbles_to_padded_b256(path) };
1013        (target_key, min_len)
1014    }
1015
1016    /// Creates a new revealed sparse trie from the given root node.
1017    ///
1018    /// This function initializes the internal structures and then reveals the root.
1019    /// It is a convenient method to create a trie when you already have the root node available.
1020    ///
1021    /// # Arguments
1022    ///
1023    /// * `root` - The root node of the trie
1024    /// * `masks` - Trie masks for root branch node
1025    /// * `retain_updates` - Whether to track updates
1026    ///
1027    /// # Returns
1028    ///
1029    /// Self if successful, or an error if revealing fails.
1030    pub fn from_root(
1031        root: TrieNodeV2,
1032        masks: Option<BranchNodeMasks>,
1033        retain_updates: bool,
1034    ) -> SparseTrieResult<Self> {
1035        Self::default().with_root(root, masks, retain_updates)
1036    }
1037
    /// Updates the value of a leaf node at the specified path.
    ///
    /// If the leaf already exists (in the upper or a lower subtrie), only its value and the
    /// prefix set are updated. Otherwise the trie is walked from the root, inserting nodes
    /// along the way and relocating any that end up below the upper/lower boundary into the
    /// appropriate lower subtrie. On error, staged values are rolled back so the operation
    /// stays atomic.
    pub fn update_leaf(&mut self, full_path: Nibbles, value: Vec<u8>) -> SparseTrieResult<()> {
        debug_assert_eq!(
            full_path.len(),
            B256::len_bytes() * 2,
            "update_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
            full_path.len()
        );

        trace!(
            target: "trie::parallel_sparse",
            ?full_path,
            value_len = value.len(),
            "Updating leaf",
        );

        // Fast path: the leaf already exists in the upper subtrie - just replace the value.
        if self.upper_subtrie.inner.values.contains_key(&full_path) {
            self.prefix_set.insert(full_path);
            self.upper_subtrie.inner.values.insert(full_path, value);
            return Ok(());
        }
        // Fast path: the leaf already exists in a lower subtrie - just replace the value.
        if let Some(subtrie) = self.lower_subtrie_for_path(&full_path) &&
            subtrie.inner.values.contains_key(&full_path)
        {
            self.prefix_set.insert(full_path);
            self.lower_subtrie_for_path_mut(&full_path)
                .expect("subtrie exists")
                .inner
                .values
                .insert(full_path, value);
            return Ok(());
        }

        // Stage the value in the upper subtrie; it is moved to a lower subtrie (or removed
        // on error) once the leaf's final position is known.
        self.upper_subtrie.inner.values.insert(full_path, value.clone());

        // Paths of nodes inserted during the walk; some may need relocating to lower
        // subtries afterwards.
        let mut new_nodes = Vec::new();
        let mut next = Some(Nibbles::default());

        // Walk the upper subtrie step by step while the current path is still within the
        // upper-trie depth.
        while let Some(current) =
            next.as_mut().filter(|next| SparseSubtrieType::path_len_is_upper(next.len()))
        {
            let step_result = self.upper_subtrie.update_next_node(current, &full_path);

            // On failure, roll back the staged value before propagating the error.
            if step_result.is_err() {
                self.upper_subtrie.inner.values.remove(&full_path);
                return step_result.map(|_| ());
            }

            match step_result? {
                LeafUpdateStep::Continue => {}
                LeafUpdateStep::Complete { inserted_nodes } => {
                    new_nodes.extend(inserted_nodes);
                    next = None;
                }
                LeafUpdateStep::NodeNotFound => {
                    next = None;
                }
            }
        }

        // Relocate freshly inserted nodes whose path is too deep for the upper subtrie
        // into their lower subtrie, moving associated leaf values along with them.
        for node_path in &new_nodes {
            if SparseSubtrieType::path_len_is_upper(node_path.len()) {
                continue;
            }

            let node =
                self.upper_subtrie.nodes.remove(node_path).expect("node belongs to upper subtrie");

            // Leaves carry a value entry keyed by their full path; move it with the node.
            let leaf_value = if let SparseNode::Leaf { key, .. } = &node {
                let mut leaf_full_path = *node_path;
                leaf_full_path.extend(key);
                Some((
                    leaf_full_path,
                    self.upper_subtrie
                        .inner
                        .values
                        .remove(&leaf_full_path)
                        .expect("leaf nodes have associated values entries"),
                ))
            } else {
                None
            };

            let subtrie = self.subtrie_for_path_mut(node_path);

            if let Some((leaf_full_path, value)) = leaf_value {
                subtrie.inner.values.insert(leaf_full_path, value);
            }

            subtrie.nodes.insert(*node_path, node);
        }

        // If the walk stopped at the upper/lower boundary, continue the insertion inside
        // the target lower subtrie.
        if let Some(next_path) = next.filter(|n| !SparseSubtrieType::path_len_is_upper(n.len())) {
            self.upper_subtrie.inner.values.remove(&full_path);

            let subtrie = self.subtrie_for_path_mut(&next_path);

            // A brand-new lower subtrie starts from an explicit empty node at its root.
            if subtrie.nodes.is_empty() {
                subtrie.nodes.insert(subtrie.path, SparseNode::Empty);
            }

            if let Err(e) = subtrie.update_leaf(full_path, value) {
                // Roll back the value on error to keep the operation atomic.
                if let Some(lower) = self.lower_subtrie_for_path_mut(&full_path) {
                    lower.inner.values.remove(&full_path);
                }
                return Err(e);
            }
        }

        // Record the change so the next hash computation revisits this path.
        self.prefix_set.insert(full_path);

        Ok(())
    }
1151
    /// Removes a leaf node at the specified path.
    ///
    /// Walks from the root to the leaf while remembering the nearest ancestor branch (and
    /// the extension directly above it, if any), then removes the leaf and collapses those
    /// ancestors as needed. Retriable `BlindedNode` errors are returned before any mutation
    /// happens, so a failed removal leaves the trie unchanged.
    pub fn remove_leaf(&mut self, full_path: &Nibbles) -> SparseTrieResult<()> {
        debug_assert_eq!(
            full_path.len(),
            B256::len_bytes() * 2,
            "remove_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
            full_path.len()
        );

        trace!(
            target: "trie::parallel_sparse",
            ?full_path,
            "Removing leaf",
        );

        let leaf_path;
        let leaf_subtrie_type;

        // Nearest branch node above the leaf, captured during the walk down.
        let mut branch_parent_path: Option<Nibbles> = None;
        let mut branch_parent_node: Option<SparseNode> = None;

        // Extension node directly above that branch, if one exists.
        let mut ext_grandparent_path: Option<Nibbles> = None;
        let mut ext_grandparent_node: Option<SparseNode> = None;

        let mut curr_path = Nibbles::new();
        let mut curr_subtrie_type = SparseSubtrieType::Upper;

        // Branch/extension ancestors traversed on the way down; marked dirty once the
        // removal is known to go ahead.
        let mut paths_to_mark_dirty = Vec::new();

        // Phase 1: walk down to the leaf, recording ancestors without mutating anything.
        loop {
            let curr_subtrie = match curr_subtrie_type {
                SparseSubtrieType::Upper => &mut self.upper_subtrie,
                SparseSubtrieType::Lower(idx) => {
                    self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
                }
            };
            let curr_node = curr_subtrie.nodes.get_mut(&curr_path).unwrap();

            match Self::find_next_to_leaf(&curr_path, curr_node, full_path) {
                // The leaf isn't in the trie: removal is a no-op.
                FindNextToLeafOutcome::NotFound => return Ok(()),
                // Hit a blinded node on the way down; caller must reveal it and retry.
                FindNextToLeafOutcome::BlindedNode(path) => {
                    return Err(SparseTrieErrorKind::BlindedNode(path).into())
                }
                FindNextToLeafOutcome::Found => {
                    leaf_path = curr_path;
                    leaf_subtrie_type = curr_subtrie_type;
                    break;
                }
                FindNextToLeafOutcome::ContinueFrom(next_path) => {
                    match curr_node {
                        SparseNode::Branch { .. } => {
                            paths_to_mark_dirty
                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));

                            // A recorded extension only matters if it sits directly above
                            // the nearest branch; drop one that is now above an older
                            // (shallower) branch.
                            match (&branch_parent_path, &ext_grandparent_path) {
                                (Some(branch), Some(ext)) if branch.len() > ext.len() => {
                                    ext_grandparent_path = None;
                                    ext_grandparent_node = None;
                                }
                                _ => (),
                            };
                            branch_parent_path = Some(curr_path);
                            branch_parent_node = Some(curr_node.clone());
                        }
                        SparseNode::Extension { .. } => {
                            paths_to_mark_dirty
                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));
                            ext_grandparent_path = Some(curr_path);
                            ext_grandparent_node = Some(curr_node.clone());
                        }
                        SparseNode::Empty | SparseNode::Leaf { .. } => {
                            unreachable!(
                                "find_next_to_leaf only continues to a branch or extension"
                            )
                        }
                    }

                    curr_path = next_path;

                    // Traversal only ever crosses from the upper trie into a lower one,
                    // never back.
                    let next_subtrie_type = SparseSubtrieType::from_path(&curr_path);
                    if matches!(curr_subtrie_type, SparseSubtrieType::Upper) &&
                        matches!(next_subtrie_type, SparseSubtrieType::Lower(_))
                    {
                        curr_subtrie_type = next_subtrie_type;
                    }
                }
            };
        }

        // Phase 2: pre-validate. If removing this leaf would leave the parent branch with
        // a single remaining child that is blinded, the branch cannot be collapsed - bail
        // out before mutating anything so the operation stays atomic.
        if let (Some(branch_path), Some(SparseNode::Branch { state_mask, blinded_mask, .. })) =
            (&branch_parent_path, &branch_parent_node)
        {
            let mut check_mask = *state_mask;
            let child_nibble = leaf_path.get_unchecked(branch_path.len());
            check_mask.unset_bit(child_nibble);

            if check_mask.count_bits() == 1 {
                let remaining_nibble =
                    check_mask.first_set_bit_index().expect("state mask is not empty");

                if blinded_mask.is_bit_set(remaining_nibble) {
                    let mut path = *branch_path;
                    path.push_unchecked(remaining_nibble);
                    return Err(SparseTrieErrorKind::BlindedNode(path).into());
                }
            }
        }

        // Phase 3: commit. Record the change, drop the value, invalidate ancestor hashes,
        // and remove the leaf node itself.
        self.prefix_set.insert(*full_path);
        let leaf_subtrie = match leaf_subtrie_type {
            SparseSubtrieType::Upper => &mut self.upper_subtrie,
            SparseSubtrieType::Lower(idx) => {
                self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
            }
        };
        leaf_subtrie.inner.values.remove(full_path);
        for (subtrie_type, path) in paths_to_mark_dirty {
            let node = match subtrie_type {
                SparseSubtrieType::Upper => self.upper_subtrie.nodes.get_mut(&path),
                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx]
                    .as_revealed_mut()
                    .expect("lower subtrie is revealed")
                    .nodes
                    .get_mut(&path),
            }
            .expect("node exists");

            match node {
                SparseNode::Extension { state, .. } | SparseNode::Branch { state, .. } => {
                    *state = SparseNodeState::Dirty
                }
                SparseNode::Empty | SparseNode::Leaf { .. } => {
                    unreachable!(
                        "only branch and extension nodes can be marked dirty when removing a leaf"
                    )
                }
            }
        }
        self.remove_node(&leaf_path);

        // The leaf was the root: the trie is now empty.
        if leaf_path.is_empty() {
            self.upper_subtrie.nodes.insert(leaf_path, SparseNode::Empty);
            return Ok(());
        }

        // Phase 4: rewrite the parent branch; collapse it if only one child remains.
        if let (
            Some(branch_path),
            &Some(SparseNode::Branch { mut state_mask, blinded_mask, ref blinded_hashes, .. }),
        ) = (&branch_parent_path, &branch_parent_node)
        {
            let child_nibble = leaf_path.get_unchecked(branch_path.len());
            state_mask.unset_bit(child_nibble);

            let new_branch_node = if state_mask.count_bits() == 1 {
                let remaining_child_nibble =
                    state_mask.first_set_bit_index().expect("state mask is not empty");
                let mut remaining_child_path = *branch_path;
                remaining_child_path.push_unchecked(remaining_child_nibble);

                trace!(
                    target: "trie::parallel_sparse",
                    ?leaf_path,
                    ?branch_path,
                    ?remaining_child_path,
                    "Branch node has only one child",
                );

                // Should have been caught by the pre-validation above; re-checked here.
                if blinded_mask.is_bit_set(remaining_child_nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(remaining_child_path).into());
                }

                let remaining_child_node = self
                    .subtrie_for_path_mut(&remaining_child_path)
                    .nodes
                    .get(&remaining_child_path)
                    .unwrap();

                // Ask the helper what replaces the one-child branch and whether the
                // remaining child node gets absorbed into the replacement.
                let (new_branch_node, remove_child) = Self::branch_changes_on_leaf_removal(
                    branch_path,
                    &remaining_child_path,
                    remaining_child_node,
                );

                if remove_child {
                    self.move_value_on_leaf_removal(
                        branch_path,
                        &new_branch_node,
                        &remaining_child_path,
                    );
                    self.remove_node(&remaining_child_path);
                }

                // The branch no longer exists in its old shape; track its removal.
                if let Some(updates) = self.updates.as_mut() {
                    updates.updated_nodes.remove(branch_path);
                    updates.removed_nodes.insert(*branch_path);
                }

                new_branch_node
            } else {
                // More than one child remains: just clear the removed child's bit and
                // mark the branch dirty.
                SparseNode::Branch {
                    state_mask,
                    blinded_mask,
                    blinded_hashes: blinded_hashes.clone(),
                    state: SparseNodeState::Dirty,
                }
            };

            let branch_subtrie = self.subtrie_for_path_mut(branch_path);
            branch_subtrie.nodes.insert(*branch_path, new_branch_node.clone());
            branch_parent_node = Some(new_branch_node);
        };

        // Phase 5: if an extension sat directly above the (possibly collapsed) branch,
        // it may need rewriting to absorb the branch's replacement.
        if let (Some(ext_path), Some(SparseNode::Extension { key: shortkey, .. })) =
            (ext_grandparent_path, &ext_grandparent_node)
        {
            let ext_subtrie = self.subtrie_for_path_mut(&ext_path);
            let branch_path = branch_parent_path.as_ref().unwrap();

            if let Some(new_ext_node) = Self::extension_changes_on_leaf_removal(
                &ext_path,
                shortkey,
                branch_path,
                branch_parent_node.as_ref().unwrap(),
            ) {
                ext_subtrie.nodes.insert(ext_path, new_ext_node.clone());
                self.move_value_on_leaf_removal(&ext_path, &new_ext_node, branch_path);
                self.remove_node(branch_path);
            }
        }

        Ok(())
    }
1384
    /// Finalizes pruning for the given set of prune roots: every node strictly below a pruned
    /// root, and every value at or below one, is dropped from the upper and lower subtries and
    /// from the branch node masks. Lower subtries whose own root node is pruned away are cleared
    /// (blinded), preserving their allocations for reuse.
    ///
    /// The roots are expected to be prefix-free of one another (checked via `debug_assert!`).
    ///
    /// Returns the number of pruned roots that were processed.
    fn finalize_pruned_roots(&mut self, mut effective_pruned_roots: Vec<Nibbles>) -> usize {
        if effective_pruned_roots.is_empty() {
            return 0;
        }

        let nodes_converted = effective_pruned_roots.len();

        // Sort roots by subtrie type (upper first), then by path for efficient partitioning.
        effective_pruned_roots.sort_unstable_by(|path_a, path_b| {
            let subtrie_type_a = SparseSubtrieType::from_path(path_a);
            let subtrie_type_b = SparseSubtrieType::from_path(path_b);
            subtrie_type_a.cmp(&subtrie_type_b).then(path_a.cmp(path_b))
        });

        // Split off upper subtrie roots (they come first due to sorting)
        let num_upper_roots = effective_pruned_roots
            .iter()
            .position(|p| !SparseSubtrieType::path_len_is_upper(p.len()))
            .unwrap_or(effective_pruned_roots.len());

        let roots_upper = &effective_pruned_roots[..num_upper_roots];
        let roots_lower = &effective_pruned_roots[num_upper_roots..];

        // Prefix-freeness check: after a plain lexicographic sort, any prefix sorts
        // immediately before its extensions, so checking adjacent pairs suffices.
        debug_assert!(
            {
                let mut all_roots: Vec<_> = effective_pruned_roots.clone();
                all_roots.sort_unstable();
                all_roots.windows(2).all(|w| !w[1].starts_with(&w[0]))
            },
            "prune roots must be prefix-free"
        );

        // Upper prune roots that are prefixes of lower subtrie root paths cause the entire
        // subtrie to be cleared (preserving allocations for reuse).
        if !roots_upper.is_empty() {
            for subtrie in &mut *self.lower_subtries {
                let should_clear = subtrie.as_revealed_ref().is_some_and(|s| {
                    // `roots_upper` is sorted, so binary-search for the greatest root <= the
                    // subtrie's path; only that candidate can be a prefix of it.
                    let search_idx = roots_upper.partition_point(|root| root <= &s.path);
                    search_idx > 0 && s.path.starts_with(&roots_upper[search_idx - 1])
                });
                if should_clear {
                    subtrie.clear();
                }
            }
        }

        // Upper subtrie: prune nodes and values. Nodes at a pruned root itself are kept
        // (strict-descendant check), while values under any pruned root are dropped.
        self.upper_subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_upper, p));
        self.upper_subtrie.inner.values.retain(|p, _| {
            !starts_with_pruned_in(roots_upper, p) && !starts_with_pruned_in(roots_lower, p)
        });

        // Process lower subtries using chunk_by to group roots by subtrie
        for roots_group in roots_lower.chunk_by(|path_a, path_b| {
            SparseSubtrieType::from_path(path_a) == SparseSubtrieType::from_path(path_b)
        }) {
            let subtrie_idx = path_subtrie_index_unchecked(&roots_group[0]);

            // Skip unrevealed/blinded subtries - nothing to prune.
            let should_clear = {
                let Some(subtrie) = self.lower_subtries[subtrie_idx].as_revealed_mut() else {
                    continue;
                };

                // Retain only nodes/values not descended from any pruned root.
                subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_group, p));
                subtrie.inner.values.retain(|p, _| !starts_with_pruned_in(roots_group, p));

                // If prune removed the node at `subtrie.path`, the subtrie can no longer be
                // represented as revealed and must be blinded.
                !subtrie.nodes.contains_key(&subtrie.path)
            };

            if should_clear {
                self.lower_subtries[subtrie_idx].clear();
            }
        }

        // Branch node masks pruning: upper-path masks only need to be checked against upper
        // roots; lower-path masks may be under either an upper or a lower pruned root.
        self.branch_node_masks.retain(|p, _| {
            if SparseSubtrieType::path_len_is_upper(p.len()) {
                !starts_with_pruned_in(roots_upper, p)
            } else {
                !starts_with_pruned_in(roots_lower, p) && !starts_with_pruned_in(roots_upper, p)
            }
        });

        nodes_converted
    }
1474
1475    /// Returns a reference to the lower `SparseSubtrie` for the given path, or None if the
1476    /// path belongs to the upper trie, or if the lower subtrie for the path doesn't exist or is
1477    /// blinded.
1478    fn lower_subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1479        match SparseSubtrieType::from_path(path) {
1480            SparseSubtrieType::Upper => None,
1481            SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_ref(),
1482        }
1483    }
1484
1485    /// Returns a mutable reference to the lower `SparseSubtrie` for the given path, or None if the
1486    /// path belongs to the upper trie.
1487    ///
1488    /// This method will create/reveal a new lower subtrie for the given path if one isn't already.
1489    /// If one does exist, but its path field is longer than the given path, then the field will be
1490    /// set to the given path.
1491    fn lower_subtrie_for_path_mut(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1492        match SparseSubtrieType::from_path(path) {
1493            SparseSubtrieType::Upper => None,
1494            SparseSubtrieType::Lower(idx) => {
1495                self.lower_subtries[idx].reveal(path);
1496                Some(self.lower_subtries[idx].as_revealed_mut().expect("just revealed"))
1497            }
1498        }
1499    }
1500
1501    /// Returns a reference to either the lower or upper `SparseSubtrie` for the given path,
1502    /// depending on the path's length.
1503    ///
1504    /// Returns `None` if a lower subtrie does not exist for the given path.
1505    fn subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1506        if SparseSubtrieType::path_len_is_upper(path.len()) {
1507            Some(&self.upper_subtrie)
1508        } else {
1509            self.lower_subtrie_for_path(path)
1510        }
1511    }
1512
1513    /// Returns a mutable reference to either the lower or upper `SparseSubtrie` for the given path,
1514    /// depending on the path's length.
1515    ///
1516    /// This method will create/reveal a new lower subtrie for the given path if one isn't already.
1517    /// If one does exist, but its path field is longer than the given path, then the field will be
1518    /// set to the given path.
1519    fn subtrie_for_path_mut(&mut self, path: &Nibbles) -> &mut SparseSubtrie {
1520        // We can't just call `lower_subtrie_for_path` and return `upper_subtrie` if it returns
1521        // None, because Rust complains about double mutable borrowing `self`.
1522        if SparseSubtrieType::path_len_is_upper(path.len()) {
1523            &mut self.upper_subtrie
1524        } else {
1525            self.lower_subtrie_for_path_mut(path).unwrap()
1526        }
1527    }
1528
1529    /// Returns a mutable reference to a subtrie without marking it as modified.
1530    /// Used for internal operations like pruning that shouldn't affect heat tracking.
1531    fn subtrie_for_path_mut_untracked(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1532        if SparseSubtrieType::path_len_is_upper(path.len()) {
1533            Some(&mut self.upper_subtrie)
1534        } else {
1535            match SparseSubtrieType::from_path(path) {
1536                SparseSubtrieType::Upper => None,
1537                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_mut(),
1538            }
1539        }
1540    }
1541
1542    /// Returns the next node in the traversal path from the given path towards the leaf for the
1543    /// given full leaf path, or an error if any node along the traversal path is not revealed.
1544    ///
1545    ///
1546    /// ## Panics
1547    ///
1548    /// If `from_path` is not a prefix of `leaf_full_path`.
1549    fn find_next_to_leaf(
1550        from_path: &Nibbles,
1551        from_node: &SparseNode,
1552        leaf_full_path: &Nibbles,
1553    ) -> FindNextToLeafOutcome {
1554        debug_assert!(leaf_full_path.len() >= from_path.len());
1555        debug_assert!(leaf_full_path.starts_with(from_path));
1556
1557        match from_node {
1558            // If empty node is found it means the subtrie doesn't have any nodes in it, let alone
1559            // the target leaf.
1560            SparseNode::Empty => FindNextToLeafOutcome::NotFound,
1561            SparseNode::Leaf { key, .. } => {
1562                let mut found_full_path = *from_path;
1563                found_full_path.extend(key);
1564
1565                if &found_full_path == leaf_full_path {
1566                    return FindNextToLeafOutcome::Found
1567                }
1568                FindNextToLeafOutcome::NotFound
1569            }
1570            SparseNode::Extension { key, .. } => {
1571                if leaf_full_path.len() == from_path.len() {
1572                    return FindNextToLeafOutcome::NotFound
1573                }
1574
1575                let mut child_path = *from_path;
1576                child_path.extend(key);
1577
1578                if !leaf_full_path.starts_with(&child_path) {
1579                    return FindNextToLeafOutcome::NotFound
1580                }
1581                FindNextToLeafOutcome::ContinueFrom(child_path)
1582            }
1583            SparseNode::Branch { state_mask, blinded_mask, .. } => {
1584                if leaf_full_path.len() == from_path.len() {
1585                    return FindNextToLeafOutcome::NotFound
1586                }
1587
1588                let nibble = leaf_full_path.get_unchecked(from_path.len());
1589                if !state_mask.is_bit_set(nibble) {
1590                    return FindNextToLeafOutcome::NotFound
1591                }
1592
1593                let mut child_path = *from_path;
1594                child_path.push_unchecked(nibble);
1595
1596                if blinded_mask.is_bit_set(nibble) {
1597                    return FindNextToLeafOutcome::BlindedNode(child_path);
1598                }
1599
1600                FindNextToLeafOutcome::ContinueFrom(child_path)
1601            }
1602        }
1603    }
1604
1605    /// Called when a child node has collapsed into its parent as part of `remove_leaf`. If the
1606    /// new parent node is a leaf, then the previous child also was, and if the previous child was
1607    /// on a lower subtrie while the parent is on an upper then the leaf value needs to be moved to
1608    /// the upper.
1609    fn move_value_on_leaf_removal(
1610        &mut self,
1611        parent_path: &Nibbles,
1612        new_parent_node: &SparseNode,
1613        prev_child_path: &Nibbles,
1614    ) {
1615        // If the parent path isn't in the upper then it doesn't matter what the new node is,
1616        // there's no situation where a leaf value needs to be moved.
1617        if SparseSubtrieType::from_path(parent_path).lower_index().is_some() {
1618            return;
1619        }
1620
1621        if let SparseNode::Leaf { key, .. } = new_parent_node {
1622            let Some(prev_child_subtrie) = self.lower_subtrie_for_path_mut(prev_child_path) else {
1623                return;
1624            };
1625
1626            let mut leaf_full_path = *parent_path;
1627            leaf_full_path.extend(key);
1628
1629            let val = prev_child_subtrie.inner.values.remove(&leaf_full_path).expect("ParallelSparseTrie is in an inconsistent state, expected value on subtrie which wasn't found");
1630            self.upper_subtrie.inner.values.insert(leaf_full_path, val);
1631        }
1632    }
1633
    /// Used by `remove_leaf` to ensure that when a node is removed from a lower subtrie that any
    /// externalities are handled. These can include:
    /// - Removing the lower subtrie completely, if it is now empty.
    /// - Updating the `path` field of the lower subtrie to indicate that its root node has changed.
    ///
    /// This method assumes that the caller will deal with putting all other nodes in the trie into
    /// a consistent state after the removal of this one.
    ///
    /// ## Panics
    ///
    /// - If the removed node was not a leaf or extension.
    fn remove_node(&mut self, path: &Nibbles) {
        let subtrie = self.subtrie_for_path_mut(path);
        // `node` is `None` if nothing was stored at `path`; that case falls into the panic arm
        // below together with removed branch nodes.
        let node = subtrie.nodes.remove(path);

        let Some(idx) = SparseSubtrieType::from_path(path).lower_index() else {
            // When removing a node from the upper trie there's nothing special we need to do to fix
            // its path field; the upper trie's path is always empty.
            return;
        };

        match node {
            Some(SparseNode::Leaf { .. }) => {
                // If the leaf was the final node in its lower subtrie then we can blind the
                // subtrie, effectively marking it as empty.
                if subtrie.nodes.is_empty() {
                    self.lower_subtries[idx].clear();
                }
            }
            Some(SparseNode::Extension { key, .. }) => {
                // If the removed extension was the root node of a lower subtrie then the lower
                // subtrie's `path` needs to be updated to be whatever node the extension used to
                // point to.
                if &subtrie.path == path {
                    subtrie.path.extend(&key);
                }
            }
            _ => panic!("Expected to remove a leaf or extension, but removed {node:?}"),
        }
    }
1674
1675    /// Given the path to a parent branch node and a child node which is the sole remaining child on
1676    /// that branch after removing a leaf, returns a node to replace the parent branch node and a
1677    /// boolean indicating if the child should be deleted.
1678    ///
1679    /// ## Panics
1680    ///
1681    /// - If either parent or child node is not already revealed.
1682    /// - If parent's path is not a prefix of the child's path.
1683    fn branch_changes_on_leaf_removal(
1684        parent_path: &Nibbles,
1685        remaining_child_path: &Nibbles,
1686        remaining_child_node: &SparseNode,
1687    ) -> (SparseNode, bool) {
1688        debug_assert!(remaining_child_path.len() > parent_path.len());
1689        debug_assert!(remaining_child_path.starts_with(parent_path));
1690
1691        let remaining_child_nibble = remaining_child_path.get_unchecked(parent_path.len());
1692
1693        // If we swap the branch node out either an extension or leaf, depending on
1694        // what its remaining child is.
1695        match remaining_child_node {
1696            SparseNode::Empty => {
1697                panic!("remaining child must have been revealed already")
1698            }
1699            // If the only child is a leaf node, we downgrade the branch node into a
1700            // leaf node, prepending the nibble to the key, and delete the old
1701            // child.
1702            SparseNode::Leaf { key, .. } => {
1703                let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1704                new_key.extend(key);
1705                (SparseNode::new_leaf(new_key), true)
1706            }
1707            // If the only child node is an extension node, we downgrade the branch
1708            // node into an even longer extension node, prepending the nibble to the
1709            // key, and delete the old child.
1710            SparseNode::Extension { key, .. } => {
1711                let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1712                new_key.extend(key);
1713                (SparseNode::new_ext(new_key), true)
1714            }
1715            // If the only child is a branch node, we downgrade the current branch
1716            // node into a one-nibble extension node.
1717            SparseNode::Branch { .. } => (
1718                SparseNode::new_ext(Nibbles::from_nibbles_unchecked([remaining_child_nibble])),
1719                false,
1720            ),
1721        }
1722    }
1723
1724    /// Given the path to a parent extension and its key, and a child node (not necessarily on this
1725    /// subtrie), returns an optional replacement parent node. If a replacement is returned then the
1726    /// child node should be deleted.
1727    ///
1728    /// ## Panics
1729    ///
1730    /// - If either parent or child node is not already revealed.
1731    /// - If parent's path is not a prefix of the child's path.
1732    fn extension_changes_on_leaf_removal(
1733        parent_path: &Nibbles,
1734        parent_key: &Nibbles,
1735        child_path: &Nibbles,
1736        child: &SparseNode,
1737    ) -> Option<SparseNode> {
1738        debug_assert!(child_path.len() > parent_path.len());
1739        debug_assert!(child_path.starts_with(parent_path));
1740
1741        // If the parent node is an extension node, we need to look at its child to see
1742        // if we need to merge it.
1743        match child {
1744            SparseNode::Empty => {
1745                panic!("child must be revealed")
1746            }
1747            // For a leaf node, we collapse the extension node into a leaf node,
1748            // extending the key. While it's impossible to encounter an extension node
1749            // followed by a leaf node in a complete trie, it's possible here because we
1750            // could have downgraded the extension node's child into a leaf node from a
1751            // branch in a previous call to `branch_changes_on_leaf_removal`.
1752            SparseNode::Leaf { key, .. } => {
1753                let mut new_key = *parent_key;
1754                new_key.extend(key);
1755                Some(SparseNode::new_leaf(new_key))
1756            }
1757            // Similar to the leaf node, for an extension node, we collapse them into one
1758            // extension node, extending the key.
1759            SparseNode::Extension { key, .. } => {
1760                let mut new_key = *parent_key;
1761                new_key.extend(key);
1762                Some(SparseNode::new_ext(new_key))
1763            }
1764            // For a branch node, we just leave the extension node as-is.
1765            SparseNode::Branch { .. } => None,
1766        }
1767    }
1768
1769    /// Drains any [`SparseTrieUpdatesAction`]s from the given subtrie, and applies each action to
1770    /// the given `updates` set. If the given set is None then this is a no-op.
1771    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
1772    fn apply_subtrie_update_actions(
1773        &mut self,
1774        update_actions: impl Iterator<Item = SparseTrieUpdatesAction>,
1775    ) {
1776        if let Some(updates) = self.updates.as_mut() {
1777            let additional = update_actions.size_hint().0;
1778            updates.updated_nodes.reserve(additional);
1779            updates.removed_nodes.reserve(additional);
1780            for action in update_actions {
1781                match action {
1782                    SparseTrieUpdatesAction::InsertRemoved(path) => {
1783                        updates.updated_nodes.remove(&path);
1784                        updates.removed_nodes.insert(path);
1785                    }
1786                    SparseTrieUpdatesAction::RemoveUpdated(path) => {
1787                        updates.updated_nodes.remove(&path);
1788                    }
1789                    SparseTrieUpdatesAction::InsertUpdated(path, branch_node) => {
1790                        updates.updated_nodes.insert(path, branch_node);
1791                        updates.removed_nodes.remove(&path);
1792                    }
1793                }
1794            }
1795        };
1796    }
1797
    /// Updates hashes for the upper subtrie, using nodes from both upper and lower subtries.
    ///
    /// Performs an iterative depth-first walk starting from the root, driven by the upper
    /// subtrie's path stack, and returns the resulting root RLP node.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, ret)]
    fn update_upper_subtrie_hashes(&mut self, prefix_set: &mut PrefixSet) -> RlpNode {
        trace!(target: "trie::parallel_sparse", "Updating upper subtrie hashes");

        debug_assert!(self.upper_subtrie.inner.buffers.path_stack.is_empty());
        self.upper_subtrie.inner.buffers.path_stack.push(RlpNodePathStackItem {
            path: Nibbles::default(), // Start from root
            is_in_prefix_set: None,
        });

        #[cfg(feature = "metrics")]
        let start = Instant::now();

        // When update retention is enabled, reuse a pooled action buffer to collect branch node
        // update actions produced during hashing.
        let mut update_actions_buf =
            self.updates_enabled().then(|| self.update_actions_buffers.pop().unwrap_or_default());

        while let Some(stack_item) = self.upper_subtrie.inner.buffers.path_stack.pop() {
            let path = stack_item.path;
            // Paths shorter than the cutoff depth live in the upper subtrie; at the cutoff depth
            // the node is the root of a lower subtrie and must be fetched from there.
            let node = if path.len() < UPPER_TRIE_MAX_DEPTH {
                self.upper_subtrie.nodes.get_mut(&path).expect("upper subtrie node must exist")
            } else {
                let index = path_subtrie_index_unchecked(&path);
                let node = self.lower_subtries[index]
                    .as_revealed_mut()
                    .expect("lower subtrie must exist")
                    .nodes
                    .get_mut(&path)
                    .expect("lower subtrie node must exist");
                // Lower subtrie root node RLP nodes must be computed before updating upper subtrie
                // hashes
                debug_assert!(
                    node.cached_rlp_node().is_some(),
                    "Lower subtrie root node {node:?} at path {path:?} has no cached RLP node"
                );
                node
            };

            // Calculate the RLP node for the current node using upper subtrie
            self.upper_subtrie.inner.rlp_node(
                prefix_set,
                &mut update_actions_buf,
                stack_item,
                node,
                &self.branch_node_masks,
            );
        }

        // If there were any branch node updates as a result of calculating the RLP node for the
        // upper trie then apply them to the top-level set.
        if let Some(mut update_actions_buf) = update_actions_buf {
            self.apply_subtrie_update_actions(
                #[expect(clippy::iter_with_drain)]
                update_actions_buf.drain(..),
            );
            // Return the drained buffer to the pool for reuse.
            self.update_actions_buffers.push(update_actions_buf);
        }

        #[cfg(feature = "metrics")]
        self.metrics.subtrie_upper_hash_latency.record(start.elapsed());

        // After a complete walk, exactly the root's RLP node remains on the stack.
        debug_assert_eq!(self.upper_subtrie.inner.buffers.rlp_node_stack.len(), 1);
        self.upper_subtrie.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
    }
1862
    /// Returns:
    /// 1. List of lower [subtries](SparseSubtrie) that have changed according to the provided
    ///    [prefix set](PrefixSet). See documentation of [`ChangedSubtrie`] for more details. Lower
    ///    subtries whose root node is missing a hash will also be returned; this is required to
    ///    handle cases where extensions/leafs get shortened and therefore moved from the upper to a
    ///    lower subtrie.
    /// 2. Prefix set of keys that do not belong to any lower subtrie.
    ///
    /// This method helps optimize hash recalculations by identifying which specific
    /// lower subtries need to be updated. Each lower subtrie can then be updated in parallel.
    ///
    /// IMPORTANT: The method removes the subtries from `lower_subtries`, and the caller is
    /// responsible for returning them back into the array.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(prefix_set_len = prefix_set.len()))]
    fn take_changed_lower_subtries(
        &mut self,
        prefix_set: &mut PrefixSet,
    ) -> (Vec<ChangedSubtrie>, PrefixSetMut) {
        // Fast-path: If the prefix set is empty then no subtries can have been changed. Just return
        // empty values.
        if prefix_set.is_empty() {
            return Default::default();
        }

        // Clone the prefix set to iterate over its keys. Cloning is cheap, it's just an Arc.
        let prefix_set_clone = prefix_set.clone();
        let mut prefix_set_iter = prefix_set_clone.into_iter().copied().peekable();
        let mut changed_subtries = Vec::new();
        let mut unchanged_prefix_set = PrefixSetMut::default();
        let updates_enabled = self.updates_enabled();

        // Iterating subtries in array order means their `path`s ascend, which lets a single pass
        // of the sorted `prefix_set_iter` be partitioned across all subtries below.
        for (index, subtrie) in self.lower_subtries.iter_mut().enumerate() {
            // Take the subtrie when a changed key touches it, or when its root node has no cached
            // RLP node yet (e.g. a node newly moved down from the upper trie).
            if let Some(subtrie) = subtrie.take_revealed_if(|subtrie| {
                prefix_set.contains(&subtrie.path) ||
                    subtrie
                        .nodes
                        .get(&subtrie.path)
                        .is_some_and(|n| n.cached_rlp_node().is_none())
            }) {
                let prefix_set = if prefix_set.all() {
                    unchanged_prefix_set = PrefixSetMut::all();
                    PrefixSetMut::all()
                } else {
                    // Take those keys from the original prefix set that start with the subtrie path
                    //
                    // Subtries are stored in the order of their paths, so we can use the same
                    // prefix set iterator.
                    let mut new_prefix_set = Vec::new();
                    while let Some(key) = prefix_set_iter.peek() {
                        if key.starts_with(&subtrie.path) {
                            // If the key starts with the subtrie path, add it to the new prefix set
                            new_prefix_set.push(prefix_set_iter.next().unwrap());
                        } else if new_prefix_set.is_empty() && key < &subtrie.path {
                            // If we didn't yet have any keys that belong to this subtrie, and the
                            // current key is still less than the subtrie path, add it to the
                            // unchanged prefix set
                            unchanged_prefix_set.insert(prefix_set_iter.next().unwrap());
                        } else {
                            // If we're past the subtrie path, we're done with this subtrie. Do not
                            // advance the iterator, the next key will be processed either by the
                            // next subtrie or inserted into the unchanged prefix set.
                            break
                        }
                    }
                    PrefixSetMut::from(new_prefix_set)
                }
                .freeze();

                // We need the full path of root node of the lower subtrie to the unchanged prefix
                // set, so that we don't skip it when calculating hashes for the upper subtrie.
                match subtrie.nodes.get(&subtrie.path) {
                    Some(SparseNode::Extension { key, .. } | SparseNode::Leaf { key, .. }) => {
                        unchanged_prefix_set.insert(subtrie.path.join(key));
                    }
                    Some(SparseNode::Branch { .. }) => {
                        unchanged_prefix_set.insert(subtrie.path);
                    }
                    _ => {}
                }

                // Hand each changed subtrie a pooled action buffer when update retention is on.
                let update_actions_buf =
                    updates_enabled.then(|| self.update_actions_buffers.pop().unwrap_or_default());

                changed_subtries.push(ChangedSubtrie {
                    index,
                    subtrie,
                    prefix_set,
                    update_actions_buf,
                });
            }
        }

        // Extend the unchanged prefix set with the remaining keys that are not part of any subtries
        unchanged_prefix_set.extend_keys(prefix_set_iter);

        (changed_subtries, unchanged_prefix_set)
    }
1960
1961    /// Returns an iterator over all nodes in the trie in no particular order.
1962    #[cfg(test)]
1963    fn all_nodes(&self) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
1964        let mut nodes = vec![];
1965        for subtrie in self.lower_subtries.iter().filter_map(LowerSparseSubtrie::as_revealed_ref) {
1966            nodes.extend(subtrie.nodes.iter())
1967        }
1968        nodes.extend(self.upper_subtrie.nodes.iter());
1969        nodes
1970    }
1971
    /// Reveals a trie node in the upper trie if it has not been revealed before. When revealing
    /// branch/extension nodes this may recurse into a lower trie to reveal a child.
    ///
    /// This function decodes a trie node and inserts it into the trie structure. It handles
    /// different node types (leaf, extension, branch) by appropriately adding them to the trie and
    /// recursively revealing their children.
    ///
    /// # Arguments
    ///
    /// * `path` - The path where the node should be revealed
    /// * `node` - The trie node to reveal
    /// * `masks` - Branch node masks if known
    ///
    /// # Returns
    ///
    /// `Ok(())` if successful, or an error if the node was not revealed.
    fn reveal_upper_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
    ) -> SparseTrieResult<()> {
        // Only reveal nodes that can be reached given the current state of the upper trie. If they
        // can't be reached, it means that they were removed.
        if !self.is_path_reachable_from_upper(&path) {
            return Ok(())
        }

        // Exit early if the node was already revealed before.
        if !self.upper_subtrie.reveal_node(path, node, masks, None)? {
            if let TrieNodeV2::Branch(branch) = node {
                if branch.key.is_empty() {
                    return Ok(());
                }

                // We might still potentially need to reveal a child branch node in the lower
                // subtrie, even if the upper subtrie already knew about the extension node.
                if SparseSubtrieType::path_len_is_upper(path.len() + branch.key.len()) {
                    return Ok(())
                }
            } else {
                return Ok(());
            }
        }

        // The previous upper_trie.reveal_node call will not have revealed any child nodes via
        // reveal_node_or_hash if the child node would be found on a lower subtrie. We handle that
        // here by manually checking the specific cases where this could happen, and calling
        // reveal_node_or_hash for each.
        match node {
            TrieNodeV2::Branch(branch) => {
                // Full path of the branch itself: the reveal path extended by the branch's key.
                let mut branch_path = path;
                branch_path.extend(&branch.key);

                // If only the parent extension belongs to the upper trie, we need to reveal the
                // actual branch node in the corresponding lower subtrie.
                if !SparseSubtrieType::path_len_is_upper(branch_path.len()) {
                    self.lower_subtrie_for_path_mut(&branch_path)
                        .expect("branch_path must have a lower subtrie")
                        .reveal_branch(
                            branch_path,
                            branch.state_mask,
                            &branch.stack,
                            masks,
                            branch.branch_rlp_node.clone(),
                        )?
                } else if !SparseSubtrieType::path_len_is_upper(branch_path.len() + 1) {
                    // If a branch is at the cutoff level of the trie then it will be in the upper
                    // trie, but all of its children will be in a lower trie.
                    // Check if a child node would be in the lower subtrie, and
                    // reveal accordingly.
                    for (stack_ptr, idx) in branch.state_mask.iter().enumerate() {
                        let mut child_path = branch_path;
                        child_path.push_unchecked(idx);
                        let child = &branch.stack[stack_ptr];

                        // Only reveal children that are not hashes. Hashes are stored on branch
                        // nodes directly.
                        if !child.is_hash() {
                            self.lower_subtrie_for_path_mut(&child_path)
                                .expect("child_path must have a lower subtrie")
                                .reveal_node(
                                    child_path,
                                    &TrieNodeV2::decode(&mut branch.stack[stack_ptr].as_ref())?,
                                    None,
                                    None,
                                )?;
                        }
                    }
                }
            }
            TrieNodeV2::Extension(ext) => {
                // The extension's child may land in a lower subtrie; reveal it there if so.
                let mut child_path = path;
                child_path.extend(&ext.key);
                if let Some(subtrie) = self.lower_subtrie_for_path_mut(&child_path) {
                    subtrie.reveal_node(
                        child_path,
                        &TrieNodeV2::decode(&mut ext.child.as_ref())?,
                        None,
                        None,
                    )?;
                }
            }
            // Empty-root and leaf nodes have no children that could live in a lower subtrie.
            TrieNodeV2::EmptyRoot | TrieNodeV2::Leaf(_) => (),
        }

        Ok(())
    }
2080
2081    /// Return updated subtries back to the trie after executing any actions required on the
2082    /// top-level `SparseTrieUpdates`.
2083    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
2084    fn insert_changed_subtries(
2085        &mut self,
2086        changed_subtries: impl IntoIterator<Item = ChangedSubtrie>,
2087    ) {
2088        for ChangedSubtrie { index, subtrie, update_actions_buf, .. } in changed_subtries {
2089            if let Some(mut update_actions_buf) = update_actions_buf {
2090                self.apply_subtrie_update_actions(
2091                    #[expect(clippy::iter_with_drain)]
2092                    update_actions_buf.drain(..),
2093                );
2094                self.update_actions_buffers.push(update_actions_buf);
2095            }
2096
2097            self.lower_subtries[index] = LowerSparseSubtrie::Revealed(subtrie);
2098        }
2099    }
2100
2101    /// Returns a heuristic for the in-memory size of this trie in bytes.
2102    ///
2103    /// This is an approximation that accounts for:
2104    /// - The upper subtrie nodes and values
2105    /// - All revealed lower subtries nodes and values
2106    /// - The prefix set keys
2107    /// - The branch node masks map
2108    /// - Updates if retained
2109    /// - Update action buffers
2110    ///
2111    /// Note: Heap allocations for hash maps may be larger due to load factor overhead.
2112    pub fn memory_size(&self) -> usize {
2113        let mut size = core::mem::size_of::<Self>();
2114
2115        // Upper subtrie
2116        size += self.upper_subtrie.memory_size();
2117
2118        // Lower subtries (both Revealed and Blind with allocation)
2119        for subtrie in self.lower_subtries.iter() {
2120            size += subtrie.memory_size();
2121        }
2122
2123        // Prefix set keys
2124        size += self.prefix_set.len() * core::mem::size_of::<Nibbles>();
2125
2126        // Branch node masks map
2127        size += self.branch_node_masks.len() *
2128            (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeMasks>());
2129
2130        // Updates if present
2131        if let Some(updates) = &self.updates {
2132            size += updates.updated_nodes.len() *
2133                (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeCompact>());
2134            size += updates.removed_nodes.len() * core::mem::size_of::<Nibbles>();
2135        }
2136
2137        // Update actions buffers
2138        for buf in &self.update_actions_buffers {
2139            size += buf.capacity() * core::mem::size_of::<SparseTrieUpdatesAction>();
2140        }
2141
2142        size
2143    }
2144
    /// Determines if the given path can be directly reached from the upper trie.
    ///
    /// Walks the upper subtrie from the root, following branch children and extension keys,
    /// and returns `true` only if every step along `path` is present and consistent. Returns
    /// `false` as soon as a node is missing, a branch lacks the required child bit, an
    /// extension key diverges from `path`, or an empty/leaf node is hit before `path` is
    /// fully consumed.
    fn is_path_reachable_from_upper(&self, path: &Nibbles) -> bool {
        // `current` is the prefix of `path` traversed so far.
        let mut current = Nibbles::default();
        while current.len() < path.len() {
            // A missing node means the path cannot be reached.
            let Some(node) = self.upper_subtrie.nodes.get(&current) else { return false };
            match node {
                SparseNode::Branch { state_mask, .. } => {
                    // The branch must have a child at the next nibble of `path`.
                    if !state_mask.is_bit_set(path.get_unchecked(current.len())) {
                        return false
                    }

                    current.push_unchecked(path.get_unchecked(current.len()));
                }
                SparseNode::Extension { key, .. } => {
                    // The extension key must match the corresponding segment of `path`.
                    // NOTE(review): assumes `path` is long enough to cover the full extension
                    // key — `slice` would panic otherwise; confirm callers guarantee this.
                    if *key != path.slice(current.len()..current.len() + key.len()) {
                        return false
                    }
                    current.extend(key);
                }
                // Empty and leaf nodes cannot have descendants along `path`.
                SparseNode::Empty | SparseNode::Leaf { .. } => return false,
            }
        }
        true
    }
2169
2170    /// Checks if a boundary leaf (at `path.len() == UPPER_TRIE_MAX_DEPTH`) is reachable from its
2171    /// parent branch in the upper subtrie.
2172    ///
2173    /// This is used for leaves that sit at the upper/lower subtrie boundary, where the leaf is
2174    /// in a lower subtrie but its parent branch is in the upper subtrie.
2175    fn is_boundary_leaf_reachable(
2176        upper_nodes: &HashMap<Nibbles, SparseNode>,
2177        path: &Nibbles,
2178        node: &TrieNodeV2,
2179    ) -> bool {
2180        debug_assert_eq!(path.len(), UPPER_TRIE_MAX_DEPTH);
2181
2182        if !matches!(node, TrieNodeV2::Leaf(_)) {
2183            return true
2184        }
2185
2186        let parent_path = path.slice(..path.len() - 1);
2187        let leaf_nibble = path.get_unchecked(path.len() - 1);
2188
2189        match upper_nodes.get(&parent_path) {
2190            Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2191            _ => false,
2192        }
2193    }
2194
2195    /// Returns a bitset of all subtries that are reachable from the upper trie. If subtrie is not
2196    /// reachable it means that it does not exist.
2197    fn reachable_subtries(&self) -> SubtriesBitmap {
2198        let mut reachable = SubtriesBitmap::default();
2199
2200        let mut stack = Vec::new();
2201        stack.push(Nibbles::default());
2202
2203        while let Some(current) = stack.pop() {
2204            let Some(node) = self.upper_subtrie.nodes.get(&current) else { continue };
2205            match node {
2206                SparseNode::Branch { state_mask, .. } => {
2207                    for idx in state_mask.iter() {
2208                        let mut next = current;
2209                        next.push_unchecked(idx);
2210                        if next.len() >= UPPER_TRIE_MAX_DEPTH {
2211                            reachable.set(path_subtrie_index_unchecked(&next));
2212                        } else {
2213                            stack.push(next);
2214                        }
2215                    }
2216                }
2217                SparseNode::Extension { key, .. } => {
2218                    let mut next = current;
2219                    next.extend(key);
2220                    if next.len() >= UPPER_TRIE_MAX_DEPTH {
2221                        reachable.set(path_subtrie_index_unchecked(&next));
2222                    } else {
2223                        stack.push(next);
2224                    }
2225                }
2226                SparseNode::Empty | SparseNode::Leaf { .. } => {}
2227            };
2228        }
2229
2230        reachable
2231    }
2232}
2233
/// Bitset tracking which of the 256 lower subtries were modified in the current cycle.
///
/// Backed by a single [`U256`], one bit per lower subtrie index (see [`NUM_LOWER_SUBTRIES`]).
#[derive(Clone, Default, PartialEq, Eq, Debug)]
struct SubtriesBitmap(U256);

impl SubtriesBitmap {
    /// Marks a subtrie index.
    ///
    /// Debug-asserts that `idx` is within [`NUM_LOWER_SUBTRIES`].
    #[inline]
    fn set(&mut self, idx: usize) {
        debug_assert!(idx < NUM_LOWER_SUBTRIES);
        self.0.set_bit(idx, true);
    }

    /// Returns whether a subtrie index is set.
    ///
    /// Debug-asserts that `idx` is within [`NUM_LOWER_SUBTRIES`].
    #[inline]
    fn get(&self, idx: usize) -> bool {
        debug_assert!(idx < NUM_LOWER_SUBTRIES);
        self.0.bit(idx)
    }
}
2253
/// This is a subtrie of the [`ParallelSparseTrie`] that contains a map from path to sparse trie
/// nodes.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrie {
    /// The root path of this subtrie.
    ///
    /// This is the _full_ path to this subtrie, meaning it includes the first
    /// [`UPPER_TRIE_MAX_DEPTH`] nibbles that we also use for indexing subtries in the
    /// [`ParallelSparseTrie`].
    ///
    /// There should be a node for this path in `nodes` map.
    pub(crate) path: Nibbles,
    /// The map from paths to sparse trie nodes within this subtrie.
    ///
    /// Keys are full paths, i.e. they include the subtrie's root-path prefix.
    nodes: HashMap<Nibbles, SparseNode>,
    /// Subset of fields for mutable access while `nodes` field is also being mutably borrowed.
    inner: SparseSubtrieInner,
}
2271
/// Returned by the `find_next_to_leaf` method to indicate either that the leaf has been found,
/// traversal should be continued from the given path, or the leaf is not in the trie.
enum FindNextToLeafOutcome {
    /// `Found` indicates that the leaf was found at the given path.
    Found,
    /// `ContinueFrom` indicates that traversal should continue from the given path.
    ContinueFrom(Nibbles),
    /// `NotFound` indicates that there is no way to traverse to the leaf, as it is not in the
    /// trie.
    NotFound,
    /// `BlindedNode` indicates that the node at the contained path is blinded (known only by
    /// hash) and cannot be traversed.
    BlindedNode(Nibbles),
}
2286
2287impl SparseSubtrie {
    /// Creates a new empty subtrie with the specified root path.
    ///
    /// All other fields (node map, value map, buffers) start out empty/default.
    pub(crate) fn new(path: Nibbles) -> Self {
        Self { path, ..Default::default() }
    }
2292
    /// Returns true if this subtrie has any nodes, false otherwise.
    ///
    /// Note: only the `nodes` map is consulted; values and buffers are not checked.
    pub(crate) fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }
2297
2298    /// Returns true if the current path and its child are both found in the same level.
2299    fn is_child_same_level(current_path: &Nibbles, child_path: &Nibbles) -> bool {
2300        let current_level = core::mem::discriminant(&SparseSubtrieType::from_path(current_path));
2301        let child_level = core::mem::discriminant(&SparseSubtrieType::from_path(child_path));
2302        current_level == child_level
2303    }
2304
2305    /// Checks if a leaf node at the given path is reachable from its parent branch node.
2306    ///
2307    /// Returns `true` if:
2308    /// - The path is at the root (no parent to check)
2309    /// - The parent branch node has the corresponding `state_mask` bit set for this leaf
2310    ///
2311    /// Returns `false` if the parent is a branch node that doesn't have the `state_mask` bit set
2312    /// for this leaf's nibble, meaning the leaf is not reachable.
2313    fn is_leaf_reachable_from_parent(&self, path: &Nibbles) -> bool {
2314        if path.is_empty() {
2315            return true
2316        }
2317
2318        let parent_path = path.slice(..path.len() - 1);
2319        let leaf_nibble = path.get_unchecked(path.len() - 1);
2320
2321        match self.nodes.get(&parent_path) {
2322            Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2323            _ => false,
2324        }
2325    }
2326
2327    /// Updates or inserts a leaf node at the specified key path with the provided RLP-encoded
2328    /// value.
2329    ///
2330    /// If the leaf did not previously exist, this method adjusts the trie structure by inserting
2331    /// new leaf nodes, splitting branch nodes, or collapsing extension nodes as needed.
2332    ///
2333    /// # Returns
2334    ///
2335    /// This method is atomic: if an error occurs during structural changes, all modifications
2336    /// are rolled back and the trie state is unchanged.
2337    pub fn update_leaf(&mut self, full_path: Nibbles, value: Vec<u8>) -> SparseTrieResult<()> {
2338        debug_assert!(full_path.starts_with(&self.path));
2339
2340        // Check if value already exists - if so, just update it (no structural changes needed)
2341        if let Entry::Occupied(mut e) = self.inner.values.entry(full_path) {
2342            e.insert(value);
2343            return Ok(())
2344        }
2345
2346        // Here we are starting at the root of the subtrie, and traversing from there.
2347        let mut current = Some(self.path);
2348
2349        while let Some(current_path) = current.as_mut() {
2350            match self.update_next_node(current_path, &full_path)? {
2351                LeafUpdateStep::Continue => {}
2352                LeafUpdateStep::NodeNotFound | LeafUpdateStep::Complete { .. } => break,
2353            }
2354        }
2355
2356        // Only insert the value after all structural changes succeed
2357        self.inner.values.insert(full_path, value);
2358
2359        Ok(())
2360    }
2361
    /// Processes the current node, returning what to do next in the leaf update process.
    ///
    /// This will add or update any nodes in the trie as necessary.
    ///
    /// On entry, `current` is the path of the node to examine and `path` is the full path of the
    /// leaf being inserted; `path` must start with `current`. When traversal continues, `current`
    /// is advanced in place (extended by an extension key or by a branch child nibble).
    ///
    /// Returns a `LeafUpdateStep` containing the next node to process (if any) and
    /// the paths of nodes that were inserted during this step.
    fn update_next_node(
        &mut self,
        current: &mut Nibbles,
        path: &Nibbles,
    ) -> SparseTrieResult<LeafUpdateStep> {
        debug_assert!(path.starts_with(&self.path));
        debug_assert!(current.starts_with(&self.path));
        debug_assert!(path.starts_with(current));
        // If no node exists at `current`, traversal cannot proceed.
        let Some(node) = self.nodes.get_mut(current) else {
            return Ok(LeafUpdateStep::NodeNotFound);
        };

        match node {
            SparseNode::Empty => {
                // The subtrie is empty: the new leaf simply replaces the empty root.
                // We need to insert the node with a different path and key depending on the path of
                // the subtrie.
                let path = path.slice(self.path.len()..);
                *node = SparseNode::new_leaf(path);
                Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
            }
            SparseNode::Leaf { key: current_key, .. } => {
                // Advance `current` to the existing leaf's full path.
                current.extend(current_key);

                // this leaf is being updated
                debug_assert!(current != path, "we already checked leaf presence in the beginning");

                // find the common prefix
                let common = current.common_prefix_length(path);

                // update existing node: the leaf is replaced by an extension covering the
                // shared prefix (the extension key may be empty)
                let new_ext_key = current.slice(current.len() - current_key.len()..common);
                *node = SparseNode::new_ext(new_ext_key);

                // create a branch node and corresponding leaves
                self.nodes.reserve(3);
                let branch_path = current.slice(..common);
                let new_leaf_path = path.slice(..=common);
                let existing_leaf_path = current.slice(..=common);

                // The branch splits at the first divergent nibble of the two leaf paths.
                self.nodes.insert(
                    branch_path,
                    SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    ),
                );
                self.nodes.insert(new_leaf_path, SparseNode::new_leaf(path.slice(common + 1..)));
                self.nodes
                    .insert(existing_leaf_path, SparseNode::new_leaf(current.slice(common + 1..)));

                Ok(LeafUpdateStep::complete_with_insertions(vec![
                    branch_path,
                    new_leaf_path,
                    existing_leaf_path,
                ]))
            }
            SparseNode::Extension { key, .. } => {
                // Advance `current` to the extension's child path.
                current.extend(key);

                if !path.starts_with(current) {
                    // The leaf diverges inside the extension key: split the extension.
                    // find the common prefix
                    let common = current.common_prefix_length(path);
                    // Shorten the existing extension to cover only the shared prefix.
                    *key = current.slice(current.len() - key.len()..common);

                    // create state mask for new branch node
                    // NOTE: this might overwrite the current extension node
                    self.nodes.reserve(3);
                    let branch_path = current.slice(..common);
                    let new_leaf_path = path.slice(..=common);
                    let branch = SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    );

                    self.nodes.insert(branch_path, branch);

                    // create new leaf
                    let new_leaf = SparseNode::new_leaf(path.slice(common + 1..));
                    self.nodes.insert(new_leaf_path, new_leaf);

                    let mut inserted_nodes = vec![branch_path, new_leaf_path];

                    // recreate extension to previous child if needed (only when part of the
                    // original extension key remains below the new branch)
                    let key = current.slice(common + 1..);
                    if !key.is_empty() {
                        let ext_path = current.slice(..=common);
                        self.nodes.insert(ext_path, SparseNode::new_ext(key));
                        inserted_nodes.push(ext_path);
                    }

                    return Ok(LeafUpdateStep::complete_with_insertions(inserted_nodes))
                }

                // The leaf path passes through this extension; keep traversing.
                Ok(LeafUpdateStep::Continue)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                // Descend into the child slot selected by the next nibble of the leaf path.
                let nibble = path.get_unchecked(current.len());
                current.push_unchecked(nibble);

                if !state_mask.is_bit_set(nibble) {
                    // No child at this nibble yet: insert the new leaf directly under the branch.
                    state_mask.set_bit(nibble);
                    let new_leaf = SparseNode::new_leaf(path.slice(current.len()..));
                    self.nodes.insert(*current, new_leaf);
                    return Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
                }

                // A blinded (hash-only) child cannot be descended into.
                if blinded_mask.is_bit_set(nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(*current).into());
                }

                // If the nibble is set, we can continue traversing the branch.
                Ok(LeafUpdateStep::Continue)
            }
        }
    }
2483
2484    /// Reveals a branch node at the given path.
2485    fn reveal_branch(
2486        &mut self,
2487        path: Nibbles,
2488        state_mask: TrieMask,
2489        children: &[RlpNode],
2490        masks: Option<BranchNodeMasks>,
2491        rlp_node: Option<RlpNode>,
2492    ) -> SparseTrieResult<()> {
2493        match self.nodes.entry(path) {
2494            Entry::Occupied(_) => {
2495                // Branch already revealed, do nothing
2496                return Ok(());
2497            }
2498            Entry::Vacant(entry) => {
2499                let state =
2500                    match rlp_node.as_ref() {
2501                        Some(rlp_node) => SparseNodeState::Cached {
2502                            rlp_node: rlp_node.clone(),
2503                            store_in_db_trie: Some(masks.is_some_and(|m| {
2504                                !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
2505                            })),
2506                        },
2507                        None => SparseNodeState::Dirty,
2508                    };
2509
2510                let mut blinded_mask = TrieMask::default();
2511                let mut blinded_hashes = Box::new([B256::ZERO; 16]);
2512
2513                for (stack_ptr, idx) in state_mask.iter().enumerate() {
2514                    let mut child_path = path;
2515                    child_path.push_unchecked(idx);
2516                    let child = &children[stack_ptr];
2517
2518                    if let Some(hash) = child.as_hash() {
2519                        blinded_mask.set_bit(idx);
2520                        blinded_hashes[idx as usize] = hash;
2521                    }
2522                }
2523
2524                entry.insert(SparseNode::Branch {
2525                    state_mask,
2526                    state,
2527                    blinded_mask,
2528                    blinded_hashes,
2529                });
2530            }
2531        }
2532
2533        // For a branch node, iterate over all children. This must happen second so leaf
2534        // children can check connectivity with parent branch.
2535        for (stack_ptr, idx) in state_mask.iter().enumerate() {
2536            let mut child_path = path;
2537            child_path.push_unchecked(idx);
2538            let child = &children[stack_ptr];
2539            if !child.is_hash() && Self::is_child_same_level(&path, &child_path) {
2540                // Reveal each child node or hash it has, but only if the child is on
2541                // the same level as the parent.
2542                self.reveal_node(
2543                    child_path,
2544                    &TrieNodeV2::decode(&mut child.as_ref())?,
2545                    None,
2546                    None,
2547                )?;
2548            }
2549        }
2550
2551        Ok(())
2552    }
2553
    /// Internal implementation of the method of the same name on `ParallelSparseTrie`.
    ///
    /// This accepts `hash_from_upper` to handle cases when boundary nodes revealed in lower subtrie
    /// but its blinded hash is known from the upper subtrie.
    ///
    /// Returns `Ok(true)` when the node was revealed, `Ok(false)` when it was skipped (already
    /// revealed, not connected to a revealed parent branch, or its leaf value already present).
    fn reveal_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        hash_from_upper: Option<B256>,
    ) -> SparseTrieResult<bool> {
        debug_assert!(path.starts_with(&self.path));

        // If the node is already revealed, do nothing.
        if self.nodes.contains_key(&path) {
            return Ok(false);
        }

        // If the hash is provided from the upper subtrie, use it. Otherwise, find the parent branch
        // node, unset its blinded bit and use the hash.
        let hash = if let Some(hash) = hash_from_upper {
            Some(hash)
        } else if path.len() != UPPER_TRIE_MAX_DEPTH && !path.is_empty() {
            // The parent must be a revealed branch within this same subtrie; otherwise the
            // node is not connected and is skipped.
            let Some(SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. }) =
                self.nodes.get_mut(&path.slice(0..path.len() - 1))
            else {
                return Ok(false);
            };
            let nibble = path.last().unwrap();
            if !state_mask.is_bit_set(nibble) {
                return Ok(false);
            }

            // Consume the parent's blinded hash for this child, if it has one.
            blinded_mask.is_bit_set(nibble).then(|| {
                blinded_mask.unset_bit(nibble);
                blinded_hashes[nibble as usize]
            })
        } else {
            None
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            ?masks,
            "Revealing node",
        );

        match node {
            TrieNodeV2::EmptyRoot => {
                // For an empty root, ensure that we are at the root path, and at the upper subtrie.
                debug_assert!(path.is_empty());
                debug_assert!(self.path.is_empty());
                self.nodes.insert(path, SparseNode::Empty);
            }
            TrieNodeV2::Branch(branch) => {
                // A branch with an empty key is a plain branch node.
                if branch.key.is_empty() {
                    self.reveal_branch(
                        path,
                        branch.state_mask,
                        &branch.stack,
                        masks,
                        hash.as_ref().map(RlpNode::word_rlp),
                    )?;
                    return Ok(true);
                }

                // A branch with a non-empty key encodes an extension followed by the branch
                // itself; insert the extension node first.
                self.nodes.insert(
                    path,
                    SparseNode::Extension {
                        key: branch.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                // Inherit `store_in_db_trie` from the child branch
                                // node masks so that the memoized hash can be used
                                // without needing to fetch the child branch.
                                store_in_db_trie: Some(masks.is_some_and(|m| {
                                    !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
                                })),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );

                let mut branch_path = path;
                branch_path.extend(&branch.key);

                // Exit early if the actual branch node does not belong to this subtrie.
                if !Self::is_child_same_level(&path, &branch_path) {
                    return Ok(true);
                }

                // Reveal the actual branch node.
                self.reveal_branch(
                    branch_path,
                    branch.state_mask,
                    &branch.stack,
                    masks,
                    branch.branch_rlp_node.clone(),
                )?;
            }
            // Standalone extensions never reach this method: they arrive embedded in `Branch`
            // nodes via a non-empty key, handled above.
            TrieNodeV2::Extension(_) => unreachable!(),
            TrieNodeV2::Leaf(leaf) => {
                // Skip the reachability check when path.len() == UPPER_TRIE_MAX_DEPTH because
                // at that boundary the leaf is in the lower subtrie but its parent branch is in
                // the upper subtrie. The subtrie cannot check connectivity across the upper/lower
                // boundary, so that check happens in `reveal_nodes` instead.
                if path.len() != UPPER_TRIE_MAX_DEPTH && !self.is_leaf_reachable_from_parent(&path)
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        "Leaf not reachable from parent branch, skipping",
                    );
                    return Ok(false)
                }

                let mut full_key = path;
                full_key.extend(&leaf.key);

                // Store the leaf's value keyed by its full path; a pre-existing value means
                // this leaf was effectively revealed already.
                match self.inner.values.entry(full_key) {
                    Entry::Occupied(_) => {
                        trace!(
                            target: "trie::parallel_sparse",
                            ?path,
                            ?full_key,
                            "Leaf full key value already present, skipping",
                        );
                        return Ok(false)
                    }
                    Entry::Vacant(entry) => {
                        entry.insert(leaf.value.clone());
                    }
                }

                self.nodes.insert(
                    path,
                    SparseNode::Leaf {
                        key: leaf.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                store_in_db_trie: Some(false),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );
            }
        }

        Ok(true)
    }
2710
    /// Recalculates and updates the RLP hashes for the changed nodes in this subtrie.
    ///
    /// The function starts from the subtrie root, traverses down to leaves, and then calculates
    /// the hashes from leaves back up to the root. It uses a stack from [`SparseSubtrieBuffers`] to
    /// track the traversal and accumulate RLP encodings.
    ///
    /// # Parameters
    ///
    /// - `prefix_set`: The set of trie paths whose nodes have changed.
    /// - `update_actions`: A buffer which `SparseTrieUpdatesAction`s will be written to in the
    ///   event that any changes to the top-level updates are required. If None then update
    ///   retention is disabled.
    /// - `branch_node_masks`: The tree and hash masks for branch nodes.
    ///
    /// # Returns
    ///
    /// The RLP node representing the root of the updated subtrie.
    ///
    /// # Panics
    ///
    /// If the node at the root path does not exist.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(root = ?self.path), ret)]
    fn update_hashes(
        &mut self,
        prefix_set: &mut PrefixSet,
        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
        branch_node_masks: &BranchNodeMasksMap,
    ) -> RlpNode {
        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");

        // Every changed path must belong to this subtrie.
        debug_assert!(prefix_set.iter().all(|path| path.starts_with(&self.path)));

        // Seed the reusable path stack with the subtrie root; the stack must be empty
        // between calls.
        debug_assert!(self.inner.buffers.path_stack.is_empty());
        self.inner
            .buffers
            .path_stack
            .push(RlpNodePathStackItem { path: self.path, is_in_prefix_set: None });

        // Iteratively process nodes; `rlp_node` may push a node back onto the path stack
        // together with its children to defer the parent until the children are computed.
        while let Some(stack_item) = self.inner.buffers.path_stack.pop() {
            let path = stack_item.path;
            let node = self
                .nodes
                .get_mut(&path)
                .unwrap_or_else(|| panic!("node at path {path:?} does not exist"));

            self.inner.rlp_node(prefix_set, update_actions, stack_item, node, branch_node_masks);
        }

        // Once the path stack drains, exactly the root's RLP node remains on the RLP stack.
        debug_assert_eq!(self.inner.buffers.rlp_node_stack.len(), 1);
        self.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
    }
2762
2763    /// Removes all nodes and values from the subtrie, resetting it to a blank state
2764    /// with only an empty root node. This is used when a storage root is deleted.
2765    fn wipe(&mut self) {
2766        self.nodes.clear();
2767        self.nodes.insert(Nibbles::default(), SparseNode::Empty);
2768        self.inner.clear();
2769    }
2770
2771    /// Clears the subtrie, keeping the data structures allocated.
2772    pub(crate) fn clear(&mut self) {
2773        self.nodes.clear();
2774        self.inner.clear();
2775    }
2776
    /// Shrinks the capacity of the subtrie's node storage.
    ///
    /// Capacity is lowered to at least `size`, but never below the current number of nodes.
    pub(crate) fn shrink_nodes_to(&mut self, size: usize) {
        self.nodes.shrink_to(size);
    }
2781
    /// Shrinks the capacity of the subtrie's value storage.
    ///
    /// Capacity is lowered to at least `size`, but never below the current number of values.
    pub(crate) fn shrink_values_to(&mut self, size: usize) {
        self.inner.values.shrink_to(size);
    }
2786
2787    /// Returns a heuristic for the in-memory size of this subtrie in bytes.
2788    pub(crate) fn memory_size(&self) -> usize {
2789        let mut size = core::mem::size_of::<Self>();
2790
2791        // Nodes map: key (Nibbles) + value (SparseNode)
2792        for (path, node) in &self.nodes {
2793            size += core::mem::size_of::<Nibbles>();
2794            size += path.len(); // Nibbles heap allocation
2795            size += node.memory_size();
2796        }
2797
2798        // Values map: key (Nibbles) + value (Vec<u8>)
2799        for (path, value) in &self.inner.values {
2800            size += core::mem::size_of::<Nibbles>();
2801            size += path.len(); // Nibbles heap allocation
2802            size += core::mem::size_of::<Vec<u8>>() + value.capacity();
2803        }
2804
2805        // Buffers
2806        size += self.inner.buffers.memory_size();
2807
2808        size
2809    }
2810}
2811
/// Helper type for [`SparseSubtrie`] to mutably access only a subset of fields from the original
/// struct.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
struct SparseSubtrieInner {
    /// Map from leaf key paths to their values.
    /// All values are stored here instead of directly in leaf nodes.
    ///
    /// Keys are full leaf paths (the node path extended with the leaf key suffix), as looked up
    /// by [`SparseSubtrieInner::rlp_node`].
    values: HashMap<Nibbles, Vec<u8>>,
    /// Reusable buffers for [`SparseSubtrie::update_hashes`].
    buffers: SparseSubtrieBuffers,
}
2822
2823impl SparseSubtrieInner {
2824    /// Computes the RLP encoding and its hash for a single (trie node)[`SparseNode`].
2825    ///
2826    /// # Deferred Processing
2827    ///
2828    /// When an extension or a branch node depends on child nodes that haven't been computed yet,
2829    /// the function pushes the current node back onto the path stack along with its children,
2830    /// then returns early. This allows the iterative algorithm to process children first before
2831    /// retrying the parent.
2832    ///
2833    /// # Parameters
2834    ///
2835    /// - `prefix_set`: Set of prefixes (key paths) that have been marked as updated
2836    /// - `update_actions`: A buffer which `SparseTrieUpdatesAction`s will be written to in the
2837    ///   event that any changes to the top-level updates are required. If None then update
2838    ///   retention is disabled.
2839    /// - `stack_item`: The stack item to process
2840    /// - `node`: The sparse node to process (will be mutated to update hash)
2841    /// - `branch_node_masks`: The tree and hash masks for branch nodes.
2842    ///
2843    /// # Side Effects
2844    ///
2845    /// - Updates the node's hash field after computing RLP
2846    /// - Pushes nodes to [`SparseSubtrieBuffers::path_stack`] to manage traversal
2847    /// - May push items onto the path stack for deferred processing
2848    ///
2849    /// # Exit condition
2850    ///
2851    /// Once all nodes have been processed and all RLPs and hashes calculated, pushes the root node
2852    /// onto the [`SparseSubtrieBuffers::rlp_node_stack`] and exits.
    fn rlp_node(
        &mut self,
        prefix_set: &mut PrefixSet,
        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
        mut stack_item: RlpNodePathStackItem,
        node: &mut SparseNode,
        branch_node_masks: &BranchNodeMasksMap,
    ) {
        let path = stack_item.path;
        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            "Calculating node RLP"
        );

        // Check if the path is in the prefix set.
        // First, check the cached value. If it's `None`, then check the prefix set, and update
        // the cached value.
        let mut prefix_set_contains = |path: &Nibbles| {
            *stack_item.is_in_prefix_set.get_or_insert_with(|| prefix_set.contains(path))
        };

        let (rlp_node, node_type) = match node {
            // An empty node hashes to the well-known empty root hash.
            SparseNode::Empty => (RlpNode::word_rlp(&EMPTY_ROOT_HASH), SparseNodeType::Empty),
            SparseNode::Leaf { key, state } => {
                // Leaf values are keyed by the full path: the node path extended with the leaf
                // key suffix.
                let mut path = path;
                path.extend(key);
                let value = self.values.get(&path);

                // Check if we should use cached RLP:
                // - If there's a cached RLP and the path is not in prefix_set, use cached
                // - If the value is not in this subtrie's values (e.g., lower subtrie leaf being
                //   processed via upper subtrie), we must use cached RLP
                let cached_rlp_node = state.cached_rlp_node();
                let use_cached =
                    cached_rlp_node.is_some() && (!prefix_set_contains(&path) || value.is_none());

                if let Some(rlp_node) = use_cached.then(|| cached_rlp_node.unwrap()) {
                    // Return the cached RLP
                    (rlp_node.clone(), SparseNodeType::Leaf)
                } else {
                    // Encode the leaf node and update its RlpNode
                    let value = value.expect("leaf value must exist in subtrie");
                    self.buffers.rlp_buf.clear();
                    let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.buffers.rlp_buf);
                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: Some(false),
                    };
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?key,
                        value = %alloy_primitives::hex::encode(value),
                        ?rlp_node,
                        "Calculated leaf RLP node",
                    );
                    (rlp_node, SparseNodeType::Leaf)
                }
            }
            SparseNode::Extension { key, state } => {
                let mut child_path = path;
                child_path.extend(key);
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    // If the node is already computed, and the node path is not in
                    // the prefix set, return the pre-computed node
                    (
                        rlp_node.clone(),
                        SparseNodeType::Extension { store_in_db_trie: Some(store_in_db_trie) },
                    )
                } else if self.buffers.rlp_node_stack.last().is_some_and(|e| e.path == child_path) {
                    // Top of the stack has the child node, we can encode the extension node and
                    // update its hash
                    let RlpNodeStackItem { path: _, rlp_node: child, node_type: child_node_type } =
                        self.buffers.rlp_node_stack.pop().unwrap();
                    self.buffers.rlp_buf.clear();
                    let rlp_node =
                        ExtensionNodeRef::new(key, &child).rlp(&mut self.buffers.rlp_buf);

                    let store_in_db_trie_value = child_node_type.store_in_db_trie();

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?child_path,
                        ?child_node_type,
                        "Extension node"
                    );

                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: store_in_db_trie_value,
                    };

                    (
                        rlp_node,
                        SparseNodeType::Extension {
                            // Inherit the `store_in_db_trie` flag from the child node, which is
                            // always the branch node
                            store_in_db_trie: store_in_db_trie_value,
                        },
                    )
                } else {
                    // Need to defer processing until child is computed, on the next
                    // invocation update the node's hash.
                    // The extension is pushed first, then its child, so the child is popped and
                    // computed before the extension is retried.
                    self.buffers.path_stack.extend([
                        RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        },
                        RlpNodePathStackItem { path: child_path, is_in_prefix_set: None },
                    ]);
                    return
                }
            }
            SparseNode::Branch { state_mask, state, blinded_mask, blinded_hashes } => {
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    let node_type =
                        SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie) };

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?node_type,
                        ?rlp_node,
                        "Adding node to RLP node stack (cached branch)"
                    );

                    // If the node hash is already computed, and the node path is not in
                    // the prefix set, return the pre-computed hash
                    self.buffers.rlp_node_stack.push(RlpNodeStackItem {
                        path,
                        rlp_node: rlp_node.clone(),
                        node_type,
                    });
                    return
                }

                // Only collect update actions when retention is enabled and this branch's path
                // is in the prefix set (i.e. it was affected by a change).
                let retain_updates = update_actions.is_some() && prefix_set_contains(&path);

                self.buffers.branch_child_buf.clear();
                // Walk children in a reverse order from `f` to `0`, so we pop the `0` first
                // from the stack and keep walking in the sorted order.
                // Blinded children are skipped: their hashes are taken from `blinded_hashes`
                // below instead of being computed.
                for bit in state_mask.iter().rev() {
                    let mut child = path;
                    child.push_unchecked(bit);

                    if !blinded_mask.is_bit_set(bit) {
                        self.buffers.branch_child_buf.push(child);
                    }
                }

                // Pre-size the child RLP buffer; slots are filled in normal order below.
                self.buffers.branch_value_stack_buf.resize(state_mask.len(), Default::default());

                let mut tree_mask = TrieMask::default();
                let mut hash_mask = TrieMask::default();
                let mut hashes = Vec::new();

                // Lazy lookup for branch node masks - shared across loop iterations
                let mut path_masks_storage = None;
                let mut path_masks =
                    || *path_masks_storage.get_or_insert_with(|| branch_node_masks.get(&path));

                for (i, child_nibble) in state_mask.iter().enumerate().rev() {
                    let mut child_path = path;
                    child_path.push_unchecked(child_nibble);

                    let (child, child_node_type) = if blinded_mask.is_bit_set(child_nibble) {
                        (
                            RlpNode::word_rlp(&blinded_hashes[child_nibble as usize]),
                            SparseNodeType::Hash,
                        )
                    } else if self
                        .buffers
                        .rlp_node_stack
                        .last()
                        .is_some_and(|e| e.path == child_path)
                    {
                        let RlpNodeStackItem { path: _, rlp_node, node_type } =
                            self.buffers.rlp_node_stack.pop().unwrap();

                        (rlp_node, node_type)
                    } else {
                        // Need to defer processing until children are computed, on the next
                        // invocation update the node's hash.
                        // The branch is re-pushed first, followed by all of its non-blinded
                        // children, so the children are computed before the branch is retried.
                        self.buffers.path_stack.push(RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        });
                        self.buffers.path_stack.extend(
                            self.buffers
                                .branch_child_buf
                                .drain(..)
                                .map(|path| RlpNodePathStackItem { path, is_in_prefix_set: None }),
                        );
                        return
                    };

                    // Update the masks only if we need to retain trie updates
                    if retain_updates {
                        // Determine whether we need to set trie mask bit.
                        let should_set_tree_mask_bit =
                            if let Some(store_in_db_trie) = child_node_type.store_in_db_trie() {
                                // A branch or an extension node explicitly set the
                                // `store_in_db_trie` flag
                                store_in_db_trie
                            } else {
                                // A blinded node has the tree mask bit set
                                child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.tree_mask.is_bit_set(child_nibble)
                                    })
                            };
                        if should_set_tree_mask_bit {
                            tree_mask.set_bit(child_nibble);
                        }
                        // Set the hash mask. If a child node is a revealed branch node OR
                        // is a blinded node that has its hash mask bit set according to the
                        // database, set the hash mask bit and save the hash.
                        let hash = child.as_hash().filter(|_| {
                            child_node_type.is_branch() ||
                                (child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.hash_mask.is_bit_set(child_nibble)
                                    }))
                        });
                        if let Some(hash) = hash {
                            hash_mask.set_bit(child_nibble);
                            hashes.push(hash);
                        }
                    }

                    // Insert children in the resulting buffer in a normal order,
                    // because initially we iterated in reverse.
                    // SAFETY: i < len and len is never 0
                    self.buffers.branch_value_stack_buf[i] = child;
                }

                trace!(
                    target: "trie::parallel_sparse",
                    ?path,
                    ?tree_mask,
                    ?hash_mask,
                    "Branch node masks"
                );

                // Top of the stack has all children node, we can encode the branch node and
                // update its hash
                self.buffers.rlp_buf.clear();
                let branch_node_ref =
                    BranchNodeRef::new(&self.buffers.branch_value_stack_buf, *state_mask);
                let rlp_node = branch_node_ref.rlp(&mut self.buffers.rlp_buf);

                // Save a branch node update only if it's not a root node, and we need to
                // persist updates.
                let store_in_db_trie_value = if let Some(update_actions) =
                    update_actions.as_mut().filter(|_| retain_updates && !path.is_empty())
                {
                    let store_in_db_trie = !tree_mask.is_empty() || !hash_mask.is_empty();
                    if store_in_db_trie {
                        // Store in DB trie if there are either any children that are stored in
                        // the DB trie, or any children represent hashed values
                        // `hashes` was filled while iterating in reverse; restore normal order
                        // before building the compact node.
                        hashes.reverse();
                        let branch_node =
                            BranchNodeCompact::new(*state_mask, tree_mask, hash_mask, hashes, None);
                        update_actions
                            .push(SparseTrieUpdatesAction::InsertUpdated(path, branch_node));
                    } else {
                        // New tree and hash masks are empty - check previous state
                        let prev_had_masks = path_masks()
                            .is_some_and(|m| !m.tree_mask.is_empty() || !m.hash_mask.is_empty());
                        if prev_had_masks {
                            // Previously had masks, now empty - mark as removed
                            update_actions.push(SparseTrieUpdatesAction::InsertRemoved(path));
                        } else {
                            // Previously empty too - just remove the update
                            update_actions.push(SparseTrieUpdatesAction::RemoveUpdated(path));
                        }
                    }

                    store_in_db_trie
                } else {
                    false
                };

                *state = SparseNodeState::Cached {
                    rlp_node: rlp_node.clone(),
                    store_in_db_trie: Some(store_in_db_trie_value),
                };

                (
                    rlp_node,
                    SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie_value) },
                )
            }
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node_type,
            ?rlp_node,
            "Adding node to RLP node stack"
        );
        self.buffers.rlp_node_stack.push(RlpNodeStackItem { path, rlp_node, node_type });
    }
3168
3169    /// Clears the subtrie, keeping the data structures allocated.
3170    fn clear(&mut self) {
3171        self.values.clear();
3172        self.buffers.clear();
3173    }
3174}
3175
/// Represents the outcome of processing a node during leaf insertion
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub enum LeafUpdateStep {
    /// Continue traversing to the next node
    Continue,
    /// Update is complete with nodes inserted
    Complete {
        /// The node paths that were inserted during this step
        inserted_nodes: Vec<Nibbles>,
    },
    /// The node was not found
    ///
    /// This is the [`Default`] variant.
    #[default]
    NodeNotFound,
}
3190
impl LeafUpdateStep {
    /// Creates a step indicating completion with inserted nodes
    ///
    /// Convenience constructor for [`LeafUpdateStep::Complete`].
    pub const fn complete_with_insertions(inserted_nodes: Vec<Nibbles>) -> Self {
        Self::Complete { inserted_nodes }
    }
}
3197
/// Sparse Subtrie Type.
///
/// Used to determine the type of subtrie a certain path belongs to:
/// - Paths in the range `0x..=0xf` belong to the upper subtrie.
/// - Paths in the range `0x00..` belong to one of the lower subtries. The index of the lower
///   subtrie is determined by the first [`UPPER_TRIE_MAX_DEPTH`] nibbles of the path.
///
/// There can be at most [`NUM_LOWER_SUBTRIES`] lower subtries.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SparseSubtrieType {
    /// Upper subtrie with paths in the range `0x..=0xf`
    Upper,
    /// Lower subtrie with paths in the range `0x00..`. Includes the index of the subtrie,
    /// according to the path prefix.
    ///
    /// The index is derived from the path's leading nibbles via
    /// [`path_subtrie_index_unchecked`].
    Lower(usize),
}
3214
3215impl SparseSubtrieType {
3216    /// Returns true if a node at a path of the given length would be placed in the upper subtrie.
3217    ///
3218    /// Nodes with paths shorter than [`UPPER_TRIE_MAX_DEPTH`] nibbles belong to the upper subtrie,
3219    /// while longer paths belong to the lower subtries.
3220    pub const fn path_len_is_upper(len: usize) -> bool {
3221        len < UPPER_TRIE_MAX_DEPTH
3222    }
3223
3224    /// Returns the type of subtrie based on the given path.
3225    pub fn from_path(path: &Nibbles) -> Self {
3226        if Self::path_len_is_upper(path.len()) {
3227            Self::Upper
3228        } else {
3229            Self::Lower(path_subtrie_index_unchecked(path))
3230        }
3231    }
3232
3233    /// Returns the index of the lower subtrie, if it exists.
3234    pub const fn lower_index(&self) -> Option<usize> {
3235        match self {
3236            Self::Upper => None,
3237            Self::Lower(index) => Some(*index),
3238        }
3239    }
3240}
3241
3242impl Ord for SparseSubtrieType {
3243    /// Orders two [`SparseSubtrieType`]s such that `Upper` is less than `Lower(_)`, and `Lower`s
3244    /// are ordered by their index.
3245    fn cmp(&self, other: &Self) -> Ordering {
3246        match (self, other) {
3247            (Self::Upper, Self::Upper) => Ordering::Equal,
3248            (Self::Upper, Self::Lower(_)) => Ordering::Less,
3249            (Self::Lower(_), Self::Upper) => Ordering::Greater,
3250            (Self::Lower(idx_a), Self::Lower(idx_b)) if idx_a == idx_b => Ordering::Equal,
3251            (Self::Lower(idx_a), Self::Lower(idx_b)) => idx_a.cmp(idx_b),
3252        }
3253    }
3254}
3255
impl PartialOrd for SparseSubtrieType {
    /// Delegates to [`Ord::cmp`]; the ordering is total, so this never returns [`None`].
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
3261
/// Collection of reusable buffers for calculating subtrie hashes.
///
/// These buffers reduce allocations when computing RLP representations during trie updates.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrieBuffers {
    /// Stack of RLP node paths awaiting processing (work list for the iterative traversal).
    path_stack: Vec<RlpNodePathStackItem>,
    /// Stack of computed RLP nodes, popped by parent nodes as they are encoded.
    rlp_node_stack: Vec<RlpNodeStackItem>,
    /// Reusable buffer of child paths collected while processing a branch node.
    branch_child_buf: Vec<Nibbles>,
    /// Reusable buffer of child RLP nodes used when encoding a branch node.
    branch_value_stack_buf: Vec<RlpNode>,
    /// Reusable scratch buffer for RLP-encoding a single node.
    rlp_buf: Vec<u8>,
}
3278
3279impl SparseSubtrieBuffers {
3280    /// Clears all buffers.
3281    fn clear(&mut self) {
3282        self.path_stack.clear();
3283        self.rlp_node_stack.clear();
3284        self.branch_child_buf.clear();
3285        self.branch_value_stack_buf.clear();
3286        self.rlp_buf.clear();
3287    }
3288
3289    /// Returns a heuristic for the in-memory size of these buffers in bytes.
3290    const fn memory_size(&self) -> usize {
3291        let mut size = core::mem::size_of::<Self>();
3292
3293        size += self.path_stack.capacity() * core::mem::size_of::<RlpNodePathStackItem>();
3294        size += self.rlp_node_stack.capacity() * core::mem::size_of::<RlpNodeStackItem>();
3295        size += self.branch_child_buf.capacity() * core::mem::size_of::<Nibbles>();
3296        size += self.branch_value_stack_buf.capacity() * core::mem::size_of::<RlpNode>();
3297        size += self.rlp_buf.capacity();
3298
3299        size
3300    }
3301}
3302
/// RLP node path stack item.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RlpNodePathStackItem {
    /// Path to the node.
    pub path: Nibbles,
    /// Whether the path is in the prefix set. If [`None`], then unknown yet.
    ///
    /// Acts as a lazily-populated cache so the prefix set is queried at most once per item.
    pub is_in_prefix_set: Option<bool>,
}
3311
/// Changed subtrie.
///
/// Bundles a lower subtrie together with the state needed to recompute its hashes.
#[derive(Debug)]
struct ChangedSubtrie {
    /// Lower subtrie index in the range [0, [`NUM_LOWER_SUBTRIES`]).
    index: usize,
    /// Changed subtrie
    subtrie: Box<SparseSubtrie>,
    /// Prefix set of keys that belong to the subtrie.
    prefix_set: PrefixSet,
    /// Reusable buffer for collecting [`SparseTrieUpdatesAction`]s during computations. Will be
    /// None if update retention is disabled.
    update_actions_buf: Option<Vec<SparseTrieUpdatesAction>>,
}
3325
/// Convert first [`UPPER_TRIE_MAX_DEPTH`] nibbles of the path into a lower subtrie index in the
/// range [0, [`NUM_LOWER_SUBTRIES`]).
///
/// # Panics
///
/// If the path is shorter than [`UPPER_TRIE_MAX_DEPTH`] nibbles.
fn path_subtrie_index_unchecked(path: &Nibbles) -> usize {
    // The implementation reads exactly one byte (two nibbles), so it is only valid while the
    // upper trie depth is 2.
    debug_assert_eq!(UPPER_TRIE_MAX_DEPTH, 2);
    let idx = path.get_byte_unchecked(0) as usize;
    // SAFETY: a byte is always < 256 == 16^UPPER_TRIE_MAX_DEPTH == NUM_LOWER_SUBTRIES, so the
    // index is in range.
    unsafe { core::hint::assert_unchecked(idx < NUM_LOWER_SUBTRIES) };
    idx
}
3339
3340/// Checks if `path` is a strict descendant of any root in a sorted slice.
3341///
3342/// Uses binary search to find the candidate root that could be an ancestor.
3343/// Returns `true` if `path` starts with a root and is longer (strict descendant).
3344fn is_strict_descendant_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3345    if roots.is_empty() {
3346        return false;
3347    }
3348    debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3349    let idx = roots.partition_point(|root| root <= path);
3350    if idx > 0 {
3351        let candidate = &roots[idx - 1];
3352        if path.starts_with(candidate) && path.len() > candidate.len() {
3353            return true;
3354        }
3355    }
3356    false
3357}
3358
3359/// Returns true if any retained leaf path has `prefix` as a prefix.
3360///
3361/// The `retained` slice must be sorted.
3362fn has_retained_descendant(retained: &[Nibbles], prefix: &Nibbles) -> bool {
3363    if retained.is_empty() {
3364        return false;
3365    }
3366    debug_assert!(retained.windows(2).all(|w| w[0] <= w[1]), "retained must be sorted by path");
3367    let idx = retained.partition_point(|path| path < prefix);
3368    idx < retained.len() && retained[idx].starts_with(prefix)
3369}
3370
3371/// Checks if `path` starts with any root in a sorted slice (inclusive).
3372///
3373/// Uses binary search to find the candidate root that could be a prefix.
3374/// Returns `true` if `path` starts with a root (including exact match).
3375fn starts_with_pruned_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3376    if roots.is_empty() {
3377        return false;
3378    }
3379    debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3380    let idx = roots.partition_point(|root| root <= path);
3381    if idx > 0 {
3382        let candidate = &roots[idx - 1];
3383        if path.starts_with(candidate) {
3384            return true;
3385        }
3386    }
3387    false
3388}
3389
/// Used by lower subtries to communicate updates to the top-level [`SparseTrieUpdates`] set.
///
/// Actions are collected into per-subtrie buffers (see [`ChangedSubtrie::update_actions_buf`]),
/// which lets subtries be processed independently of the shared update set.
#[derive(Clone, Debug, Eq, PartialEq)]
enum SparseTrieUpdatesAction {
    /// Remove the path from the `updated_nodes`, if it was present, and add it to `removed_nodes`.
    InsertRemoved(Nibbles),
    /// Remove the path from the `updated_nodes`, if it was present, leaving `removed_nodes`
    /// unaffected.
    RemoveUpdated(Nibbles),
    /// Insert the branch node into `updated_nodes`.
    InsertUpdated(Nibbles, BranchNodeCompact),
}
3401
3402#[cfg(test)]
3403mod tests {
3404    use super::{
3405        path_subtrie_index_unchecked, LowerSparseSubtrie, ParallelSparseTrie, SparseSubtrie,
3406        SparseSubtrieType,
3407    };
3408    use crate::{
3409        parallel::ChangedSubtrie, trie::SparseNodeState, LeafLookup, LeafLookupError, SparseNode,
3410        SparseTrie, SparseTrieUpdates,
3411    };
3412    use alloy_primitives::{
3413        b256, hex,
3414        map::{B256Set, HashMap},
3415        B256, U256,
3416    };
3417    use alloy_rlp::{Decodable, Encodable};
3418    use alloy_trie::{proof::AddedRemovedKeys, BranchNodeCompact, Nibbles};
3419    use assert_matches::assert_matches;
3420    use itertools::Itertools;
3421    use proptest::{prelude::*, sample::SizeRange};
3422    use proptest_arbitrary_interop::arb;
3423    use reth_execution_errors::SparseTrieErrorKind;
3424    use reth_primitives_traits::Account;
3425    use reth_provider::{
3426        test_utils::create_test_provider_factory, StorageSettingsCache, TrieWriter,
3427    };
3428    use reth_trie::{
3429        hashed_cursor::{noop::NoopHashedCursor, HashedPostStateCursor},
3430        node_iter::{TrieElement, TrieNodeIter},
3431        trie_cursor::{noop::NoopAccountTrieCursor, TrieCursor, TrieCursorFactory},
3432        walker::TrieWalker,
3433        HashedPostState,
3434    };
3435    use reth_trie_common::{
3436        prefix_set::PrefixSetMut,
3437        proof::{ProofNodes, ProofRetainer},
3438        updates::TrieUpdates,
3439        BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, BranchNodeV2, ExtensionNode,
3440        HashBuilder, LeafNode, ProofTrieNodeV2, RlpNode, TrieMask, TrieNode, TrieNodeV2,
3441        EMPTY_ROOT_HASH,
3442    };
3443    use reth_trie_db::DatabaseTrieCursorFactory;
3444    use std::collections::{BTreeMap, BTreeSet};
3445
3446    /// Pad nibbles to the length of a B256 hash with zeros on the right.
3447    fn pad_nibbles_right(mut nibbles: Nibbles) -> Nibbles {
3448        nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![
3449            0;
3450            B256::len_bytes() * 2 - nibbles.len()
3451        ]));
3452        nibbles
3453    }
3454
3455    /// Create a leaf key (suffix) for a leaf at a given position depth.
3456    /// `suffix` contains the non-zero nibbles, padded with zeros to reach `total_len`.
3457    fn leaf_key(suffix: impl AsRef<[u8]>, total_len: usize) -> Nibbles {
3458        let suffix = suffix.as_ref();
3459        let mut nibbles = Nibbles::from_nibbles(suffix);
3460        nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![0; total_len - suffix.len()]));
3461        nibbles
3462    }
3463
    /// Builds an [`Account`] with the given nonce and default remaining fields.
    fn create_account(nonce: u64) -> Account {
        Account { nonce, ..Default::default() }
    }
3467
    /// Returns the RLP encoding of a trie account with large nonce and balance values and an
    /// empty storage root.
    fn large_account_value() -> Vec<u8> {
        let account = Account {
            nonce: 0x123456789abcdef,
            balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
            ..Default::default()
        };
        let mut buf = Vec::new();
        account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
        buf
    }
3478
3479    fn encode_account_value(nonce: u64) -> Vec<u8> {
3480        let account = Account { nonce, ..Default::default() };
3481        let trie_account = account.into_trie_account(EMPTY_ROOT_HASH);
3482        let mut buf = Vec::new();
3483        trie_account.encode(&mut buf);
3484        buf
3485    }
3486
    /// Test context that provides helper methods for trie testing
    ///
    /// Stateless unit struct; exists purely to group the helper methods below.
    #[derive(Default)]
    struct ParallelSparseTrieTestContext;
3490
3491    impl ParallelSparseTrieTestContext {
3492        /// Assert that a lower subtrie exists at the given path
3493        fn assert_subtrie_exists(&self, trie: &ParallelSparseTrie, path: &Nibbles) {
3494            let idx = path_subtrie_index_unchecked(path);
3495            assert!(
3496                trie.lower_subtries[idx].as_revealed_ref().is_some(),
3497                "Expected lower subtrie at path {path:?} to exist",
3498            );
3499        }
3500
3501        /// Get a lower subtrie, panicking if it doesn't exist
3502        fn get_subtrie<'a>(
3503            &self,
3504            trie: &'a ParallelSparseTrie,
3505            path: &Nibbles,
3506        ) -> &'a SparseSubtrie {
3507            let idx = path_subtrie_index_unchecked(path);
3508            trie.lower_subtries[idx]
3509                .as_revealed_ref()
3510                .unwrap_or_else(|| panic!("Lower subtrie at path {path:?} should exist"))
3511        }
3512
3513        /// Assert that a lower subtrie has a specific path field value
3514        fn assert_subtrie_path(
3515            &self,
3516            trie: &ParallelSparseTrie,
3517            subtrie_prefix: impl AsRef<[u8]>,
3518            expected_path: impl AsRef<[u8]>,
3519        ) {
3520            let subtrie_prefix = Nibbles::from_nibbles(subtrie_prefix);
3521            let expected_path = Nibbles::from_nibbles(expected_path);
3522            let idx = path_subtrie_index_unchecked(&subtrie_prefix);
3523
3524            let subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap_or_else(|| {
3525                panic!("Lower subtrie at prefix {subtrie_prefix:?} should exist")
3526            });
3527
3528            assert_eq!(
3529                subtrie.path, expected_path,
3530                "Subtrie at prefix {subtrie_prefix:?} should have path {expected_path:?}, but has {:?}",
3531                subtrie.path
3532            );
3533        }
3534
3535        /// Create test leaves with consecutive account values
3536        fn create_test_leaves(&self, paths: &[&[u8]]) -> Vec<(Nibbles, Vec<u8>)> {
3537            paths
3538                .iter()
3539                .enumerate()
3540                .map(|(i, path)| {
3541                    (
3542                        pad_nibbles_right(Nibbles::from_nibbles(path)),
3543                        encode_account_value(i as u64 + 1),
3544                    )
3545                })
3546                .collect()
3547        }
3548
3549        /// Create a single test leaf with the given path and value nonce
3550        fn create_test_leaf(&self, path: impl AsRef<[u8]>, value_nonce: u64) -> (Nibbles, Vec<u8>) {
3551            (pad_nibbles_right(Nibbles::from_nibbles(path)), encode_account_value(value_nonce))
3552        }
3553
3554        /// Update multiple leaves in the trie
3555        fn update_leaves(
3556            &self,
3557            trie: &mut ParallelSparseTrie,
3558            leaves: impl IntoIterator<Item = (Nibbles, Vec<u8>)>,
3559        ) {
3560            for (path, value) in leaves {
3561                trie.update_leaf(path, value).unwrap();
3562            }
3563        }
3564
3565        /// Create an assertion builder for a subtrie
3566        fn assert_subtrie<'a>(
3567            &self,
3568            trie: &'a ParallelSparseTrie,
3569            path: Nibbles,
3570        ) -> SubtrieAssertion<'a> {
3571            self.assert_subtrie_exists(trie, &path);
3572            let subtrie = self.get_subtrie(trie, &path);
3573            SubtrieAssertion::new(subtrie)
3574        }
3575
3576        /// Create an assertion builder for the upper subtrie
3577        fn assert_upper_subtrie<'a>(&self, trie: &'a ParallelSparseTrie) -> SubtrieAssertion<'a> {
3578            SubtrieAssertion::new(&trie.upper_subtrie)
3579        }
3580
3581        /// Assert the root, trie updates, and nodes against the hash builder output.
3582        fn assert_with_hash_builder(
3583            &self,
3584            trie: &mut ParallelSparseTrie,
3585            hash_builder_root: B256,
3586            hash_builder_updates: TrieUpdates,
3587            hash_builder_proof_nodes: ProofNodes,
3588        ) {
3589            assert_eq!(trie.root(), hash_builder_root);
3590            pretty_assertions::assert_eq!(
3591                BTreeMap::from_iter(trie.updates_ref().updated_nodes.clone()),
3592                BTreeMap::from_iter(hash_builder_updates.account_nodes)
3593            );
3594            assert_eq_parallel_sparse_trie_proof_nodes(trie, hash_builder_proof_nodes);
3595        }
3596    }
3597
3598    /// Assertion builder for subtrie structure
3599    struct SubtrieAssertion<'a> {
3600        subtrie: &'a SparseSubtrie,
3601    }
3602
3603    impl<'a> SubtrieAssertion<'a> {
3604        fn new(subtrie: &'a SparseSubtrie) -> Self {
3605            Self { subtrie }
3606        }
3607
3608        fn has_branch(self, path: &Nibbles, expected_mask_bits: &[u8]) -> Self {
3609            match self.subtrie.nodes.get(path) {
3610                Some(SparseNode::Branch { state_mask, .. }) => {
3611                    for bit in expected_mask_bits {
3612                        assert!(
3613                            state_mask.is_bit_set(*bit),
3614                            "Expected branch at {path:?} to have bit {bit} set, instead mask is: {state_mask:?}",
3615                        );
3616                    }
3617                }
3618                node => panic!("Expected branch node at {path:?}, found {node:?}"),
3619            }
3620            self
3621        }
3622
3623        fn has_leaf(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
3624            match self.subtrie.nodes.get(path) {
3625                Some(SparseNode::Leaf { key, .. }) => {
3626                    assert_eq!(
3627                        *key, *expected_key,
3628                        "Expected leaf at {path:?} to have key {expected_key:?}, found {key:?}",
3629                    );
3630                }
3631                node => panic!("Expected leaf node at {path:?}, found {node:?}"),
3632            }
3633            self
3634        }
3635
3636        fn has_extension(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
3637            match self.subtrie.nodes.get(path) {
3638                Some(SparseNode::Extension { key, .. }) => {
3639                    assert_eq!(
3640                        *key, *expected_key,
3641                        "Expected extension at {path:?} to have key {expected_key:?}, found {key:?}",
3642                    );
3643                }
3644                node => panic!("Expected extension node at {path:?}, found {node:?}"),
3645            }
3646            self
3647        }
3648
3649        fn has_value(self, path: &Nibbles, expected_value: &[u8]) -> Self {
3650            let actual = self.subtrie.inner.values.get(path);
3651            assert_eq!(
3652                actual.map(|v| v.as_slice()),
3653                Some(expected_value),
3654                "Expected value at {path:?} to be {expected_value:?}, found {actual:?}",
3655            );
3656            self
3657        }
3658
3659        fn has_no_value(self, path: &Nibbles) -> Self {
3660            let actual = self.subtrie.inner.values.get(path);
3661            assert!(actual.is_none(), "Expected no value at {path:?}, but found {actual:?}");
3662            self
3663        }
3664    }
3665
3666    fn create_leaf_node(key: impl AsRef<[u8]>, value_nonce: u64) -> TrieNodeV2 {
3667        TrieNodeV2::Leaf(LeafNode::new(
3668            Nibbles::from_nibbles(key),
3669            encode_account_value(value_nonce),
3670        ))
3671    }
3672
3673    fn create_branch_node(
3674        key: Nibbles,
3675        children_indices: &[u8],
3676        child_hashes: impl IntoIterator<Item = RlpNode>,
3677    ) -> TrieNodeV2 {
3678        let mut stack = Vec::new();
3679        let mut state_mask = TrieMask::default();
3680
3681        for (&idx, hash) in children_indices.iter().zip(child_hashes) {
3682            state_mask.set_bit(idx);
3683            stack.push(hash);
3684        }
3685
3686        let branch_rlp_node = if key.is_empty() {
3687            None
3688        } else {
3689            Some(RlpNode::from_rlp(&alloy_rlp::encode(BranchNodeRef::new(&stack, state_mask))))
3690        };
3691
3692        TrieNodeV2::Branch(BranchNodeV2::new(key, stack, state_mask, branch_rlp_node))
3693    }
3694
3695    fn create_branch_node_with_children(
3696        children_indices: &[u8],
3697        child_hashes: impl IntoIterator<Item = RlpNode>,
3698    ) -> TrieNodeV2 {
3699        create_branch_node(Nibbles::default(), children_indices, child_hashes)
3700    }
3701
3702    /// Calculate the state root by feeding the provided state to the hash builder and retaining the
3703    /// proofs for the provided targets.
3704    ///
3705    /// Returns the state root and the retained proof nodes.
3706    fn run_hash_builder(
3707        state: impl IntoIterator<Item = (Nibbles, Account)> + Clone,
3708        trie_cursor: impl TrieCursor,
3709        destroyed_accounts: B256Set,
3710        proof_targets: impl IntoIterator<Item = Nibbles>,
3711    ) -> (B256, TrieUpdates, ProofNodes, HashMap<Nibbles, TrieMask>, HashMap<Nibbles, TrieMask>)
3712    {
3713        let mut account_rlp = Vec::new();
3714
3715        let mut hash_builder = HashBuilder::default()
3716            .with_updates(true)
3717            .with_proof_retainer(ProofRetainer::from_iter(proof_targets).with_added_removed_keys(
3718                Some(AddedRemovedKeys::default().with_assume_added(true)),
3719            ));
3720
3721        let mut prefix_set = PrefixSetMut::default();
3722        prefix_set.extend_keys(state.clone().into_iter().map(|(nibbles, _)| nibbles));
3723        prefix_set.extend_keys(destroyed_accounts.iter().map(Nibbles::unpack));
3724        let walker = TrieWalker::<_>::state_trie(trie_cursor, prefix_set.freeze())
3725            .with_deletions_retained(true);
3726        let hashed_post_state = HashedPostState::default()
3727            .with_accounts(state.into_iter().map(|(nibbles, account)| {
3728                (nibbles.pack().into_inner().unwrap().into(), Some(account))
3729            }))
3730            .into_sorted();
3731        let mut node_iter = TrieNodeIter::state_trie(
3732            walker,
3733            HashedPostStateCursor::new_account(
3734                NoopHashedCursor::<Account>::default(),
3735                &hashed_post_state,
3736            ),
3737        );
3738
3739        while let Some(node) = node_iter.try_next().unwrap() {
3740            match node {
3741                TrieElement::Branch(branch) => {
3742                    hash_builder.add_branch(branch.key, branch.value, branch.children_are_in_trie);
3743                }
3744                TrieElement::Leaf(key, account) => {
3745                    let account = account.into_trie_account(EMPTY_ROOT_HASH);
3746                    account.encode(&mut account_rlp);
3747
3748                    hash_builder.add_leaf(Nibbles::unpack(key), &account_rlp);
3749                    account_rlp.clear();
3750                }
3751            }
3752        }
3753        let root = hash_builder.root();
3754        let proof_nodes = hash_builder.take_proof_nodes();
3755        let branch_node_hash_masks = hash_builder
3756            .updated_branch_nodes
3757            .clone()
3758            .unwrap_or_default()
3759            .iter()
3760            .map(|(path, node)| (*path, node.hash_mask))
3761            .collect();
3762        let branch_node_tree_masks = hash_builder
3763            .updated_branch_nodes
3764            .clone()
3765            .unwrap_or_default()
3766            .iter()
3767            .map(|(path, node)| (*path, node.tree_mask))
3768            .collect();
3769
3770        let mut trie_updates = TrieUpdates::default();
3771        let removed_keys = node_iter.walker.take_removed_keys();
3772        trie_updates.finalize(hash_builder, removed_keys, destroyed_accounts);
3773
3774        (root, trie_updates, proof_nodes, branch_node_hash_masks, branch_node_tree_masks)
3775    }
3776
3777    /// Returns a `ParallelSparseTrie` pre-loaded with the given nodes, as well as leaf values
3778    /// inferred from any provided leaf nodes.
3779    fn new_test_trie<Nodes>(nodes: Nodes) -> ParallelSparseTrie
3780    where
3781        Nodes: Iterator<Item = (Nibbles, SparseNode)>,
3782    {
3783        let mut trie = ParallelSparseTrie::default().with_updates(true);
3784
3785        for (path, node) in nodes {
3786            let subtrie = trie.subtrie_for_path_mut(&path);
3787            if let SparseNode::Leaf { key, .. } = &node {
3788                let mut full_key = path;
3789                full_key.extend(key);
3790                subtrie.inner.values.insert(full_key, "LEAF VALUE".into());
3791            }
3792            subtrie.nodes.insert(path, node);
3793        }
3794        trie
3795    }
3796
3797    fn parallel_sparse_trie_nodes(
3798        sparse_trie: &ParallelSparseTrie,
3799    ) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
3800        let lower_sparse_nodes = sparse_trie
3801            .lower_subtries
3802            .iter()
3803            .filter_map(|subtrie| subtrie.as_revealed_ref())
3804            .flat_map(|subtrie| subtrie.nodes.iter());
3805
3806        let upper_sparse_nodes = sparse_trie.upper_subtrie.nodes.iter();
3807
3808        lower_sparse_nodes.chain(upper_sparse_nodes).sorted_by_key(|(path, _)| *path)
3809    }
3810
    /// Assert that the parallel sparse trie nodes and the proof nodes from the hash builder are
    /// equal.
    ///
    /// Both sequences are iterated in ascending path order and compared pairwise:
    /// paths must match exactly, and node contents must agree per node type
    /// (state mask for branches, key for extensions/leaves).
    fn assert_eq_parallel_sparse_trie_proof_nodes(
        sparse_trie: &ParallelSparseTrie,
        proof_nodes: ProofNodes,
    ) {
        // Decode each retained proof node; `into_nodes_sorted` yields them in
        // ascending path order, matching the sorted sparse-node iteration below.
        let proof_nodes = proof_nodes
            .into_nodes_sorted()
            .into_iter()
            .map(|(path, node)| (path, TrieNodeV2::decode(&mut node.as_ref()).unwrap()));

        let all_sparse_nodes = parallel_sparse_trie_nodes(sparse_trie);

        // Walk both sorted sequences in lockstep.
        for ((proof_node_path, proof_node), (sparse_node_path, sparse_node)) in
            proof_nodes.zip(all_sparse_nodes)
        {
            assert_eq!(&proof_node_path, sparse_node_path);

            let equals = match (&proof_node, &sparse_node) {
                // Both nodes are empty
                (TrieNodeV2::EmptyRoot, SparseNode::Empty) => true,
                // Both nodes are branches and have the same state mask
                (
                    TrieNodeV2::Branch(BranchNodeV2 { state_mask: proof_state_mask, .. }),
                    SparseNode::Branch { state_mask: sparse_state_mask, .. },
                ) => proof_state_mask == sparse_state_mask,
                // Both nodes are extensions and have the same key
                (
                    TrieNodeV2::Extension(ExtensionNode { key: proof_key, .. }),
                    SparseNode::Extension { key: sparse_key, .. },
                ) |
                // Both nodes are leaves and have the same key
                (
                    TrieNodeV2::Leaf(LeafNode { key: proof_key, .. }),
                    SparseNode::Leaf { key: sparse_key, .. },
                ) => proof_key == sparse_key,
                // Empty and hash nodes are specific to the sparse trie, skip them
                // (`continue` is a never-typed expression, so it is a valid match arm here)
                (_, SparseNode::Empty) => continue,
                _ => false,
            };
            assert!(
                equals,
                "path: {proof_node_path:?}\nproof node: {proof_node:?}\nsparse node: {sparse_node:?}"
            );
        }
    }
3857
3858    #[test]
3859    fn test_get_changed_subtries_empty() {
3860        let mut trie = ParallelSparseTrie::default();
3861        let mut prefix_set = PrefixSetMut::from([Nibbles::default()]).freeze();
3862
3863        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
3864        assert!(subtries.is_empty());
3865        assert_eq!(unchanged_prefix_set, PrefixSetMut::from(prefix_set.iter().copied()));
3866    }
3867
3868    #[test]
3869    fn test_get_changed_subtries() {
3870        // Create a trie with three subtries
3871        let mut trie = ParallelSparseTrie::default();
3872        let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
3873        let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
3874        let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
3875        let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
3876        let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
3877        let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
3878
3879        // Add subtries at specific positions
3880        trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
3881        trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
3882        trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3);
3883
3884        let unchanged_prefix_set = PrefixSetMut::from([
3885            Nibbles::from_nibbles([0x0]),
3886            Nibbles::from_nibbles([0x2, 0x0, 0x0]),
3887        ]);
3888        // Create a prefix set with the keys that match only the second subtrie
3889        let mut prefix_set = PrefixSetMut::from([
3890            // Match second subtrie
3891            Nibbles::from_nibbles([0x1, 0x0, 0x0]),
3892            Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0]),
3893        ]);
3894        prefix_set.extend(unchanged_prefix_set);
3895        let mut prefix_set = prefix_set.freeze();
3896
3897        // Second subtrie should be removed and returned
3898        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
3899        assert_eq!(
3900            subtries
3901                .into_iter()
3902                .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
3903                    (index, subtrie, prefix_set.iter().copied().collect::<Vec<_>>())
3904                })
3905                .collect::<Vec<_>>(),
3906            vec![(
3907                subtrie_2_index,
3908                subtrie_2,
3909                vec![
3910                    Nibbles::from_nibbles([0x1, 0x0, 0x0]),
3911                    Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0])
3912                ]
3913            )]
3914        );
3915        assert_eq!(unchanged_prefix_set, unchanged_prefix_set);
3916        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_none());
3917
3918        // First subtrie should remain unchanged
3919        assert_eq!(trie.lower_subtries[subtrie_1_index], LowerSparseSubtrie::Revealed(subtrie_1));
3920    }
3921
3922    #[test]
3923    fn test_get_changed_subtries_all() {
3924        // Create a trie with three subtries
3925        let mut trie = ParallelSparseTrie::default();
3926        let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
3927        let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
3928        let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
3929        let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
3930        let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
3931        let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
3932
3933        // Add subtries at specific positions
3934        trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
3935        trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
3936        trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3.clone());
3937
3938        // Create a prefix set that matches any key
3939        let mut prefix_set = PrefixSetMut::all().freeze();
3940
3941        // All subtries should be removed and returned
3942        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
3943        assert_eq!(
3944            subtries
3945                .into_iter()
3946                .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
3947                    (index, subtrie, prefix_set.all())
3948                })
3949                .collect::<Vec<_>>(),
3950            vec![
3951                (subtrie_1_index, subtrie_1, true),
3952                (subtrie_2_index, subtrie_2, true),
3953                (subtrie_3_index, subtrie_3, true)
3954            ]
3955        );
3956        assert_eq!(unchanged_prefix_set, PrefixSetMut::all());
3957
3958        assert!(trie.lower_subtries.iter().all(|subtrie| subtrie.as_revealed_ref().is_none()));
3959    }
3960
3961    #[test]
3962    fn test_sparse_subtrie_type() {
3963        assert_eq!(SparseSubtrieType::from_path(&Nibbles::new()), SparseSubtrieType::Upper);
3964        assert_eq!(
3965            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0])),
3966            SparseSubtrieType::Upper
3967        );
3968        assert_eq!(
3969            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15])),
3970            SparseSubtrieType::Upper
3971        );
3972        assert_eq!(
3973            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0])),
3974            SparseSubtrieType::Lower(0)
3975        );
3976        assert_eq!(
3977            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0, 0])),
3978            SparseSubtrieType::Lower(0)
3979        );
3980        assert_eq!(
3981            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1])),
3982            SparseSubtrieType::Lower(1)
3983        );
3984        assert_eq!(
3985            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1, 0])),
3986            SparseSubtrieType::Lower(1)
3987        );
3988        assert_eq!(
3989            SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 15])),
3990            SparseSubtrieType::Lower(15)
3991        );
3992        assert_eq!(
3993            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 0])),
3994            SparseSubtrieType::Lower(240)
3995        );
3996        assert_eq!(
3997            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 1])),
3998            SparseSubtrieType::Lower(241)
3999        );
4000        assert_eq!(
4001            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15])),
4002            SparseSubtrieType::Lower(255)
4003        );
4004        assert_eq!(
4005            SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15, 15])),
4006            SparseSubtrieType::Lower(255)
4007        );
4008    }
4009
    #[test]
    fn test_reveal_node_leaves() {
        // Reveal leaf in the upper trie. A root branch with child 0x1 makes path [0x1]
        // reachable for the subsequent reveal_nodes call.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        {
            let path = Nibbles::from_nibbles([0x1]);
            let node = create_leaf_node([0x2, 0x3], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // The leaf lands in the upper subtrie (its path is shorter than
            // UPPER_TRIE_MAX_DEPTH) with its remaining key and a cached RLP node.
            assert_matches!(
                trie.upper_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                if key == &Nibbles::from_nibbles([0x2, 0x3])
            );

            // The leaf value is stored under the full key: node path + leaf key.
            let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            assert_eq!(
                trie.upper_subtrie.inner.values.get(&full_path),
                Some(&encode_account_value(42))
            );
        }

        // Reveal leaf in a lower trie. A separate trie is needed because the structure at
        // [0x1] conflicts: the upper trie test placed a leaf there, but reaching [0x1, 0x2]
        // requires a branch at [0x1]. A root branch → branch at [0x1] with child 0x2
        // makes path [0x1, 0x2] reachable.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let branch_at_1 =
            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x1]),
            node: branch_at_1,
            masks: None,
        }])
        .unwrap();

        {
            let path = Nibbles::from_nibbles([0x1, 0x2]);
            let node = create_leaf_node([0x3, 0x4], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // Check that the lower subtrie was created
            let idx = path_subtrie_index_unchecked(&path);
            assert!(trie.lower_subtries[idx].as_revealed_ref().is_some());

            // Check that the lower subtrie's path was correctly set
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, path);

            // The revealed leaf is stored in the lower subtrie with its key cached.
            assert_matches!(
                lower_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                if key == &Nibbles::from_nibbles([0x3, 0x4])
            );
        }

        // Reveal leaf in a lower trie with a longer path, shouldn't result in the subtrie's root
        // path changing.
        {
            let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            let node = create_leaf_node([0x4, 0x5], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // Check that the lower subtrie's path hasn't changed
            let idx = path_subtrie_index_unchecked(&path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2]));
        }
    }
4091
4092    #[test]
4093    fn test_reveal_node_branch_all_upper() {
4094        let path = Nibbles::new();
4095        let child_hashes = [
4096            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
4097            RlpNode::word_rlp(&B256::repeat_byte(0x22)),
4098        ];
4099        let node = create_branch_node_with_children(&[0x0, 0x5], child_hashes.clone());
4100        let masks = None;
4101        let trie = ParallelSparseTrie::from_root(node, masks, true).unwrap();
4102
4103        // Branch node should be in upper trie
4104        assert_eq!(
4105            trie.upper_subtrie.nodes.get(&path).unwrap(),
4106            &SparseNode::new_branch(
4107                0b0000000000100001.into(),
4108                &[(0, child_hashes[0].as_hash().unwrap()), (5, child_hashes[1].as_hash().unwrap())]
4109            )
4110        );
4111
4112        // Children should not be revealed yet
4113        let child_path_0 = Nibbles::from_nibbles([0x0]);
4114        let child_path_5 = Nibbles::from_nibbles([0x5]);
4115        assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_0));
4116        assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_5));
4117    }
4118
    #[test]
    fn test_reveal_node_branch_cross_level() {
        // Set up root branch with nibble 0x1 so path [0x1] is reachable.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        let path = Nibbles::from_nibbles([0x1]); // Exactly 1 nibble - boundary case for the upper trie
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x33)),
            RlpNode::word_rlp(&B256::repeat_byte(0x44)),
            RlpNode::word_rlp(&B256::repeat_byte(0x55)),
        ];
        let node = create_branch_node_with_children(&[0x0, 0x7, 0xf], child_hashes.clone());
        let masks = None;

        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

        // Branch node should be in upper trie, hash is memoized from the previous Hash node
        assert_eq!(
            trie.upper_subtrie.nodes.get(&path).unwrap(),
            &SparseNode::new_branch(
                0b1000000010000001.into(),
                &[
                    (0x0, child_hashes[0].as_hash().unwrap()),
                    (0x7, child_hashes[1].as_hash().unwrap()),
                    (0xf, child_hashes[2].as_hash().unwrap())
                ]
            )
            .with_state(SparseNodeState::Cached {
                rlp_node: RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                store_in_db_trie: Some(false),
            })
        );

        // All children should be in lower tries since their paths have length 2,
        // which is >= UPPER_TRIE_MAX_DEPTH
        let child_paths = [
            Nibbles::from_nibbles([0x1, 0x0]),
            Nibbles::from_nibbles([0x1, 0x7]),
            Nibbles::from_nibbles([0x1, 0xf]),
        ];

        // Reveal each child as a leaf node.
        let mut children = child_paths
            .iter()
            .map(|path| ProofTrieNodeV2 {
                path: *path,
                node: create_leaf_node([0x0], 1),
                masks: None,
            })
            .collect::<Vec<_>>();

        trie.reveal_nodes(&mut children).unwrap();

        // Branch node should still be in upper trie but without any blinded children
        assert_matches!(
            trie.upper_subtrie.nodes.get(&path),
            Some(&SparseNode::Branch {
                state_mask,
                state: SparseNodeState::Cached { ref rlp_node, store_in_db_trie: Some(false) },
                blinded_mask,
                ..
            }) if state_mask == 0b1000000010000001.into() && blinded_mask.is_empty() && *rlp_node == RlpNode::word_rlp(&B256::repeat_byte(0xAA))
        );

        // Each child leaf ends up in its own lower subtrie, rooted at the child's
        // path, with its RLP node matching the hash recorded in the parent branch.
        for (i, child_path) in child_paths.iter().enumerate() {
            let idx = path_subtrie_index_unchecked(child_path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(&lower_subtrie.path, child_path);
            assert_eq!(
                lower_subtrie.nodes.get(child_path),
                Some(&SparseNode::Leaf {
                    key: Nibbles::from_nibbles([0x0]),
                    state: SparseNodeState::Cached {
                        rlp_node: child_hashes[i].clone(),
                        store_in_db_trie: Some(false)
                    }
                })
            );
        }
    }
4199
    #[test]
    fn test_update_subtrie_hashes_prefix_set_matching() {
        // Verifies that `update_subtrie_hashes` only dispatches hash updates to lower
        // subtries whose paths match the prefix set, and that it re-inserts into the
        // trie-level prefix set any subtrie root that still lacks a cached hash (so the
        // upper-trie pass can compute it later).

        // Create a trie with a root branch that makes paths [0x0, ...] and [0x3, ...]
        // reachable from the upper trie.
        let root_branch = create_branch_node_with_children(
            &[0x0, 0x3],
            [
                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
            ],
        );
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Create leaf paths. Each leaf's first 2 nibbles select a lower subtrie; the
        // remainder is the leaf key stored inside it.
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..2);
        let leaf_1_key = leaf_1_full_path.slice(2..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 1], vec![0; 62]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..2);
        let leaf_2_key = leaf_2_full_path.slice(2..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 2], vec![0; 62]].concat());
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), 1);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), 2);

        // Create branch node at [0x0] with only children 0x0 and 0x1.
        // Child 0x2 (leaf_3) will be inserted via update_leaf to create a fresh node
        // with hash: None.
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x00)),
            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
        ];
        let branch_path = Nibbles::from_nibbles([0x0]);
        let branch_node = create_branch_node_with_children(&[0x0, 0x1], child_hashes);

        // Reveal the existing nodes
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: branch_path, node: branch_node, masks: None },
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Insert leaf_3 via update_leaf. This modifies the branch at [0x0] to add child
        // 0x2 and creates a fresh leaf node with hash: None in the lower subtrie.
        trie.update_leaf(leaf_3_full_path, encode_account_value(3)).unwrap();

        // Calculate subtrie indexes
        let subtrie_1_index = SparseSubtrieType::from_path(&leaf_1_path).lower_index().unwrap();
        let subtrie_2_index = SparseSubtrieType::from_path(&leaf_2_path).lower_index().unwrap();
        let leaf_3_path = leaf_3_full_path.slice(..2);
        let subtrie_3_index = SparseSubtrieType::from_path(&leaf_3_path).lower_index().unwrap();

        // Keys expected to remain in the trie-level prefix set after the update (asserted
        // below): upper-trie paths and paths that don't target a revealed lower subtrie.
        let mut unchanged_prefix_set = PrefixSetMut::from([
            Nibbles::from_nibbles([0x0]),
            leaf_2_full_path,
            Nibbles::from_nibbles([0x3, 0x0, 0x0]),
        ]);
        // Create a prefix set with the keys that match only the second subtrie
        let mut prefix_set = PrefixSetMut::from([
            // Match second subtrie
            Nibbles::from_nibbles([0x0, 0x1, 0x0]),
            Nibbles::from_nibbles([0x0, 0x1, 0x1, 0x0]),
        ]);
        prefix_set.extend(unchanged_prefix_set.clone());
        trie.prefix_set = prefix_set;

        // Update subtrie hashes
        trie.update_subtrie_hashes();

        // We expect that leaf 3 (0x02) should have been added to the prefix set, because it is
        // missing a hash and is the root node of a lower subtrie, and therefore would need to have
        // that hash calculated by `update_upper_subtrie_hashes`.
        unchanged_prefix_set.insert(leaf_3_full_path);

        // Check that the prefix set was updated
        assert_eq!(
            trie.prefix_set.clone().freeze().into_iter().collect::<Vec<_>>(),
            unchanged_prefix_set.freeze().into_iter().collect::<Vec<_>>()
        );
        // Check that subtries were returned back to the array
        assert!(trie.lower_subtries[subtrie_1_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_3_index].as_revealed_ref().is_some());
    }
4284
4285    #[test]
4286    fn test_subtrie_update_hashes() {
4287        let mut subtrie = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
4288
4289        // Create leaf nodes with paths 0x0...0, 0x00001...0, 0x0010...0
4290        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
4291        let leaf_1_path = leaf_1_full_path.slice(..5);
4292        let leaf_1_key = leaf_1_full_path.slice(5..);
4293        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 0, 0, 0, 1], vec![0; 59]].concat());
4294        let leaf_2_path = leaf_2_full_path.slice(..5);
4295        let leaf_2_key = leaf_2_full_path.slice(5..);
4296        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 0, 1], vec![0; 61]].concat());
4297        let leaf_3_path = leaf_3_full_path.slice(..3);
4298        let leaf_3_key = leaf_3_full_path.slice(3..);
4299
4300        let account_1 = create_account(1);
4301        let account_2 = create_account(2);
4302        let account_3 = create_account(3);
4303        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
4304        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);
4305        let leaf_3 = create_leaf_node(leaf_3_key.to_vec(), account_3.nonce);
4306
4307        // Create bottom branch node
4308        let extension_path = Nibbles::from_nibbles([0, 0, 0]);
4309        let branch_1_path = Nibbles::from_nibbles([0, 0, 0, 0]);
4310        let branch_1 = create_branch_node(
4311            Nibbles::from_nibbles([0]),
4312            &[0, 1],
4313            vec![
4314                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
4315                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
4316            ],
4317        );
4318
4319        // Create top branch node
4320        let branch_2_path = Nibbles::from_nibbles([0, 0]);
4321        let branch_2 = create_branch_node_with_children(
4322            &[0, 1],
4323            vec![
4324                RlpNode::from_rlp(&alloy_rlp::encode(&branch_1)),
4325                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_3)),
4326            ],
4327        );
4328
4329        // Reveal nodes
4330        subtrie.reveal_node(branch_2_path, &branch_2, None, None).unwrap();
4331        subtrie.reveal_node(extension_path, &branch_1, None, None).unwrap();
4332        subtrie.reveal_node(leaf_1_path, &leaf_1, None, None).unwrap();
4333        subtrie.reveal_node(leaf_2_path, &leaf_2, None, None).unwrap();
4334        subtrie.reveal_node(leaf_3_path, &leaf_3, None, None).unwrap();
4335
4336        // Run hash builder for two leaf nodes
4337        let (_, _, proof_nodes, _, _) = run_hash_builder(
4338            [
4339                (leaf_1_full_path, account_1),
4340                (leaf_2_full_path, account_2),
4341                (leaf_3_full_path, account_3),
4342            ],
4343            NoopAccountTrieCursor::default(),
4344            Default::default(),
4345            [extension_path, branch_2_path, leaf_1_full_path, leaf_2_full_path, leaf_3_full_path],
4346        );
4347
4348        // Update hashes for the subtrie
4349        subtrie.update_hashes(
4350            &mut PrefixSetMut::from([leaf_1_full_path, leaf_2_full_path, leaf_3_full_path])
4351                .freeze(),
4352            &mut None,
4353            &BranchNodeMasksMap::default(),
4354        );
4355
4356        // Compare hashes between hash builder and subtrie
4357        let hash_builder_branch_1_hash =
4358            RlpNode::from_rlp(proof_nodes.get(&branch_1_path).unwrap().as_ref()).as_hash().unwrap();
4359        let subtrie_branch_1_hash =
4360            subtrie.nodes.get(&branch_1_path).unwrap().cached_hash().unwrap();
4361        assert_eq!(hash_builder_branch_1_hash, subtrie_branch_1_hash);
4362
4363        let hash_builder_extension_hash =
4364            RlpNode::from_rlp(proof_nodes.get(&extension_path).unwrap().as_ref())
4365                .as_hash()
4366                .unwrap();
4367        let subtrie_extension_hash =
4368            subtrie.nodes.get(&extension_path).unwrap().cached_hash().unwrap();
4369        assert_eq!(hash_builder_extension_hash, subtrie_extension_hash);
4370
4371        let hash_builder_branch_2_hash =
4372            RlpNode::from_rlp(proof_nodes.get(&branch_2_path).unwrap().as_ref()).as_hash().unwrap();
4373        let subtrie_branch_2_hash =
4374            subtrie.nodes.get(&branch_2_path).unwrap().cached_hash().unwrap();
4375        assert_eq!(hash_builder_branch_2_hash, subtrie_branch_2_hash);
4376
4377        let subtrie_leaf_1_hash = subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().unwrap();
4378        let hash_builder_leaf_1_hash =
4379            RlpNode::from_rlp(proof_nodes.get(&leaf_1_path).unwrap().as_ref()).as_hash().unwrap();
4380        assert_eq!(hash_builder_leaf_1_hash, subtrie_leaf_1_hash);
4381
4382        let hash_builder_leaf_2_hash =
4383            RlpNode::from_rlp(proof_nodes.get(&leaf_2_path).unwrap().as_ref()).as_hash().unwrap();
4384        let subtrie_leaf_2_hash = subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().unwrap();
4385        assert_eq!(hash_builder_leaf_2_hash, subtrie_leaf_2_hash);
4386
4387        let hash_builder_leaf_3_hash =
4388            RlpNode::from_rlp(proof_nodes.get(&leaf_3_path).unwrap().as_ref()).as_hash().unwrap();
4389        let subtrie_leaf_3_hash = subtrie.nodes.get(&leaf_3_path).unwrap().cached_hash().unwrap();
4390        assert_eq!(hash_builder_leaf_3_hash, subtrie_leaf_3_hash);
4391    }
4392
4393    #[test]
4394    fn test_remove_leaf_branch_becomes_extension() {
4395        //
4396        // 0x:      Extension (Key = 5)
4397        // 0x5:     └── Branch (Mask = 1001)
4398        // 0x50:        ├── 0 -> Extension (Key = 23)
4399        // 0x5023:      │        └── Branch (Mask = 0101)
4400        // 0x50231:     │            ├── 1 -> Leaf
4401        // 0x50233:     │            └── 3 -> Leaf
4402        // 0x53:        └── 3 -> Leaf (Key = 7)
4403        //
4404        // After removing 0x53, extension+branch+extension become a single extension
4405        //
4406        let mut trie = new_test_trie(
4407            [
4408                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
4409                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b1001), &[])),
4410                (
4411                    Nibbles::from_nibbles([0x5, 0x0]),
4412                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3])),
4413                ),
4414                (
4415                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
4416                    SparseNode::new_branch(TrieMask::new(0b0101), &[]),
4417                ),
4418                (
4419                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
4420                    SparseNode::new_leaf(leaf_key([], 59)),
4421                ),
4422                (
4423                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
4424                    SparseNode::new_leaf(leaf_key([], 59)),
4425                ),
4426                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_leaf(leaf_key([0x7], 62))),
4427            ]
4428            .into_iter(),
4429        );
4430
4431        // Remove the leaf with a full path of 0x537
4432        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x7]));
4433        trie.remove_leaf(&leaf_full_path).unwrap();
4434
4435        let upper_subtrie = &trie.upper_subtrie;
4436        let lower_subtrie_50 = trie.lower_subtries[0x50].as_revealed_ref().unwrap();
4437
4438        // Check that the `SparseSubtrie` the leaf was removed from was itself removed, as it is now
4439        // empty.
4440        assert_matches!(trie.lower_subtries[0x53].as_revealed_ref(), None);
4441
4442        // Check that the leaf node was removed, and that its parent/grandparent were modified
4443        // appropriately.
4444        assert_matches!(
4445            upper_subtrie.nodes.get(&Nibbles::from_nibbles([])),
4446            Some(SparseNode::Extension{ key, ..})
4447            if key == &Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])
4448        );
4449        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
4450        assert_matches!(lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0])), None);
4451        assert_matches!(
4452            lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])),
4453            Some(SparseNode::Branch{ state_mask, .. })
4454            if *state_mask == 0b0101.into()
4455        );
4456    }
4457
4458    #[test]
4459    fn test_remove_leaf_branch_becomes_leaf() {
4460        //
4461        // 0x:      Branch (Mask = 0011)
4462        // 0x0:     ├── 0 -> Leaf (Key = 12)
4463        // 0x1:     └── 1 -> Leaf (Key = 34)
4464        //
4465        // After removing 0x012, branch becomes a leaf
4466        //
4467        let mut trie = new_test_trie(
4468            [
4469                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
4470                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4471                (Nibbles::from_nibbles([0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 63))),
4472            ]
4473            .into_iter(),
4474        );
4475
4476        // Add the branch node to updated_nodes to simulate it being modified earlier
4477        if let Some(updates) = trie.updates.as_mut() {
4478            updates
4479                .updated_nodes
4480                .insert(Nibbles::default(), BranchNodeCompact::new(0b11, 0, 0, vec![], None));
4481        }
4482
4483        // Remove the leaf with a full path of 0x012
4484        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4485        trie.remove_leaf(&leaf_full_path).unwrap();
4486
4487        let upper_subtrie = &trie.upper_subtrie;
4488
4489        // Check that the leaf's value was removed
4490        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4491
4492        // Check that the branch node collapsed into a leaf node with the remaining child's key
4493        assert_matches!(
4494            upper_subtrie.nodes.get(&Nibbles::default()),
4495            Some(SparseNode::Leaf{ key, ..})
4496            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4497        );
4498
4499        // Check that the remaining child node was removed
4500        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4501        // Check that the removed child node was also removed
4502        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])), None);
4503
4504        // Check that updates were tracked correctly when branch collapsed
4505        let updates = trie.updates.as_ref().unwrap();
4506
4507        // The branch at root should be marked as removed since it collapsed
4508        assert!(updates.removed_nodes.contains(&Nibbles::default()));
4509
4510        // The branch should no longer be in updated_nodes
4511        assert!(!updates.updated_nodes.contains_key(&Nibbles::default()));
4512    }
4513
4514    #[test]
4515    fn test_remove_leaf_extension_becomes_leaf() {
4516        //
4517        // 0x:      Extension (Key = 5)
4518        // 0x5:     └── Branch (Mask = 0011)
4519        // 0x50:        ├── 0 -> Leaf (Key = 12)
4520        // 0x51:        └── 1 -> Leaf (Key = 34)
4521        //
4522        // After removing 0x5012, extension+branch becomes a leaf
4523        //
4524        let mut trie = new_test_trie(
4525            [
4526                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
4527                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
4528                (Nibbles::from_nibbles([0x5, 0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 62))),
4529                (Nibbles::from_nibbles([0x5, 0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
4530            ]
4531            .into_iter(),
4532        );
4533
4534        // Remove the leaf with a full path of 0x5012
4535        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x1, 0x2]));
4536        trie.remove_leaf(&leaf_full_path).unwrap();
4537
4538        let upper_subtrie = &trie.upper_subtrie;
4539
4540        // Check that both lower subtries were removed. 0x50 should have been removed because
4541        // removing its leaf made it empty. 0x51 should have been removed after its own leaf was
4542        // collapsed into the upper trie, leaving it also empty.
4543        assert_matches!(trie.lower_subtries[0x50].as_revealed_ref(), None);
4544        assert_matches!(trie.lower_subtries[0x51].as_revealed_ref(), None);
4545
4546        // Check that the other leaf's value was moved to the upper trie
4547        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]));
4548        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));
4549
4550        // Check that the extension node collapsed into a leaf node
4551        assert_matches!(
4552            upper_subtrie.nodes.get(&Nibbles::default()),
4553            Some(SparseNode::Leaf{ key, ..})
4554            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]))
4555        );
4556
4557        // Check that intermediate nodes were removed
4558        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
4559    }
4560
    #[test]
    fn test_remove_leaf_branch_on_branch() {
        //
        // 0x:      Branch (Mask = 0101)
        // 0x0:     ├── 0 -> Leaf (Key = 12)
        // 0x2:     └── 2 -> Branch (Mask = 0011)
        // 0x20:        ├── 0 -> Leaf (Key = 34)
        // 0x21:        └── 1 -> Leaf (Key = 56)
        //
        // After removing 0x2034, the inner branch becomes a leaf
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0101), &[])),
                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
                (Nibbles::from_nibbles([0x2]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x2, 0x0]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
                (Nibbles::from_nibbles([0x2, 0x1]), SparseNode::new_leaf(leaf_key([0x5, 0x6], 62))),
            ]
            .into_iter(),
        );

        // Remove the leaf with a full path of 0x2034
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x0, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // Check that both lower subtries were removed. 0x20 should have been removed because
        // removing its leaf made it empty. 0x21 should have been removed after its own leaf was
        // collapsed into the upper trie, leaving it also empty.
        assert_matches!(trie.lower_subtries[0x20].as_revealed_ref(), None);
        assert_matches!(trie.lower_subtries[0x21].as_revealed_ref(), None);

        // Check that the other leaf's value was moved to the upper trie
        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x1, 0x5, 0x6]));
        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));

        // Check that the root branch still exists unchanged
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch{ state_mask, .. })
            if *state_mask == 0b0101.into()
        );

        // Check that the inner branch collapsed into a leaf carrying the surviving child's
        // nibble (0x1) prepended to that child's key (0x56)
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x2])),
            Some(SparseNode::Leaf{ key, ..})
            if key == &leaf_key([0x1, 0x5, 0x6], 63)
        );
    }
4613
    #[test]
    fn test_remove_leaf_lower_subtrie_root_path_update() {
        //
        // 0x:        Extension (Key = 123, root of lower subtrie)
        // 0x123:     └── Branch (Mask = 0011000)
        // 0x1233:        ├── 3 -> Leaf (Key = [])
        // 0x1234:        └── 4 -> Extension (Key = 5)
        // 0x12345:           └── Branch (Mask = 0011)
        // 0x123450:              ├── 0 -> Leaf (Key = [])
        // 0x123451:              └── 1 -> Leaf (Key = [])
        //
        // After removing leaf at 0x1233, the branch at 0x123 becomes an extension to 0x12345, which
        // then gets merged with the root extension at 0x. The lower subtrie's `path` field should
        // be updated from 0x123 to 0x12345.
        //
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
                (
                    // Branch with children at nibbles 3 and 4 (mask bits 3 and 4).
                    Nibbles::from_nibbles([0x1, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0011000), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 60)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x5])),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]),
                    SparseNode::new_branch(TrieMask::new(0b0011), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
            ]
            .into_iter(),
        );

        // Verify initial state - the lower subtrie's path should be 0x123
        let lower_subtrie_root_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
        assert_matches!(
            trie.lower_subtrie_for_path_mut(&lower_subtrie_root_path),
            Some(subtrie)
            if subtrie.path == lower_subtrie_root_path
        );

        // Remove the leaf at 0x1233
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]));
        trie.remove_leaf(&leaf_full_path).unwrap();

        // After removal:
        // 1. The branch at 0x123 should become an extension to 0x12345
        // 2. That extension should merge with the root extension at 0x
        // 3. The lower subtrie's path should be updated to 0x12345
        let lower_subtrie = trie.lower_subtries[0x12].as_revealed_ref().unwrap();
        assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]));

        // Verify the root extension now points all the way to 0x12345
        assert_matches!(
            trie.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, .. })
            if key == &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])
        );

        // Verify the branch at 0x12345 hasn't been modified
        assert_matches!(
            lower_subtrie.nodes.get(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            Some(SparseNode::Branch { state_mask, .. })
            if state_mask == &TrieMask::new(0b0011)
        );
    }
4693
4694    #[test]
4695    fn test_remove_leaf_remaining_child_needs_reveal() {
4696        //
4697        // 0x:      Branch (Mask = 0011)
4698        // 0x0:     ├── 0 -> Leaf (Key = 12)
4699        // 0x1:     └── 1 -> Hash (blinded leaf)
4700        //
4701        // After removing 0x012, the hash node needs to be revealed to collapse the branch
4702        //
4703        let mut trie = new_test_trie(
4704            [
4705                (
4706                    Nibbles::default(),
4707                    SparseNode::new_branch(
4708                        TrieMask::new(0b0011),
4709                        &[(0x1, B256::repeat_byte(0xab))],
4710                    ),
4711                ),
4712                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4713            ]
4714            .into_iter(),
4715        );
4716
4717        // Create the revealed leaf used by the test setup.
4718        let revealed_leaf = create_leaf_node(leaf_key([0x3, 0x4], 63).to_vec(), 42);
4719        let mut encoded = Vec::new();
4720        revealed_leaf.encode(&mut encoded);
4721
4722        // Try removing the leaf with a full path of 0x012, this should fail because the leaf is
4723        // blinded
4724        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4725        let Err(err) = trie.remove_leaf(&leaf_full_path) else {
4726            panic!("expected error");
4727        };
4728        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if *path == Nibbles::from_nibbles([0x1]));
4729
4730        // Now reveal the leaf and try removing it again
4731        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
4732            path: Nibbles::from_nibbles([0x1]),
4733            node: revealed_leaf,
4734            masks: None,
4735        }])
4736        .unwrap();
4737        trie.remove_leaf(&leaf_full_path).unwrap();
4738
4739        let upper_subtrie = &trie.upper_subtrie;
4740
4741        // Check that the leaf value was removed
4742        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4743
4744        // Check that the branch node collapsed into a leaf node with the revealed child's key
4745        assert_matches!(
4746            upper_subtrie.nodes.get(&Nibbles::default()),
4747            Some(SparseNode::Leaf{ key, ..})
4748            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4749        );
4750
4751        // Check that the remaining child node was removed (since it was merged)
4752        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4753    }
4754
4755    #[test]
4756    fn test_remove_leaf_root() {
4757        //
4758        // 0x:      Leaf (Key = 123)
4759        //
4760        // After removing 0x123, the trie becomes empty
4761        //
4762        let mut trie = new_test_trie(core::iter::once((
4763            Nibbles::default(),
4764            SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
4765        )));
4766
4767        // Remove the leaf with a full key of 0x123
4768        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
4769        trie.remove_leaf(&leaf_full_path).unwrap();
4770
4771        let upper_subtrie = &trie.upper_subtrie;
4772
4773        // Check that the leaf value was removed
4774        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4775
4776        // Check that the root node was changed to Empty
4777        assert_matches!(upper_subtrie.nodes.get(&Nibbles::default()), Some(SparseNode::Empty));
4778    }
4779
    #[test]
    fn test_remove_leaf_unsets_hash_along_path() {
        //
        // Creates a trie structure:
        // 0x:      Branch (with hash set)
        // 0x0:     ├── Extension (with hash set)
        // 0x01:    │   └── Branch (with hash set)
        // 0x012:   │       ├── Leaf (Key = 34, with hash set)
        // 0x013:   │       ├── Leaf (Key = 56, with hash set)
        // 0x014:   │       └── Leaf (Key = 67, with hash set)
        // 0x1:     └── Leaf (Key = 78, with hash set)
        //
        // When removing leaf at 0x01234, all nodes along the path (root branch,
        // extension at 0x0, branch at 0x01) should have their hash field unset
        //

        // Helper producing a `Cached` node state carrying the given hash, so we can
        // later observe which nodes got flipped back to `Dirty`.
        let make_revealed = |hash: B256| SparseNodeState::Cached {
            rlp_node: RlpNode::word_rlp(&hash),
            store_in_db_trie: None,
        };
        let mut trie = new_test_trie(
            [
                (
                    Nibbles::default(),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b0011),
                        state: make_revealed(B256::repeat_byte(0x10)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0]),
                    SparseNode::Extension {
                        key: Nibbles::from_nibbles([0x1]),
                        state: make_revealed(B256::repeat_byte(0x20)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1]),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b11100),
                        state: make_revealed(B256::repeat_byte(0x30)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x2]),
                    SparseNode::Leaf {
                        key: leaf_key([0x3, 0x4], 61),
                        state: make_revealed(B256::repeat_byte(0x40)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x3]),
                    SparseNode::Leaf {
                        key: leaf_key([0x5, 0x6], 61),
                        state: make_revealed(B256::repeat_byte(0x50)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x4]),
                    SparseNode::Leaf {
                        key: leaf_key([0x6, 0x7], 61),
                        state: make_revealed(B256::repeat_byte(0x60)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x1]),
                    SparseNode::Leaf {
                        key: leaf_key([0x7, 0x8], 63),
                        state: make_revealed(B256::repeat_byte(0x70)),
                    },
                ),
            ]
            .into_iter(),
        );

        // Remove a leaf which does not exist; this should have no effect.
        trie.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4, 0xF])))
            .unwrap();
        for (path, node) in trie.all_nodes() {
            assert!(node.cached_hash().is_some(), "path {path:?} should still have a hash");
        }

        // Remove the leaf at path 0x01234
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        // The lower subtrie rooted at path 0x01 (subtrie array index 0x01).
        let lower_subtrie_10 = trie.lower_subtries[0x01].as_revealed_ref().unwrap();

        // Verify that hash fields are unset for all nodes along the path to the removed leaf
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])),
            Some(SparseNode::Extension { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1])),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );

        // Verify that nodes not on the path still have their hashes
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x3])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x4])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
    }
4901
    #[test]
    fn test_parallel_sparse_trie_root() {
        // End-to-end root computation across subtrie boundaries: an extension and a
        // branch live in the upper subtrie (depth < UPPER_TRIE_MAX_DEPTH) while the
        // two leaves land in lower subtries. All cached hashes are invalidated so
        // `root()` must recompute them, and the result is checked against the
        // reference `run_hash_builder` implementation.

        // Step 1: Create the trie structure
        // Extension node at 0x with key 0x2 (goes to upper subtrie)
        let extension_path = Nibbles::new();
        let extension_key = Nibbles::from_nibbles([0x2]);

        // Branch node at 0x2 with children 0 and 1 (goes to upper subtrie)
        let branch_path = Nibbles::from_nibbles([0x2]);

        // Leaf nodes at 0x20 and 0x21 (go to lower subtries)
        let leaf_1_path = Nibbles::from_nibbles([0x2, 0x0]);
        let leaf_1_key = Nibbles::from_nibbles(vec![0; 62]); // Remaining key
        let leaf_1_full_path = Nibbles::from_nibbles([vec![0x2, 0x0], vec![0; 62]].concat());

        let leaf_2_path = Nibbles::from_nibbles([0x2, 0x1]);
        let leaf_2_key = Nibbles::from_nibbles(vec![0; 62]); // Remaining key
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0x2, 0x1], vec![0; 62]].concat());

        // Create accounts
        let account_1 = create_account(1);
        let account_2 = create_account(2);

        // Create leaf nodes
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);

        // Create branch node with children at indices 0 and 1
        let branch = create_branch_node(
            extension_key,
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // Step 2: Reveal nodes in the trie
        let mut trie = ParallelSparseTrie::from_root(branch, None, true).unwrap();
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Step 3: Reset hashes for all revealed nodes to test actual hash calculation
        // (revealed nodes retain cached hashes; marking them Dirty forces recompute).
        // Reset upper subtrie node hashes
        trie.upper_subtrie
            .nodes
            .get_mut(&extension_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.upper_subtrie.nodes.get_mut(&branch_path).unwrap().set_state(SparseNodeState::Dirty);

        // Reset lower subtrie node hashes
        let leaf_1_subtrie_idx = path_subtrie_index_unchecked(&leaf_1_path);
        let leaf_2_subtrie_idx = path_subtrie_index_unchecked(&leaf_2_path);

        trie.lower_subtries[leaf_1_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_1_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.lower_subtries[leaf_2_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_2_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);

        // Step 4: Add changed leaf node paths to prefix set
        // (root() only walks paths present in the prefix set).
        trie.prefix_set.insert(leaf_1_full_path);
        trie.prefix_set.insert(leaf_2_full_path);

        // Step 5: Calculate root using our implementation
        let root = trie.root();

        // Step 6: Calculate root using HashBuilder for comparison
        let (hash_builder_root, _, _proof_nodes, _, _) = run_hash_builder(
            [(leaf_1_full_path, account_1), (leaf_2_full_path, account_2)],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_path, leaf_1_full_path, leaf_2_full_path],
        );

        // Step 7: Verify the roots match
        assert_eq!(root, hash_builder_root);

        // Verify hashes were computed (every Dirty node must have been re-hashed)
        let leaf_1_subtrie = trie.lower_subtries[leaf_1_subtrie_idx].as_revealed_ref().unwrap();
        let leaf_2_subtrie = trie.lower_subtries[leaf_2_subtrie_idx].as_revealed_ref().unwrap();
        assert!(trie.upper_subtrie.nodes.get(&extension_path).unwrap().cached_hash().is_some());
        assert!(trie.upper_subtrie.nodes.get(&branch_path).unwrap().cached_hash().is_some());
        assert!(leaf_1_subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().is_some());
        assert!(leaf_2_subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().is_some());
    }
5001
5002    #[test]
5003    fn sparse_trie_empty_update_one() {
5004        let ctx = ParallelSparseTrieTestContext;
5005
5006        let key = Nibbles::unpack(B256::with_last_byte(42));
5007        let value = || Account::default();
5008        let value_encoded = || {
5009            let mut account_rlp = Vec::new();
5010            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5011            account_rlp
5012        };
5013
5014        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5015            run_hash_builder(
5016                [(key, value())],
5017                NoopAccountTrieCursor::default(),
5018                Default::default(),
5019                [key],
5020            );
5021
5022        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5023        ctx.update_leaves(&mut sparse, [(key, value_encoded())]);
5024        ctx.assert_with_hash_builder(
5025            &mut sparse,
5026            hash_builder_root,
5027            hash_builder_updates,
5028            hash_builder_proof_nodes,
5029        );
5030    }
5031
5032    #[test]
5033    fn sparse_trie_empty_update_multiple_lower_nibbles() {
5034        let ctx = ParallelSparseTrieTestContext;
5035
5036        let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::<Vec<_>>();
5037        let value = || Account::default();
5038        let value_encoded = || {
5039            let mut account_rlp = Vec::new();
5040            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5041            account_rlp
5042        };
5043
5044        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5045            run_hash_builder(
5046                paths.iter().copied().zip(core::iter::repeat_with(value)),
5047                NoopAccountTrieCursor::default(),
5048                Default::default(),
5049                paths.clone(),
5050            );
5051
5052        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5053        ctx.update_leaves(
5054            &mut sparse,
5055            paths.into_iter().zip(core::iter::repeat_with(value_encoded)),
5056        );
5057
5058        ctx.assert_with_hash_builder(
5059            &mut sparse,
5060            hash_builder_root,
5061            hash_builder_updates,
5062            hash_builder_proof_nodes,
5063        );
5064    }
5065
5066    #[test]
5067    fn sparse_trie_empty_update_multiple_upper_nibbles() {
5068        let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5069        let value = || Account::default();
5070        let value_encoded = || {
5071            let mut account_rlp = Vec::new();
5072            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5073            account_rlp
5074        };
5075
5076        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5077            run_hash_builder(
5078                paths.iter().copied().zip(core::iter::repeat_with(value)),
5079                NoopAccountTrieCursor::default(),
5080                Default::default(),
5081                paths.clone(),
5082            );
5083        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5084        for path in &paths {
5085            sparse.update_leaf(*path, value_encoded()).unwrap();
5086        }
5087        let sparse_root = sparse.root();
5088        let sparse_updates = sparse.take_updates();
5089
5090        assert_eq!(sparse_root, hash_builder_root);
5091        assert_eq!(sparse_updates.updated_nodes, hash_builder_updates.account_nodes);
5092        assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
5093    }
5094
5095    #[test]
5096    fn sparse_trie_empty_update_multiple() {
5097        let ctx = ParallelSparseTrieTestContext;
5098
5099        let paths = (0..=255)
5100            .map(|b| {
5101                Nibbles::unpack(if b % 2 == 0 {
5102                    B256::repeat_byte(b)
5103                } else {
5104                    B256::with_last_byte(b)
5105                })
5106            })
5107            .collect::<Vec<_>>();
5108        let value = || Account::default();
5109        let value_encoded = || {
5110            let mut account_rlp = Vec::new();
5111            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5112            account_rlp
5113        };
5114
5115        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5116            run_hash_builder(
5117                paths.iter().sorted_unstable().copied().zip(core::iter::repeat_with(value)),
5118                NoopAccountTrieCursor::default(),
5119                Default::default(),
5120                paths.clone(),
5121            );
5122
5123        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5124        ctx.update_leaves(
5125            &mut sparse,
5126            paths.iter().copied().zip(core::iter::repeat_with(value_encoded)),
5127        );
5128        ctx.assert_with_hash_builder(
5129            &mut sparse,
5130            hash_builder_root,
5131            hash_builder_updates,
5132            hash_builder_proof_nodes,
5133        );
5134    }
5135
5136    #[test]
5137    fn sparse_trie_empty_update_repeated() {
5138        let ctx = ParallelSparseTrieTestContext;
5139
5140        let paths = (0..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5141        let old_value = Account { nonce: 1, ..Default::default() };
5142        let old_value_encoded = {
5143            let mut account_rlp = Vec::new();
5144            old_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5145            account_rlp
5146        };
5147        let new_value = Account { nonce: 2, ..Default::default() };
5148        let new_value_encoded = {
5149            let mut account_rlp = Vec::new();
5150            new_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5151            account_rlp
5152        };
5153
5154        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5155            run_hash_builder(
5156                paths.iter().copied().zip(core::iter::repeat_with(|| old_value)),
5157                NoopAccountTrieCursor::default(),
5158                Default::default(),
5159                paths.clone(),
5160            );
5161
5162        let mut sparse = ParallelSparseTrie::default().with_updates(true);
5163        ctx.update_leaves(
5164            &mut sparse,
5165            paths.iter().copied().zip(core::iter::repeat(old_value_encoded)),
5166        );
5167        ctx.assert_with_hash_builder(
5168            &mut sparse,
5169            hash_builder_root,
5170            hash_builder_updates,
5171            hash_builder_proof_nodes,
5172        );
5173
5174        let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5175            run_hash_builder(
5176                paths.iter().copied().zip(core::iter::repeat(new_value)),
5177                NoopAccountTrieCursor::default(),
5178                Default::default(),
5179                paths.clone(),
5180            );
5181
5182        ctx.update_leaves(
5183            &mut sparse,
5184            paths.iter().copied().zip(core::iter::repeat(new_value_encoded)),
5185        );
5186        ctx.assert_with_hash_builder(
5187            &mut sparse,
5188            hash_builder_root,
5189            hash_builder_updates,
5190            hash_builder_proof_nodes,
5191        );
5192    }
5193
    #[test]
    fn sparse_trie_remove_leaf() {
        // Removes leaves one by one from a six-leaf trie and asserts the exact node
        // layout after each removal, covering: dropping a branch child, collapsing
        // a one-child branch into a leaf, collapsing into an extension, and finally
        // emptying the trie.
        let ctx = ParallelSparseTrieTestContext;
        let mut sparse = ParallelSparseTrie::default();

        let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec();

        ctx.update_leaves(
            &mut sparse,
            [
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
                    value.clone(),
                ),
                (pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])), value),
            ],
        );

        // Initial layout:
        //
        // Extension (Key = 5)
        // └── Branch (Mask = 1011)
        //     ├── 0 -> Extension (Key = 23)
        //     │        └── Branch (Mask = 0101)
        //     │              ├── 1 -> Leaf (Key = 1, Path = 50231)
        //     │              └── 3 -> Leaf (Key = 3, Path = 50233)
        //     ├── 2 -> Leaf (Key = 013, Path = 52013)
        //     └── 3 -> Branch (Mask = 0101)
        //                ├── 1 -> Leaf (Key = 3102, Path = 53102)
        //                └── 3 -> Branch (Mask = 1010)
        //                       ├── 0 -> Leaf (Key = 3302, Path = 53302)
        //                       └── 2 -> Leaf (Key = 3320, Path = 53320)
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x1, 0x3], 62))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove 52013: the root branch loses child 2 (mask 1011 -> 1001); no
        // collapse since two children remain.
        sparse
            .remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])))
            .unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Extension (Key = 23)
        //     │        └── Branch (Mask = 0101)
        //     │              ├── 1 -> Leaf (Path = 50231...)
        //     │              └── 3 -> Leaf (Path = 50233...)
        //     └── 3 -> Branch (Mask = 0101)
        //                ├── 1 -> Leaf (Path = 53102...)
        //                └── 3 -> Branch (Mask = 1010)
        //                       ├── 0 -> Leaf (Path = 53302...)
        //                       └── 2 -> Leaf (Path = 53320...)
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove 50231: its parent branch drops to one child, so the branch and
        // the extension above it collapse into a single leaf at 0x50.
        sparse
            .remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])))
            .unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Leaf (Path = 50233...)
        //     └── 3 -> Branch (Mask = 0101)
        //                ├── 1 -> Leaf (Path = 53102...)
        //                └── 3 -> Branch (Mask = 1010)
        //                       ├── 0 -> Leaf (Path = 53302...)
        //                       └── 2 -> Leaf (Path = 53320...)
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove 53102: the branch at 0x53 drops to one child, which is itself a
        // branch, so 0x53 becomes an extension pointing at it.
        sparse
            .remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])))
            .unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Leaf (Path = 50233...)
        //     └── 3 -> Branch (Mask = 1010)
        //                ├── 0 -> Leaf (Path = 53302...)
        //                └── 2 -> Leaf (Path = 53320...)
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove 53320: the branch at 0x533 and the extension above it collapse
        // into a single leaf at 0x53.
        sparse
            .remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])))
            .unwrap();

        // Extension (Key = 5)
        // └── Branch (Mask = 1001)
        //     ├── 0 -> Leaf (Path = 50233...)
        //     └── 3 -> Leaf (Path = 53302...)
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_leaf(leaf_key([0x3, 0x0, 0x2], 62))
                ),
            ])
        );

        // Remove 50233: only one leaf remains, so the root extension and branch
        // collapse into a single root leaf.
        sparse
            .remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])))
            .unwrap();

        // Leaf (Path = 53302...)
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(
                Nibbles::default(),
                SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([
                    0x5, 0x3, 0x3, 0x0, 0x2
                ])))
            ),])
        );

        // Remove the last leaf: the trie becomes empty.
        sparse
            .remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])))
            .unwrap();

        // Empty
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty)])
        );
    }
5490
5491    #[test]
5492    fn sparse_trie_remove_leaf_blinded() {
5493        let leaf = LeafNode::new(
5494            Nibbles::default(),
5495            alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5496        );
5497        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5498            Nibbles::default(),
5499            vec![
5500                RlpNode::word_rlp(&B256::repeat_byte(1)),
5501                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5502            ],
5503            TrieMask::new(0b11),
5504            None,
5505        ));
5506        let mut sparse = ParallelSparseTrie::from_root(
5507            branch.clone(),
5508            Some(BranchNodeMasks {
5509                hash_mask: TrieMask::new(0b01),
5510                tree_mask: TrieMask::default(),
5511            }),
5512            false,
5513        )
5514        .unwrap();
5515
5516        // Reveal a branch node and one of its children
5517        //
5518        // Branch (Mask = 11)
5519        // ├── 0 -> Hash (Path = 0)
5520        // └── 1 -> Leaf (Path = 1)
5521        sparse
5522            .reveal_nodes(&mut [
5523                ProofTrieNodeV2 {
5524                    path: Nibbles::default(),
5525                    node: branch,
5526                    masks: Some(BranchNodeMasks {
5527                        hash_mask: TrieMask::default(),
5528                        tree_mask: TrieMask::new(0b01),
5529                    }),
5530                },
5531                ProofTrieNodeV2 {
5532                    path: Nibbles::from_nibbles([0x1]),
5533                    node: TrieNodeV2::Leaf(leaf),
5534                    masks: None,
5535                },
5536            ])
5537            .unwrap();
5538
5539        // Removing a blinded leaf should result in an error
5540        assert_matches!(
5541            sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x0]))).map_err(|e| e.into_kind()),
5542            Err(SparseTrieErrorKind::BlindedNode(path)) if path == Nibbles::from_nibbles([0x0])
5543        );
5544    }
5545
5546    #[test]
5547    fn sparse_trie_remove_leaf_non_existent() {
5548        let leaf = LeafNode::new(
5549            Nibbles::default(),
5550            alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5551        );
5552        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5553            Nibbles::default(),
5554            vec![
5555                RlpNode::word_rlp(&B256::repeat_byte(1)),
5556                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5557            ],
5558            TrieMask::new(0b11),
5559            None,
5560        ));
5561        let mut sparse = ParallelSparseTrie::from_root(
5562            branch.clone(),
5563            Some(BranchNodeMasks {
5564                hash_mask: TrieMask::new(0b01),
5565                tree_mask: TrieMask::default(),
5566            }),
5567            false,
5568        )
5569        .unwrap();
5570
5571        // Reveal a branch node and one of its children
5572        //
5573        // Branch (Mask = 11)
5574        // ├── 0 -> Hash (Path = 0)
5575        // └── 1 -> Leaf (Path = 1)
5576        sparse
5577            .reveal_nodes(&mut [
5578                ProofTrieNodeV2 {
5579                    path: Nibbles::default(),
5580                    node: branch,
5581                    masks: Some(BranchNodeMasks {
5582                        hash_mask: TrieMask::default(),
5583                        tree_mask: TrieMask::new(0b01),
5584                    }),
5585                },
5586                ProofTrieNodeV2 {
5587                    path: Nibbles::from_nibbles([0x1]),
5588                    node: TrieNodeV2::Leaf(leaf),
5589                    masks: None,
5590                },
5591            ])
5592            .unwrap();
5593
5594        // Removing a non-existent leaf should be a noop
5595        let sparse_old = sparse.clone();
5596        assert_matches!(
5597            sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x2]))),
5598            Ok(())
5599        );
5600        assert_eq!(sparse, sparse_old);
5601    }
5602
    #[test]
    fn sparse_trie_fuzz() {
        // Having only the first 3 nibbles set, we narrow down the range of keys
        // to 4096 different hashes. It allows us to generate collisions more likely
        // to test the sparse trie updates.
        const KEY_NIBBLES_LEN: usize = 3;

        // Runs one fuzz scenario: for each (insert-batch, delete-set) pair, applies the
        // same changes to the sparse trie and to the reference hash builder, then checks
        // after both the insert phase and the delete phase that the roots, the branch
        // node updates, and the proof nodes of the two implementations agree.
        fn test(updates: Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)>) {
            {
                // Mirrors the cumulative key/value state as the hash builder sees it.
                let mut state = BTreeMap::default();
                let provider_factory = create_test_provider_factory();
                let mut sparse = ParallelSparseTrie::default().with_updates(true);

                for (update, keys_to_delete) in updates {
                    // Insert state updates into the sparse trie and calculate the root
                    for (key, account) in update.clone() {
                        let account = account.into_trie_account(EMPTY_ROOT_HASH);
                        let mut account_rlp = Vec::new();
                        account.encode(&mut account_rlp);
                        sparse.update_leaf(key, account_rlp).unwrap();
                    }
                    // We need to clone the sparse trie, so that all updated branch nodes are
                    // preserved, and not only those that were changed after the last call to
                    // `root()`.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    // Insert state updates into the hash builder and calculate the root
                    state.extend(update);
                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                Default::default(),
                                state.keys().copied(),
                            )
                        });

                    // Extract account nodes before moving hash_builder_updates
                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Write trie updates to the database, so that the next iteration's trie
                    // cursor observes the nodes produced in this one.
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    // Assert that the sparse trie root matches the hash builder root
                    assert_eq!(sparse_root, hash_builder_root);
                    // Assert that the sparse trie updates match the hash builder updates
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    // Assert that the sparse trie nodes match the hash builder proof nodes
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );

                    // Delete some keys from both the hash builder and the sparse trie and check
                    // that the sparse trie root still matches the hash builder root
                    for key in &keys_to_delete {
                        state.remove(key).unwrap();
                        sparse.remove_leaf(key).unwrap();
                    }

                    // We need to clone the sparse trie, so that all updated branch nodes are
                    // preserved, and not only those that were changed after the last call to
                    // `root()`.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                // The hash builder needs the deleted keys as packed B256s.
                                keys_to_delete
                                    .iter()
                                    .map(|nibbles| B256::from_slice(&nibbles.pack()))
                                    .collect(),
                                state.keys().copied(),
                            )
                        });

                    // Extract account nodes before moving hash_builder_updates
                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Write trie updates to the database
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    // Assert that the sparse trie root matches the hash builder root
                    assert_eq!(sparse_root, hash_builder_root);
                    // Assert that the sparse trie updates match the hash builder updates
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    // Assert that the sparse trie nodes match the hash builder proof nodes
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );
                }
            }
        }

        // Pairs each generated insert-batch with a random set of keys to delete.
        // Deletions are sampled from all keys inserted so far (across batches) and
        // removed from the pool so the same key is never scheduled for deletion twice;
        // roughly half of each batch's size is deleted.
        fn transform_updates(
            updates: Vec<BTreeMap<Nibbles, Account>>,
            mut rng: impl rand::Rng,
        ) -> Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)> {
            let mut keys = BTreeSet::new();
            updates
                .into_iter()
                .map(|update| {
                    keys.extend(update.keys().copied());

                    let keys_to_delete_len = update.len() / 2;
                    let keys_to_delete = (0..keys_to_delete_len)
                        .map(|_| {
                            let key =
                                *rand::seq::IteratorRandom::choose(keys.iter(), &mut rng).unwrap();
                            // `take` removes the key from the pool; it must be present.
                            keys.take(&key).unwrap()
                        })
                        .collect();

                    (update, keys_to_delete)
                })
                .collect::<Vec<_>>()
        }

        // Generate 1..50 batches of 1..50 accounts keyed by 3-nibble prefixes (padded to
        // full width), then derive the deletion sets via `transform_updates`.
        proptest!(ProptestConfig::with_cases(10), |(
            updates in proptest::collection::vec(
                proptest::collection::btree_map(
                    any_with::<Nibbles>(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles_right),
                    arb::<Account>(),
                    1..50,
                ),
                1..50,
            ).prop_perturb(transform_updates)
        )| {
            test(updates)
        });
    }
5758
5759    #[test]
5760    fn sparse_trie_two_leaves_at_lower_roots() {
5761        let mut trie = ParallelSparseTrie::default().with_updates(true);
5762        let key_50 = Nibbles::unpack(hex!(
5763            "0x5000000000000000000000000000000000000000000000000000000000000000"
5764        ));
5765        let key_51 = Nibbles::unpack(hex!(
5766            "0x5100000000000000000000000000000000000000000000000000000000000000"
5767        ));
5768
5769        let account = Account::default().into_trie_account(EMPTY_ROOT_HASH);
5770        let mut account_rlp = Vec::new();
5771        account.encode(&mut account_rlp);
5772
5773        // Add a leaf and calculate the root.
5774        trie.update_leaf(key_50, account_rlp.clone()).unwrap();
5775        trie.root();
5776
5777        // Add a second leaf and assert that the root is the expected value.
5778        trie.update_leaf(key_51, account_rlp.clone()).unwrap();
5779
5780        let expected_root =
5781            hex!("0xdaf0ef9f91a2f179bb74501209effdb5301db1697bcab041eca2234b126e25de");
5782        let root = trie.root();
5783        assert_eq!(root, expected_root);
5784        assert_eq!(SparseTrieUpdates::default(), trie.take_updates());
5785    }
5786
5787    /// We have three leaves that share the same prefix: 0x00, 0x01 and 0x02. Hash builder trie has
5788    /// only nodes 0x00 and 0x01, and we have proofs for them. Node B is new and inserted in the
5789    /// sparse trie first.
5790    ///
5791    /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie.
5792    /// 2. Insert leaf 0x01 into the sparse trie.
5793    /// 3. Reveal the hash builder proof to leaf 0x02 in the sparse trie.
5794    ///
5795    /// The hash builder proof to the leaf 0x02 didn't have the leaf 0x01 at the corresponding
5796    /// nibble of the branch node, so we need to adjust the branch node instead of fully
5797    /// replacing it.
5798    #[test]
5799    fn sparse_trie_reveal_node_1() {
5800        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00]));
5801        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01]));
5802        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x02]));
5803        let value = || Account::default();
5804        let value_encoded = || {
5805            let mut account_rlp = Vec::new();
5806            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5807            account_rlp
5808        };
5809
5810        // Generate the proof for the root node and initialize the sparse trie with it
5811        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5812            run_hash_builder(
5813                [(key1(), value()), (key3(), value())],
5814                NoopAccountTrieCursor::default(),
5815                Default::default(),
5816                [Nibbles::default()],
5817            );
5818        let masks = match (
5819            branch_node_hash_masks.get(&Nibbles::default()).copied(),
5820            branch_node_tree_masks.get(&Nibbles::default()).copied(),
5821        ) {
5822            (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
5823            (Some(h), None) => {
5824                Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
5825            }
5826            (None, Some(t)) => {
5827                Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
5828            }
5829            (None, None) => None,
5830        };
5831        let mut sparse = ParallelSparseTrie::from_root(
5832            TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
5833            masks,
5834            false,
5835        )
5836        .unwrap();
5837
5838        // Generate the proof for the first key and reveal it in the sparse trie
5839        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5840            run_hash_builder(
5841                [(key1(), value()), (key3(), value())],
5842                NoopAccountTrieCursor::default(),
5843                Default::default(),
5844                [key1()],
5845            );
5846        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
5847            .nodes_sorted()
5848            .into_iter()
5849            .map(|(path, node)| {
5850                let hash_mask = branch_node_hash_masks.get(&path).copied();
5851                let tree_mask = branch_node_tree_masks.get(&path).copied();
5852                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
5853                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
5854            })
5855            .collect();
5856        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
5857
5858        // Check that the branch node exists with only two nibbles set
5859        assert_matches!(
5860            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
5861            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b101)
5862        );
5863
5864        // Insert the leaf for the second key
5865        sparse.update_leaf(key2(), value_encoded()).unwrap();
5866
5867        // Check that the branch node was updated and another nibble was set
5868        assert_matches!(
5869            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
5870            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
5871        );
5872
5873        // Generate the proof for the third key and reveal it in the sparse trie
5874        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5875            run_hash_builder(
5876                [(key1(), value()), (key3(), value())],
5877                NoopAccountTrieCursor::default(),
5878                Default::default(),
5879                [key3()],
5880            );
5881        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
5882            .nodes_sorted()
5883            .into_iter()
5884            .map(|(path, node)| {
5885                let hash_mask = branch_node_hash_masks.get(&path).copied();
5886                let tree_mask = branch_node_tree_masks.get(&path).copied();
5887                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
5888                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
5889            })
5890            .collect();
5891        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
5892
5893        // Check that nothing changed in the branch node
5894        assert_matches!(
5895            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
5896            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
5897        );
5898
5899        // Generate the nodes for the full trie with all three key using the hash builder, and
5900        // compare them to the sparse trie
5901        let (_, _, hash_builder_proof_nodes, _, _) = run_hash_builder(
5902            [(key1(), value()), (key2(), value()), (key3(), value())],
5903            NoopAccountTrieCursor::default(),
5904            Default::default(),
5905            [key1(), key2(), key3()],
5906        );
5907
5908        assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
5909    }
5910
5911    /// We have three leaves: 0x0000, 0x0101, and 0x0102. Hash builder trie has all nodes, and we
5912    /// have proofs for them.
5913    ///
5914    /// 1. Reveal the hash builder proof to leaf 0x00 in the sparse trie.
5915    /// 2. Remove leaf 0x00 from the sparse trie (that will remove the branch node and create an
5916    ///    extension node with the key 0x0000).
5917    /// 3. Reveal the hash builder proof to leaf 0x0101 in the sparse trie.
5918    ///
5919    /// The hash builder proof to the leaf 0x0101 had a branch node in the path, but we turned it
5920    /// into an extension node, so it should ignore this node.
5921    #[test]
5922    fn sparse_trie_reveal_node_2() {
5923        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x00]));
5924        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x01]));
5925        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x02]));
5926        let value = || Account::default();
5927
5928        // Generate the proof for the root node and initialize the sparse trie with it
5929        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5930            run_hash_builder(
5931                [(key1(), value()), (key2(), value()), (key3(), value())],
5932                NoopAccountTrieCursor::default(),
5933                Default::default(),
5934                [Nibbles::default()],
5935            );
5936        let masks = match (
5937            branch_node_hash_masks.get(&Nibbles::default()).copied(),
5938            branch_node_tree_masks.get(&Nibbles::default()).copied(),
5939        ) {
5940            (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
5941            (Some(h), None) => {
5942                Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
5943            }
5944            (None, Some(t)) => {
5945                Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
5946            }
5947            (None, None) => None,
5948        };
5949        let mut sparse = ParallelSparseTrie::from_root(
5950            TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
5951            masks,
5952            false,
5953        )
5954        .unwrap();
5955
5956        // Generate the proof for the children of the root branch node and reveal it in the sparse
5957        // trie
5958        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5959            run_hash_builder(
5960                [(key1(), value()), (key2(), value()), (key3(), value())],
5961                NoopAccountTrieCursor::default(),
5962                Default::default(),
5963                [key1(), Nibbles::from_nibbles_unchecked([0x01])],
5964            );
5965        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
5966            .nodes_sorted()
5967            .into_iter()
5968            .map(|(path, node)| {
5969                let hash_mask = branch_node_hash_masks.get(&path).copied();
5970                let tree_mask = branch_node_tree_masks.get(&path).copied();
5971                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
5972                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
5973            })
5974            .collect();
5975        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
5976
5977        // Check that the branch node exists
5978        assert_matches!(
5979            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
5980            Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b11)
5981        );
5982
5983        // Remove the leaf for the first key
5984        sparse.remove_leaf(&key1()).unwrap();
5985
5986        // Check that the branch node was turned into an extension node
5987        assert_eq!(
5988            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
5989            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
5990        );
5991
5992        // Generate the proof for the third key and reveal it in the sparse trie
5993        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5994            run_hash_builder(
5995                [(key1(), value()), (key2(), value()), (key3(), value())],
5996                NoopAccountTrieCursor::default(),
5997                Default::default(),
5998                [key2()],
5999            );
6000        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6001            .nodes_sorted()
6002            .into_iter()
6003            .map(|(path, node)| {
6004                let hash_mask = branch_node_hash_masks.get(&path).copied();
6005                let tree_mask = branch_node_tree_masks.get(&path).copied();
6006                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6007                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6008            })
6009            .collect();
6010        sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6011
6012        // Check that nothing changed in the extension node
6013        assert_eq!(
6014            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6015            Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6016        );
6017    }
6018
    /// We have two leaves that share the same prefix: 0x0001 and 0x0002, and a leaf with a
    /// different prefix: 0x0100. Hash builder trie has only the first two leaves, and we have
    /// proofs for them.
    ///
    /// 1. Insert the leaf 0x0100 into the sparse trie, and check that the root extension node was
    ///    turned into a branch node.
    /// 2. Reveal the leaf 0x0001 in the sparse trie, and check that the root branch node wasn't
    ///    overwritten with the extension node from the proof.
    #[test]
    fn sparse_trie_reveal_node_3() {
        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x01]));
        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x02]));
        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x00]));
        let value = || Account::default();
        // RLP-encodes a default account for use as a leaf value.
        let value_encoded = || {
            let mut account_rlp = Vec::new();
            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
            account_rlp
        };

        // Generate the proof for the root node and initialize the sparse trie with it
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [Nibbles::default()],
            );

        let mut nodes = Vec::new();

        // NOTE(review): unlike the other reveal tests, this one decodes via `TrieNode`
        // (not `TrieNodeV2`) and converts through `from_sorted_trie_nodes` below —
        // presumably to exercise that conversion path; confirm this is intentional.
        for (path, node) in hash_builder_proof_nodes.nodes_sorted() {
            let hash_mask = branch_node_hash_masks.get(&path).copied();
            let tree_mask = branch_node_tree_masks.get(&path).copied();
            let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
            nodes.push((path, TrieNode::decode(&mut &node[..]).unwrap(), masks));
        }

        // `from_sorted_trie_nodes` expects depth-first ordering.
        nodes.sort_unstable_by(|a, b| reth_trie_common::depth_first_cmp(&a.0, &b.0));

        let nodes = ProofTrieNodeV2::from_sorted_trie_nodes(nodes);
        // The first sorted node is the root; seed the sparse trie with it.
        let mut sparse =
            ParallelSparseTrie::from_root(nodes[0].node.clone(), nodes[0].masks, false).unwrap();

        // Check that the root extension node exists
        assert_matches!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, state: SparseNodeState::Dirty }) if *key == Nibbles::from_nibbles([0x00])
        );

        // Insert the leaf with a different prefix
        sparse.update_leaf(key3(), value_encoded()).unwrap();

        // Check that the extension node was turned into a branch node
        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );

        // Generate the proof for the first key and reveal it in the sparse trie
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [key1()],
            );
        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
            .nodes_sorted()
            .into_iter()
            .map(|(path, node)| {
                let hash_mask = branch_node_hash_masks.get(&path).copied();
                let tree_mask = branch_node_tree_masks.get(&path).copied();
                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
            })
            .collect();
        sparse.reveal_nodes(&mut revealed_nodes).unwrap();

        // Check that the branch node wasn't overwritten by the extension node in the proof
        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );
    }
6104
6105    #[test]
6106    fn test_update_leaf_cross_level() {
6107        let ctx = ParallelSparseTrieTestContext;
6108        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6109
6110        // Test adding leaves that demonstrate the cross-level behavior
6111        // Based on the example: leaves 0x1234, 0x1245, 0x1334, 0x1345
6112        //
6113        // Final trie structure:
6114        // Upper trie:
6115        //   0x: Extension { key: 0x1 }
6116        //   └── 0x1: Branch { state_mask: 0x1100 }
6117        //       └── Subtrie (0x12): pointer to lower subtrie
6118        //       └── Subtrie (0x13): pointer to lower subtrie
6119        //
6120        // Lower subtrie (0x12):
6121        //   0x12: Branch { state_mask: 0x8 | 0x10 }
6122        //   ├── 0x123: Leaf { key: 0x4 }
6123        //   └── 0x124: Leaf { key: 0x5 }
6124        //
6125        // Lower subtrie (0x13):
6126        //   0x13: Branch { state_mask: 0x8 | 0x10 }
6127        //   ├── 0x133: Leaf { key: 0x4 }
6128        //   └── 0x134: Leaf { key: 0x5 }
6129
6130        // First add leaf 0x1345 - this should create a leaf in upper trie at 0x
6131        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x3, 0x4, 0x5], 1);
6132        trie.update_leaf(leaf1_path, value1.clone()).unwrap();
6133
6134        // Verify upper trie has a leaf at the root with key 1345
6135        ctx.assert_upper_subtrie(&trie)
6136            .has_leaf(
6137                &Nibbles::default(),
6138                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4, 0x5])),
6139            )
6140            .has_value(&leaf1_path, &value1);
6141
6142        // Add leaf 0x1234 - this should go first in the upper subtrie
6143        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
6144        trie.update_leaf(leaf2_path, value2.clone()).unwrap();
6145
6146        // Upper trie should now have a branch at 0x1
6147        ctx.assert_upper_subtrie(&trie)
6148            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
6149            .has_no_value(&leaf1_path)
6150            .has_no_value(&leaf2_path);
6151
6152        // Add leaf 0x1245 - this should cause a branch and create the 0x12 subtrie
6153        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x4, 0x5], 3);
6154        trie.update_leaf(leaf3_path, value3.clone()).unwrap();
6155
6156        // Verify lower subtrie at 0x12 exists with correct structure
6157        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6158            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6159            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6160            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
6161            .has_value(&leaf2_path, &value2)
6162            .has_value(&leaf3_path, &value3);
6163
6164        // Add leaf 0x1334 - this should create another lower subtrie
6165        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x3, 0x3, 0x4], 4);
6166        trie.update_leaf(leaf4_path, value4.clone()).unwrap();
6167
6168        // Verify lower subtrie at 0x13 exists with correct values
6169        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x3]))
6170            .has_value(&leaf1_path, &value1)
6171            .has_value(&leaf4_path, &value4);
6172
6173        // Verify the 0x12 subtrie still has its values
6174        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6175            .has_value(&leaf2_path, &value2)
6176            .has_value(&leaf3_path, &value3);
6177
6178        // Upper trie has no values
6179        ctx.assert_upper_subtrie(&trie)
6180            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1]))
6181            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
6182            .has_no_value(&leaf1_path)
6183            .has_no_value(&leaf2_path)
6184            .has_no_value(&leaf3_path)
6185            .has_no_value(&leaf4_path);
6186    }
6187
6188    #[test]
6189    fn test_update_leaf_split_at_level_boundary() {
6190        let ctx = ParallelSparseTrieTestContext;
6191        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6192
6193        // This test demonstrates what happens when we insert leaves that cause
6194        // splitting exactly at the upper/lower trie boundary (2 nibbles).
6195        //
6196        // Final trie structure:
6197        // Upper trie:
6198        //   0x: Extension { key: 0x12 }
6199        //       └── Subtrie (0x12): pointer to lower subtrie
6200        //
6201        // Lower subtrie (0x12):
6202        //   0x12: Branch { state_mask: 0x4 | 0x8 }
6203        //   ├── 0x122: Leaf { key: 0x4 }
6204        //   └── 0x123: Leaf { key: 0x4 }
6205
6206        // First insert a leaf that ends exactly at the boundary (2 nibbles)
6207        let (first_leaf_path, first_value) = ctx.create_test_leaf([0x1, 0x2, 0x2, 0x4], 1);
6208
6209        trie.update_leaf(first_leaf_path, first_value.clone()).unwrap();
6210
6211        // In an empty trie, the first leaf becomes the root, regardless of path length
6212        ctx.assert_upper_subtrie(&trie)
6213            .has_leaf(
6214                &Nibbles::default(),
6215                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x2, 0x4])),
6216            )
6217            .has_value(&first_leaf_path, &first_value);
6218
6219        // Now insert another leaf that shares the same 2-nibble prefix
6220        let (second_leaf_path, second_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
6221
6222        trie.update_leaf(second_leaf_path, second_value.clone()).unwrap();
6223
6224        // Now both leaves should be in a lower subtrie at index [0x1, 0x2]
6225        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6226            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x2, 0x3])
6227            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x2]), &leaf_key([0x4], 61))
6228            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6229            .has_value(&first_leaf_path, &first_value)
6230            .has_value(&second_leaf_path, &second_value);
6231
6232        // Upper subtrie should no longer have these values
6233        ctx.assert_upper_subtrie(&trie)
6234            .has_no_value(&first_leaf_path)
6235            .has_no_value(&second_leaf_path);
6236    }
6237
6238    #[test]
6239    fn test_update_subtrie_with_multiple_leaves() {
6240        let ctx = ParallelSparseTrieTestContext;
6241        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6242
6243        // First, add multiple leaves that will create a subtrie structure
6244        // All leaves share the prefix [0x1, 0x2] to ensure they create a subtrie
6245        //
6246        // This should result in a trie with the following structure:
6247        // 0x: Extension { key: 0x12 }
6248        //  └── Subtrie (0x12):
6249        //      0x12: Branch { state_mask: 0x3 | 0x4 }
6250        //      ├── 0x123: Branch { state_mask: 0x4 | 0x5 }
6251        //      │   ├── 0x1234: Leaf { key: 0x }
6252        //      │   └── 0x1235: Leaf { key: 0x }
6253        //      └── 0x124: Branch { state_mask: 0x6 | 0x7 }
6254        //          ├── 0x1246: Leaf { key: 0x }
6255        //          └── 0x1247: Leaf { key: 0x }
6256        let leaves = ctx.create_test_leaves(&[
6257            &[0x1, 0x2, 0x3, 0x4],
6258            &[0x1, 0x2, 0x3, 0x5],
6259            &[0x1, 0x2, 0x4, 0x6],
6260            &[0x1, 0x2, 0x4, 0x7],
6261        ]);
6262
6263        // Insert all leaves
6264        ctx.update_leaves(&mut trie, leaves.clone());
6265
6266        // Verify the upper subtrie has an extension node at the root with key 0x12
6267        ctx.assert_upper_subtrie(&trie)
6268            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));
6269
6270        // Verify the subtrie structure using fluent assertions
6271        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6272            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6273            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
6274            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &[0x6, 0x7])
6275            .has_value(&leaves[0].0, &leaves[0].1)
6276            .has_value(&leaves[1].0, &leaves[1].1)
6277            .has_value(&leaves[2].0, &leaves[2].1)
6278            .has_value(&leaves[3].0, &leaves[3].1);
6279
6280        // Now update one of the leaves with a new value
6281        let updated_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
6282        let (_, updated_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 100);
6283
6284        trie.update_leaf(updated_path, updated_value.clone()).unwrap();
6285
6286        // Verify the subtrie structure is maintained and value is updated
6287        // The branch structure should remain the same and all values should be present
6288        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6289            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6290            .has_value(&updated_path, &updated_value)
6291            .has_value(&leaves[1].0, &leaves[1].1)
6292            .has_value(&leaves[2].0, &leaves[2].1)
6293            .has_value(&leaves[3].0, &leaves[3].1);
6294
6295        // Add a new leaf that extends an existing branch
6296        let (new_leaf_path, new_leaf_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 200);
6297
6298        trie.update_leaf(new_leaf_path, new_leaf_value.clone()).unwrap();
6299
6300        // Verify the branch at [0x1, 0x2, 0x3] now has an additional child
6301        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6302            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6])
6303            .has_value(&new_leaf_path, &new_leaf_value);
6304    }
6305
6306    #[test]
6307    fn test_update_subtrie_extension_node_subtrie() {
6308        let ctx = ParallelSparseTrieTestContext;
6309        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6310
6311        // All leaves share the prefix [0x1, 0x2] to ensure they create a subtrie
6312        //
6313        // This should result in a trie with the following structure
6314        // 0x: Extension { key: 0x123 }
6315        //  └── Subtrie (0x12):
6316        //      0x123: Branch { state_mask: 0x3 | 0x4 }
6317        //      ├── 0x123: Leaf { key: 0x4 }
6318        //      └── 0x124: Leaf { key: 0x5 }
6319        let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x3, 0x5]]);
6320
6321        // Insert all leaves
6322        ctx.update_leaves(&mut trie, leaves.clone());
6323
6324        // Verify the upper subtrie has an extension node at the root with key 0x123
6325        ctx.assert_upper_subtrie(&trie)
6326            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6327
6328        // Verify the lower subtrie structure
6329        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6330            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
6331            .has_value(&leaves[0].0, &leaves[0].1)
6332            .has_value(&leaves[1].0, &leaves[1].1);
6333    }
6334
6335    #[test]
6336    fn update_subtrie_extension_node_cross_level() {
6337        let ctx = ParallelSparseTrieTestContext;
6338        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6339
6340        // First, add multiple leaves that will create a subtrie structure
6341        // All leaves share the prefix [0x1, 0x2] to ensure they create a branch node and subtrie
6342        //
6343        // This should result in a trie with the following structure
6344        // 0x: Extension { key: 0x12 }
6345        //  └── Subtrie (0x12):
6346        //      0x12: Branch { state_mask: 0x3 | 0x4 }
6347        //      ├── 0x123: Leaf { key: 0x4 }
6348        //      └── 0x124: Leaf { key: 0x5 }
6349        let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x4, 0x5]]);
6350
6351        // Insert all leaves
6352        ctx.update_leaves(&mut trie, leaves.clone());
6353
6354        // Verify the upper subtrie has an extension node at the root with key 0x12
6355        ctx.assert_upper_subtrie(&trie)
6356            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));
6357
6358        // Verify the lower subtrie structure
6359        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6360            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6361            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6362            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
6363            .has_value(&leaves[0].0, &leaves[0].1)
6364            .has_value(&leaves[1].0, &leaves[1].1);
6365    }
6366
6367    #[test]
6368    fn test_update_single_nibble_paths() {
6369        let ctx = ParallelSparseTrieTestContext;
6370        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6371
6372        // Test edge case: single nibble paths that create branches in upper trie
6373        //
6374        // Final trie structure:
6375        // Upper trie:
6376        //   0x: Branch { state_mask: 0x1 | 0x2 | 0x4 | 0x8 }
6377        //   ├── 0x0: Leaf { key: 0x }
6378        //   ├── 0x1: Leaf { key: 0x }
6379        //   ├── 0x2: Leaf { key: 0x }
6380        //   └── 0x3: Leaf { key: 0x }
6381
6382        // Insert leaves with single nibble paths
6383        let (leaf1_path, value1) = ctx.create_test_leaf([0x0], 1);
6384        let (leaf2_path, value2) = ctx.create_test_leaf([0x1], 2);
6385        let (leaf3_path, value3) = ctx.create_test_leaf([0x2], 3);
6386        let (leaf4_path, value4) = ctx.create_test_leaf([0x3], 4);
6387
6388        ctx.update_leaves(
6389            &mut trie,
6390            [
6391                (leaf1_path, value1.clone()),
6392                (leaf2_path, value2.clone()),
6393                (leaf3_path, value3.clone()),
6394                (leaf4_path, value4.clone()),
6395            ],
6396        );
6397
6398        // Verify upper trie has a branch at root with 4 children
6399        ctx.assert_upper_subtrie(&trie)
6400            .has_branch(&Nibbles::default(), &[0x0, 0x1, 0x2, 0x3])
6401            .has_leaf(&Nibbles::from_nibbles([0x0]), &leaf_key([], 63))
6402            .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([], 63))
6403            .has_leaf(&Nibbles::from_nibbles([0x2]), &leaf_key([], 63))
6404            .has_leaf(&Nibbles::from_nibbles([0x3]), &leaf_key([], 63))
6405            .has_value(&leaf1_path, &value1)
6406            .has_value(&leaf2_path, &value2)
6407            .has_value(&leaf3_path, &value3)
6408            .has_value(&leaf4_path, &value4);
6409    }
6410
6411    #[test]
6412    fn test_update_deep_extension_chain() {
6413        let ctx = ParallelSparseTrieTestContext;
6414        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6415
6416        // Test edge case: deep extension chains that span multiple levels
6417        //
6418        // Final trie structure:
6419        // Upper trie:
6420        //   0x: Extension { key: 0x111111 }
6421        //       └── Subtrie (0x11): pointer to lower subtrie
6422        //
6423        // Lower subtrie (0x11):
6424        //   0x111111: Branch { state_mask: 0x1 | 0x2 }
6425        //   ├── 0x1111110: Leaf { key: 0x }
6426        //   └── 0x1111111: Leaf { key: 0x }
6427
6428        // Create leaves with a long common prefix
6429        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0], 1);
6430        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1], 2);
6431
6432        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);
6433
6434        // Verify upper trie has extension with the full common prefix
6435        ctx.assert_upper_subtrie(&trie).has_extension(
6436            &Nibbles::default(),
6437            &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6438        );
6439
6440        // Verify lower subtrie has branch structure
6441        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x1]))
6442            .has_branch(&Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]), &[0x0, 0x1])
6443            .has_leaf(
6444                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0]),
6445                &leaf_key([], 57),
6446            )
6447            .has_leaf(
6448                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6449                &leaf_key([], 57),
6450            )
6451            .has_value(&leaf1_path, &value1)
6452            .has_value(&leaf2_path, &value2);
6453    }
6454
6455    #[test]
6456    fn test_update_branch_with_all_nibbles() {
6457        let ctx = ParallelSparseTrieTestContext;
6458        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6459
6460        // Test edge case: branch node with all 16 possible nibble children
6461        //
6462        // Final trie structure:
6463        // Upper trie:
6464        //   0x: Extension { key: 0xA }
6465        //       └── Subtrie (0xA0): pointer to lower subtrie
6466        //
6467        // Lower subtrie (0xA0):
6468        //   0xA0: Branch { state_mask: 0xFFFF } (all 16 children)
6469        //   ├── 0xA00: Leaf { key: 0x }
6470        //   ├── 0xA01: Leaf { key: 0x }
6471        //   ├── 0xA02: Leaf { key: 0x }
6472        //   ... (all nibbles 0x0 through 0xF)
6473        //   └── 0xA0F: Leaf { key: 0x }
6474
6475        // Create leaves for all 16 possible nibbles
6476        let mut leaves = Vec::new();
6477        for nibble in 0x0..=0xF {
6478            let (path, value) = ctx.create_test_leaf([0xA, 0x0, nibble], nibble as u64 + 1);
6479            leaves.push((path, value));
6480        }
6481
6482        // Insert all leaves
6483        ctx.update_leaves(&mut trie, leaves.iter().cloned());
6484
6485        // Verify upper trie structure
6486        ctx.assert_upper_subtrie(&trie)
6487            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0x0]));
6488
6489        // Verify lower subtrie has branch with all 16 children
6490        let mut subtrie_assert =
6491            ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0x0])).has_branch(
6492                &Nibbles::from_nibbles([0xA, 0x0]),
6493                &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF],
6494            );
6495
6496        // Verify all leaves exist
6497        for (i, (path, value)) in leaves.iter().enumerate() {
6498            subtrie_assert = subtrie_assert
6499                .has_leaf(&Nibbles::from_nibbles([0xA, 0x0, i as u8]), &leaf_key([], 61))
6500                .has_value(path, value);
6501        }
6502    }
6503
6504    #[test]
6505    fn test_update_creates_multiple_subtries() {
6506        let ctx = ParallelSparseTrieTestContext;
6507        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6508
6509        // Test edge case: updates that create multiple subtries at once
6510        //
6511        // Final trie structure:
6512        // Upper trie:
6513        //   0x: Extension { key: 0x0 }
6514        //       └── 0x0: Branch { state_mask: 0xF }
6515        //           ├── Subtrie (0x00): pointer
6516        //           ├── Subtrie (0x01): pointer
6517        //           ├── Subtrie (0x02): pointer
6518        //           └── Subtrie (0x03): pointer
6519        //
6520        // Each lower subtrie has leaves:
6521        //   0xXY: Leaf { key: 0xZ... }
6522
6523        // Create leaves that will force multiple subtries
6524        let leaves = [
6525            ctx.create_test_leaf([0x0, 0x0, 0x1, 0x2], 1),
6526            ctx.create_test_leaf([0x0, 0x1, 0x3, 0x4], 2),
6527            ctx.create_test_leaf([0x0, 0x2, 0x5, 0x6], 3),
6528            ctx.create_test_leaf([0x0, 0x3, 0x7, 0x8], 4),
6529        ];
6530
6531        // Insert all leaves
6532        ctx.update_leaves(&mut trie, leaves.iter().cloned());
6533
6534        // Verify upper trie has extension then branch
6535        ctx.assert_upper_subtrie(&trie)
6536            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x0]))
6537            .has_branch(&Nibbles::from_nibbles([0x0]), &[0x0, 0x1, 0x2, 0x3]);
6538
6539        // Verify each subtrie exists and contains its leaf
6540        for (i, (leaf_path, leaf_value)) in leaves.iter().enumerate() {
6541            let subtrie_path = Nibbles::from_nibbles([0x0, i as u8]);
6542            let full_path: [u8; 4] = match i {
6543                0 => [0x0, 0x0, 0x1, 0x2],
6544                1 => [0x0, 0x1, 0x3, 0x4],
6545                2 => [0x0, 0x2, 0x5, 0x6],
6546                3 => [0x0, 0x3, 0x7, 0x8],
6547                _ => unreachable!(),
6548            };
6549            ctx.assert_subtrie(&trie, subtrie_path)
6550                .has_leaf(&subtrie_path, &leaf_key(&full_path[2..], 62))
6551                .has_value(leaf_path, leaf_value);
6552        }
6553    }
6554
6555    #[test]
6556    fn test_update_extension_to_branch_transformation() {
6557        let ctx = ParallelSparseTrieTestContext;
6558        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6559
6560        // Test edge case: extension node transforms to branch when split
6561        //
6562        // Initial state after first two leaves:
6563        // Upper trie:
6564        //   0x: Extension { key: 0xFF0 }
6565        //       └── Subtrie (0xFF): pointer
6566        //
6567        // After third leaf (0xF0...):
6568        // Upper trie:
6569        //   0x: Extension { key: 0xF }
6570        //       └── 0xF: Branch { state_mask: 0x10 | 0x8000 }
6571        //           ├── Subtrie (0xF0): pointer
6572        //           └── Subtrie (0xFF): pointer
6573
6574        // First two leaves share prefix 0xFF0
6575        let (leaf1_path, value1) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x1], 1);
6576        let (leaf2_path, value2) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x2], 2);
6577        let (leaf3_path, value3) = ctx.create_test_leaf([0xF, 0x0, 0x0, 0x3], 3);
6578
6579        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);
6580
6581        // Verify initial extension structure
6582        ctx.assert_upper_subtrie(&trie)
6583            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF, 0xF, 0x0]));
6584
6585        // Add leaf that splits the extension
6586        ctx.update_leaves(&mut trie, [(leaf3_path, value3.clone())]);
6587
6588        // Verify transformed structure
6589        ctx.assert_upper_subtrie(&trie)
6590            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF]))
6591            .has_branch(&Nibbles::from_nibbles([0xF]), &[0x0, 0xF]);
6592
6593        // Verify subtries
6594        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
6595            .has_branch(&Nibbles::from_nibbles([0xF, 0xF, 0x0]), &[0x1, 0x2])
6596            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x1]), &leaf_key([], 60))
6597            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x2]), &leaf_key([], 60))
6598            .has_value(&leaf1_path, &value1)
6599            .has_value(&leaf2_path, &value2);
6600
6601        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0x0]))
6602            .has_leaf(&Nibbles::from_nibbles([0xF, 0x0]), &leaf_key([0x0, 0x3], 62))
6603            .has_value(&leaf3_path, &value3);
6604    }
6605
6606    #[test]
6607    fn test_update_long_shared_prefix_at_boundary() {
6608        let ctx = ParallelSparseTrieTestContext;
6609        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6610
6611        // Test edge case: leaves with long shared prefix that ends exactly at 2-nibble boundary
6612        //
6613        // Final trie structure:
6614        // Upper trie:
6615        //   0x: Extension { key: 0xAB }
6616        //       └── Subtrie (0xAB): pointer to lower subtrie
6617        //
6618        // Lower subtrie (0xAB):
6619        //   0xAB: Branch { state_mask: 0x1000 | 0x2000 }
6620        //   ├── 0xABC: Leaf { key: 0xDEF }
6621        //   └── 0xABD: Leaf { key: 0xEF0 }
6622
6623        // Create leaves that share exactly 2 nibbles
6624        let (leaf1_path, value1) = ctx.create_test_leaf([0xA, 0xB, 0xC, 0xD, 0xE, 0xF], 1);
6625        let (leaf2_path, value2) = ctx.create_test_leaf([0xA, 0xB, 0xD, 0xE, 0xF, 0x0], 2);
6626
6627        trie.update_leaf(leaf1_path, value1.clone()).unwrap();
6628        trie.update_leaf(leaf2_path, value2.clone()).unwrap();
6629
6630        // Verify upper trie structure
6631        ctx.assert_upper_subtrie(&trie)
6632            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0xB]));
6633
6634        // Verify lower subtrie structure
6635        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0xB]))
6636            .has_branch(&Nibbles::from_nibbles([0xA, 0xB]), &[0xC, 0xD])
6637            .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xC]), &leaf_key([0xD, 0xE, 0xF], 61))
6638            .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xD]), &leaf_key([0xE, 0xF, 0x0], 61))
6639            .has_value(&leaf1_path, &value1)
6640            .has_value(&leaf2_path, &value2);
6641    }
6642
6643    #[test]
6644    fn test_update_branch_to_extension_collapse() {
6645        let ctx = ParallelSparseTrieTestContext;
6646        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6647
6648        // Test creating a trie with leaves that share a long common prefix
6649        //
6650        // Initial state with 3 leaves (0x1234, 0x2345, 0x2356):
6651        // Upper trie:
6652        //   0x: Branch { state_mask: 0x6 }
6653        //       ├── 0x1: Leaf { key: 0x234 }
6654        //       └── 0x2: Extension { key: 0x3 }
6655        //           └── Subtrie (0x23): pointer
6656        // Lower subtrie (0x23):
6657        //   0x23: Branch { state_mask: 0x30 }
6658        //       ├── 0x234: Leaf { key: 0x5 }
6659        //       └── 0x235: Leaf { key: 0x6 }
6660        //
6661        // Then we create a new trie with leaves (0x1234, 0x1235, 0x1236):
6662        // Expected structure:
6663        // Upper trie:
6664        //   0x: Extension { key: 0x123 }
6665        //       └── Subtrie (0x12): pointer
6666        // Lower subtrie (0x12):
6667        //   0x123: Branch { state_mask: 0x70 } // bits 4, 5, 6 set
6668        //       ├── 0x1234: Leaf { key: 0x }
6669        //       ├── 0x1235: Leaf { key: 0x }
6670        //       └── 0x1236: Leaf { key: 0x }
6671
6672        // Create initial leaves
6673        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6674        let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6675        let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6676
6677        trie.update_leaf(leaf1_path, value1).unwrap();
6678        trie.update_leaf(leaf2_path, value2).unwrap();
6679        trie.update_leaf(leaf3_path, value3).unwrap();
6680
6681        // Verify initial structure has branch at root
6682        ctx.assert_upper_subtrie(&trie).has_branch(&Nibbles::default(), &[0x1, 0x2]);
6683
6684        // Now update to create a pattern where extension is more efficient
6685        // Replace leaves to all share prefix 0x123
6686        let (new_leaf1_path, new_value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 10);
6687        let (new_leaf2_path, new_value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 11);
6688        let (new_leaf3_path, new_value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 12);
6689
6690        // Clear and add new leaves
6691        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6692        trie.update_leaf(new_leaf1_path, new_value1.clone()).unwrap();
6693        trie.update_leaf(new_leaf2_path, new_value2.clone()).unwrap();
6694        trie.update_leaf(new_leaf3_path, new_value3.clone()).unwrap();
6695
6696        // Verify new structure has extension
6697        ctx.assert_upper_subtrie(&trie)
6698            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6699
6700        // Verify lower subtrie path was correctly updated to 0x123
6701        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);
6702
6703        // Verify lower subtrie - all three leaves should be properly inserted
6704        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6705            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6]) // All three children
6706            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &leaf_key([], 60))
6707            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
6708            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x6]), &leaf_key([], 60))
6709            .has_value(&new_leaf1_path, &new_value1)
6710            .has_value(&new_leaf2_path, &new_value2)
6711            .has_value(&new_leaf3_path, &new_value3);
6712    }
6713
6714    #[test]
6715    fn test_update_shared_prefix_patterns() {
6716        let ctx = ParallelSparseTrieTestContext;
6717        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6718
6719        // Test edge case: different patterns of shared prefixes
6720        //
6721        // Final trie structure:
6722        // Upper trie:
6723        //   0x: Branch { state_mask: 0x6 }
6724        //       ├── 0x1: Leaf { key: 0x234 }
6725        //       └── 0x2: Extension { key: 0x3 }
6726        //           └── Subtrie (0x23): pointer
6727        //
6728        // Lower subtrie (0x23):
6729        //   0x23: Branch { state_mask: 0x10 | 0x20 }
6730        //   ├── 0x234: Leaf { key: 0x5 }
6731        //   └── 0x235: Leaf { key: 0x6 }
6732
6733        // Create leaves with different shared prefix patterns
6734        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6735        let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6736        let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6737
6738        trie.update_leaf(leaf1_path, value1).unwrap();
6739        trie.update_leaf(leaf2_path, value2.clone()).unwrap();
6740        trie.update_leaf(leaf3_path, value3.clone()).unwrap();
6741
6742        // Verify upper trie structure
6743        ctx.assert_upper_subtrie(&trie)
6744            .has_branch(&Nibbles::default(), &[0x1, 0x2])
6745            .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([0x2, 0x3, 0x4], 63))
6746            .has_extension(&Nibbles::from_nibbles([0x2]), &Nibbles::from_nibbles([0x3]));
6747
6748        // Verify lower subtrie structure
6749        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x2, 0x3]))
6750            .has_branch(&Nibbles::from_nibbles([0x2, 0x3]), &[0x4, 0x5])
6751            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x4]), &leaf_key([0x5], 61))
6752            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x5]), &leaf_key([0x6], 61))
6753            .has_value(&leaf2_path, &value2)
6754            .has_value(&leaf3_path, &value3);
6755    }
6756
6757    #[test]
6758    fn test_progressive_branch_creation() {
6759        let ctx = ParallelSparseTrieTestContext;
6760        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6761
6762        // Test starting with a single leaf and progressively adding leaves
6763        // that create branch nodes at shorter and shorter paths
6764        //
6765        // Step 1: Add leaf at 0x12345
6766        // Upper trie:
6767        //   0x: Leaf { key: 0x12345 }
6768        //
6769        // Step 2: Add leaf at 0x12346
6770        // Upper trie:
6771        //   0x: Extension { key: 0x1234 }
6772        //       └── Subtrie (0x12): pointer
6773        // Lower subtrie (0x12):
6774        //   0x1234: Branch { state_mask: 0x60 }  // bits 5 and 6 set
6775        //       ├── 0x12345: Leaf { key: 0x }
6776        //       └── 0x12346: Leaf { key: 0x }
6777        //
6778        // Step 3: Add leaf at 0x1235
6779        // Lower subtrie (0x12) updates to:
6780        //   0x123: Branch { state_mask: 0x30 }  // bits 4 and 5 set
6781        //       ├── 0x1234: Branch { state_mask: 0x60 }
6782        //       │   ├── 0x12345: Leaf { key: 0x }
6783        //       │   └── 0x12346: Leaf { key: 0x }
6784        //       └── 0x1235: Leaf { key: 0x }
6785        //
6786        // Step 4: Add leaf at 0x124
6787        // Lower subtrie (0x12) updates to:
6788        //   0x12: Branch { state_mask: 0x18 }  // bits 3 and 4 set
6789        //       ├── 0x123: Branch { state_mask: 0x30 }
6790        //       │   ├── 0x1234: Branch { state_mask: 0x60 }
6791        //       │   │   ├── 0x12345: Leaf { key: 0x }
6792        //       │   │   └── 0x12346: Leaf { key: 0x }
6793        //       │   └── 0x1235: Leaf { key: 0x }
6794        //       └── 0x124: Leaf { key: 0x }
6795
6796        // Step 1: Add first leaf - initially stored as leaf in upper trie
6797        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x5], 1);
6798        trie.update_leaf(leaf1_path, value1.clone()).unwrap();
6799
6800        // Verify leaf node in upper trie (optimized single-leaf case)
6801        ctx.assert_upper_subtrie(&trie)
6802            .has_leaf(
6803                &Nibbles::default(),
6804                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
6805            )
6806            .has_value(&leaf1_path, &value1);
6807
6808        // Step 2: Add leaf at 0x12346 - creates branch at 0x1234
6809        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x6], 2);
6810        trie.update_leaf(leaf2_path, value2.clone()).unwrap();
6811
6812        // Verify extension now goes to 0x1234
6813        ctx.assert_upper_subtrie(&trie)
6814            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
6815
6816        // Verify subtrie path updated to 0x1234
6817        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3, 0x4]);
6818
6819        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6820            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
6821            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]), &leaf_key([], 59))
6822            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x6]), &leaf_key([], 59))
6823            .has_value(&leaf1_path, &value1)
6824            .has_value(&leaf2_path, &value2);
6825
6826        // Step 3: Add leaf at 0x1235 - creates branch at 0x123
6827        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 3);
6828        trie.update_leaf(leaf3_path, value3.clone()).unwrap();
6829
6830        // Verify extension now goes to 0x123
6831        ctx.assert_upper_subtrie(&trie)
6832            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6833
6834        // Verify subtrie path updated to 0x123
6835        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);
6836
6837        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6838            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
6839            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
6840            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
6841            .has_value(&leaf1_path, &value1)
6842            .has_value(&leaf2_path, &value2)
6843            .has_value(&leaf3_path, &value3);
6844
6845        // Step 4: Add leaf at 0x124 - creates branch at 0x12 (subtrie root)
6846        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x2, 0x4], 4);
6847        trie.update_leaf(leaf4_path, value4.clone()).unwrap();
6848
6849        // Verify extension now goes to 0x12
6850        ctx.assert_upper_subtrie(&trie)
6851            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));
6852
6853        // Verify subtrie path updated to 0x12
6854        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2]);
6855
6856        // Verify final structure
6857        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6858            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6859            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
6860            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
6861            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([], 61))
6862            .has_value(&leaf1_path, &value1)
6863            .has_value(&leaf2_path, &value2)
6864            .has_value(&leaf3_path, &value3)
6865            .has_value(&leaf4_path, &value4);
6866    }
6867
6868    #[test]
6869    fn test_update_max_depth_paths() {
6870        let ctx = ParallelSparseTrieTestContext;
6871        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6872
6873        // Test edge case: very long paths (64 nibbles - max for addresses/storage)
6874        //
6875        // Final trie structure:
6876        // Upper trie:
6877        //   0x: Extension { key: 0xFF }
6878        //       └── Subtrie (0xFF): pointer
6879        //
6880        // Lower subtrie (0xFF):
6881        //   Has very long paths with slight differences at the end
6882
6883        // Create two 64-nibble paths that differ only in the last nibble
6884        let mut path1_nibbles = vec![0xF; 63];
6885        path1_nibbles.push(0x0);
6886        let mut path2_nibbles = vec![0xF; 63];
6887        path2_nibbles.push(0x1);
6888
6889        let (leaf1_path, value1) = ctx.create_test_leaf(&path1_nibbles, 1);
6890        let (leaf2_path, value2) = ctx.create_test_leaf(&path2_nibbles, 2);
6891
6892        trie.update_leaf(leaf1_path, value1.clone()).unwrap();
6893        trie.update_leaf(leaf2_path, value2.clone()).unwrap();
6894
6895        // The common prefix of 63 F's will create a very long extension
6896        let extension_key = vec![0xF; 63];
6897        ctx.assert_upper_subtrie(&trie)
6898            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles(&extension_key));
6899
6900        // Verify the subtrie has the branch at the end
6901        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
6902            .has_branch(&Nibbles::from_nibbles(&path1_nibbles[..63]), &[0x0, 0x1])
6903            .has_value(&leaf1_path, &value1)
6904            .has_value(&leaf2_path, &value2);
6905    }
6906
    #[test]
    fn test_hoodie_block_1_data() {
        // Replays captured proof-node data through `from_root` / `reveal_nodes` /
        // `update_leaf`, then asserts the computed state root against a
        // known-good hash for this block.

        // Reveal node at path Nibbles(0x) - root branch node. Each entry is the
        // RLP encoding of one child hash (0xa0-prefixed 32-byte string).
        let root_branch_stack = vec![
            hex!("a0550b6aba4dd4582a2434d2cbdad8d3007d09f622d7a6e6eaa7a49385823c2fa2"),
            hex!("a04788a4975a9e1efd29b834fd80fdfe8a57cc1b1c5ace6d30ce5a36a15e0092b3"),
            hex!("a093aeccf87da304e6f7d09edc5d7bd3a552808866d2149dd0940507a8f9bfa910"),
            hex!("a08b5b423ba68d0dec2eca1f408076f9170678505eb4a5db2abbbd83bb37666949"),
            hex!("a08592f62216af4218098a78acad7cf472a727fb55e6c27d3cfdf2774d4518eb83"),
            hex!("a0ef02aeee845cb64c11f85edc1a3094227c26445952554b8a9248915d80c746c3"),
            hex!("a0df2529ee3a1ce4df5a758cf17e6a86d0fb5ea22ab7071cf60af6412e9b0a428a"),
            hex!("a0acaa1092db69cd5a63676685827b3484c4b80dc1d3361f6073bbb9240101e144"),
            hex!("a09c3f2bb2a729d71f246a833353ade65667716bb330e0127a3299a42d11200f93"),
            hex!("a0ce978470f4c0b1f8069570563a14d2b79d709add2db4bf22dd9b6aed3271c566"),
            hex!("a095f783cd1d464a60e3c8adcadc28c6eb9fec7306664df39553be41dccc909606"),
            hex!("a0a9083f5fb914b255e1feb5d951a4dfddacf3c8003ef1d1ec6a13bb6ba5b2ac62"),
            hex!("a0fec113d537d8577cd361e0cabf5e95ef58f1cc34318292fdecce9fae57c3e094"),
            hex!("a08b7465f5fe8b3e3c0d087cb7521310d4065ef2a0ee43bf73f68dee8a5742b3dd"),
            hex!("a0c589aa1ae3d5fd87d8640957f7d5184a4ac06f393b453a8e8ed7e8fba0d385c8"),
            hex!("a0b516d6f3352f87beab4ed6e7322f191fc7a147686500ef4de7dd290ad784ef51"),
        ];

        let root_branch_rlp_stack: Vec<RlpNode> = root_branch_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        let root_branch_node = BranchNodeV2::new(
            Default::default(),
            root_branch_rlp_stack,
            TrieMask::new(0b1111111111111111), // state_mask: all 16 children present
            None,
        );

        // Masks captured from the proof data for the root branch.
        let root_branch_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b1111111111111111),
            tree_mask: TrieMask::new(0b1111111111111111),
        });

        let mut trie = ParallelSparseTrie::from_root(
            TrieNodeV2::Branch(root_branch_node),
            root_branch_masks,
            true,
        )
        .unwrap();

        // Reveal node at path Nibbles(0x3) - branch node, again with all 16
        // children given as hashes.
        let branch_0x3_stack = vec![
            hex!("a09da7d9755fe0c558b3c3de9fdcdf9f28ae641f38c9787b05b73ab22ae53af3e2"),
            hex!("a0d9990bf0b810d1145ecb2b011fd68c63cc85564e6724166fd4a9520180706e5f"),
            hex!("a0f60eb4b12132a40df05d9bbdb88bbde0185a3f097f3c76bf4200c23eda26cf86"),
            hex!("a0ca976997ddaf06f18992f6207e4f6a05979d07acead96568058789017cc6d06b"),
            hex!("a04d78166b48044fdc28ed22d2fd39c8df6f8aaa04cb71d3a17286856f6893ff83"),
            hex!("a021d4f90c34d3f1706e78463b6482bca77a3aa1cd059a3f326c42a1cfd30b9b60"),
            hex!("a0fc3b71c33e2e6b77c5e494c1db7fdbb447473f003daf378c7a63ba9bf3f0049d"),
            hex!("a0e33ed2be194a3d93d343e85642447c93a9d0cfc47a016c2c23d14c083be32a7c"),
            hex!("a07b8e7a21c1178d28074f157b50fca85ee25c12568ff8e9706dcbcdacb77bf854"),
            hex!("a0973274526811393ea0bf4811ca9077531db00d06b86237a2ecd683f55ba4bcb0"),
            hex!("a03a93d726d7487874e51b52d8d534c63aa2a689df18e3b307c0d6cb0a388b00f3"),
            hex!("a06aa67101d011d1c22fe739ef83b04b5214a3e2f8e1a2625d8bfdb116b447e86f"),
            hex!("a02dd545b33c62d33a183e127a08a4767fba891d9f3b94fc20a2ca02600d6d1fff"),
            hex!("a0fe6db87d00f06d53bff8169fa497571ff5af1addfb715b649b4d79dd3e394b04"),
            hex!("a0d9240a9d2d5851d05a97ff3305334dfdb0101e1e321fc279d2bb3cad6afa8fc8"),
            hex!("a01b69c6ab5173de8a8ec53a6ebba965713a4cc7feb86cb3e230def37c230ca2b2"),
        ];

        let branch_0x3_rlp_stack: Vec<RlpNode> = branch_0x3_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        let branch_0x3_node = BranchNodeV2::new(
            Default::default(),
            branch_0x3_rlp_stack,
            TrieMask::new(0b1111111111111111), // state_mask: all 16 children present
            None,
        );

        // Masks captured from the proof data for the 0x3 branch.
        let branch_0x3_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b0100010000010101),
            tree_mask: TrieMask::new(0b0100000000000000),
        });

        // Reveal node at path Nibbles(0x37) - leaf node. The leaf key is the
        // remaining 62 nibbles of the account hash; the value is an RLP-encoded
        // trie account.
        let leaf_path = Nibbles::from_nibbles([0x3, 0x7]);
        let leaf_key = Nibbles::unpack(
            &hex!("d65eaa92c6bc4c13a5ec45527f0c18ea8932588728769ec7aecfe6d9f32e42")[..],
        );
        let leaf_value = hex!("f8440180a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0f57acd40259872606d76197ef052f3d35588dadf919ee1f0e3cb9b62d3f4b02c").to_vec();

        let leaf_node = LeafNode::new(leaf_key, leaf_value);
        let leaf_masks = None;

        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles([0x3]),
                node: TrieNodeV2::Branch(branch_0x3_node),
                masks: branch_0x3_masks,
            },
            ProofTrieNodeV2 {
                path: leaf_path,
                node: TrieNodeV2::Leaf(leaf_node),
                masks: leaf_masks,
            },
        ])
        .unwrap();

        // Update the leaf with a new value. The full leaf path is the node
        // path (0x37) concatenated with the leaf key.
        let mut leaf_full_path = leaf_path;
        leaf_full_path.extend(&leaf_key);

        let leaf_new_value = vec![
            248, 68, 1, 128, 160, 224, 163, 152, 169, 122, 160, 155, 102, 53, 41, 0, 47, 28, 205,
            190, 199, 5, 215, 108, 202, 22, 138, 70, 196, 178, 193, 208, 18, 96, 95, 63, 238, 160,
            245, 122, 205, 64, 37, 152, 114, 96, 109, 118, 25, 126, 240, 82, 243, 211, 85, 136,
            218, 223, 145, 158, 225, 240, 227, 203, 155, 98, 211, 244, 176, 44,
        ];

        trie.update_leaf(leaf_full_path, leaf_new_value.clone()).unwrap();

        // Sanity checks before calculating the root: the new value must live
        // in the lower subtrie that owns path 0x37, not in the upper subtrie.
        assert_eq!(
            Some(&leaf_new_value),
            trie.lower_subtrie_for_path(&leaf_path).unwrap().inner.values.get(&leaf_full_path)
        );
        assert!(trie.upper_subtrie.inner.values.is_empty());

        // Assert the root hash matches the known-good value for this data set.
        let expected_root =
            b256!("0x29b07de8376e9ce7b3a69e9b102199869514d3f42590b5abc6f7d48ec9b8665c");
        assert_eq!(trie.root(), expected_root);
    }
7039
7040    #[test]
7041    fn find_leaf_existing_leaf() {
7042        // Create a simple trie with one leaf
7043        let mut sparse = ParallelSparseTrie::default();
7044        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7045        let value = b"test_value".to_vec();
7046
7047        sparse.update_leaf(path, value.clone()).unwrap();
7048
7049        // Check that the leaf exists
7050        let result = sparse.find_leaf(&path, None);
7051        assert_matches!(result, Ok(LeafLookup::Exists));
7052
7053        // Check with expected value matching
7054        let result = sparse.find_leaf(&path, Some(&value));
7055        assert_matches!(result, Ok(LeafLookup::Exists));
7056    }
7057
7058    #[test]
7059    fn find_leaf_value_mismatch() {
7060        // Create a simple trie with one leaf
7061        let mut sparse = ParallelSparseTrie::default();
7062        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7063        let value = b"test_value".to_vec();
7064        let wrong_value = b"wrong_value".to_vec();
7065
7066        sparse.update_leaf(path, value).unwrap();
7067
7068        // Check with wrong expected value
7069        let result = sparse.find_leaf(&path, Some(&wrong_value));
7070        assert_matches!(
7071            result,
7072            Err(LeafLookupError::ValueMismatch { path: p, expected: Some(e), actual: _a }) if p == path && e == wrong_value
7073        );
7074    }
7075
7076    #[test]
7077    fn find_leaf_not_found_empty_trie() {
7078        // Empty trie
7079        let sparse = ParallelSparseTrie::default();
7080        let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
7081
7082        // Leaf should not exist
7083        let result = sparse.find_leaf(&path, None);
7084        assert_matches!(result, Ok(LeafLookup::NonExistent));
7085    }
7086
7087    #[test]
7088    fn find_leaf_empty_trie() {
7089        let sparse = ParallelSparseTrie::default();
7090        let path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7091
7092        let result = sparse.find_leaf(&path, None);
7093        assert_matches!(result, Ok(LeafLookup::NonExistent));
7094    }
7095
7096    #[test]
7097    fn find_leaf_exists_no_value_check() {
7098        let mut sparse = ParallelSparseTrie::default();
7099        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7100        sparse.update_leaf(path, encode_account_value(0)).unwrap();
7101
7102        let result = sparse.find_leaf(&path, None);
7103        assert_matches!(result, Ok(LeafLookup::Exists));
7104    }
7105
7106    #[test]
7107    fn find_leaf_exists_with_value_check_ok() {
7108        let mut sparse = ParallelSparseTrie::default();
7109        let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7110        let value = encode_account_value(0);
7111        sparse.update_leaf(path, value.clone()).unwrap();
7112
7113        let result = sparse.find_leaf(&path, Some(&value));
7114        assert_matches!(result, Ok(LeafLookup::Exists));
7115    }
7116
7117    #[test]
7118    fn find_leaf_exclusion_branch_divergence() {
7119        let mut sparse = ParallelSparseTrie::default();
7120        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); // Creates branch at 0x12
7121        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6])); // Belongs to same branch
7122        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8])); // Diverges at nibble 7
7123
7124        sparse.update_leaf(path1, encode_account_value(0)).unwrap();
7125        sparse.update_leaf(path2, encode_account_value(1)).unwrap();
7126
7127        let result = sparse.find_leaf(&search_path, None);
7128        assert_matches!(result, Ok(LeafLookup::NonExistent))
7129    }
7130
7131    #[test]
7132    fn find_leaf_exclusion_extension_divergence() {
7133        let mut sparse = ParallelSparseTrie::default();
7134        // This will create an extension node at root with key 0x12
7135        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7136        // This path diverges from the extension key
7137        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8]));
7138
7139        sparse.update_leaf(path1, encode_account_value(0)).unwrap();
7140
7141        let result = sparse.find_leaf(&search_path, None);
7142        assert_matches!(result, Ok(LeafLookup::NonExistent))
7143    }
7144
7145    #[test]
7146    fn find_leaf_exclusion_leaf_divergence() {
7147        let mut sparse = ParallelSparseTrie::default();
7148        let existing_leaf_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7149        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7150
7151        sparse.update_leaf(existing_leaf_path, encode_account_value(0)).unwrap();
7152
7153        let result = sparse.find_leaf(&search_path, None);
7154        assert_matches!(result, Ok(LeafLookup::NonExistent))
7155    }
7156
7157    #[test]
7158    fn find_leaf_exclusion_path_ends_at_branch() {
7159        let mut sparse = ParallelSparseTrie::default();
7160        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); // Creates branch at 0x12
7161        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6]));
7162        let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2])); // Path of the branch itself
7163
7164        sparse.update_leaf(path1, encode_account_value(0)).unwrap();
7165        sparse.update_leaf(path2, encode_account_value(1)).unwrap();
7166
7167        let result = sparse.find_leaf(&search_path, None);
7168        assert_matches!(result, Ok(LeafLookup::NonExistent));
7169    }
7170
7171    #[test]
7172    fn find_leaf_error_blinded_node_at_leaf_path() {
7173        // Scenario: The node *at* the leaf path is blinded.
7174        let blinded_hash = B256::repeat_byte(0xBB);
7175        let leaf_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7176
7177        let sparse = new_test_trie(
7178            [
7179                (
7180                    // Ext 0x12
7181                    Nibbles::default(),
7182                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x1, 0x2])),
7183                ),
7184                (
7185                    // Ext 0x123
7186                    Nibbles::from_nibbles_unchecked([0x1, 0x2]),
7187                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x3])),
7188                ),
7189                (
7190                    // Branch at 0x123, child 4
7191                    Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3]),
7192                    SparseNode::new_branch(TrieMask::new(0b10000), &[(0x4, blinded_hash)]),
7193                ),
7194            ]
7195            .into_iter(),
7196        );
7197
7198        let result = sparse.find_leaf(&leaf_path, None);
7199
7200        // Should error because it hit the blinded node exactly at the leaf path
7201        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7202            if path == leaf_path && hash == blinded_hash
7203        );
7204    }
7205
7206    #[test]
7207    fn find_leaf_error_blinded_node() {
7208        let blinded_hash = B256::repeat_byte(0xAA);
7209        let path_to_blind = Nibbles::from_nibbles_unchecked([0x1]);
7210        let search_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7211
7212        let sparse = new_test_trie(
7213            [
7214                // Root is a branch with child 0x1 (blinded) and 0x5 (revealed leaf)
7215                // So we set Bit 1 and Bit 5 in the state_mask
7216                (
7217                    Nibbles::default(),
7218                    SparseNode::new_branch(TrieMask::new(0b100010), &[(0x1, blinded_hash)]),
7219                ),
7220                (
7221                    Nibbles::from_nibbles_unchecked([0x5]),
7222                    SparseNode::new_leaf(Nibbles::from_nibbles_unchecked([0x6, 0x7, 0x8])),
7223                ),
7224            ]
7225            .into_iter(),
7226        );
7227
7228        let result = sparse.find_leaf(&search_path, None);
7229
7230        // Should error because it hit the blinded node at path 0x1
7231        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7232            if path == path_to_blind && hash == blinded_hash
7233        );
7234    }
7235
    #[test]
    fn test_mainnet_block_24185431_storage_0x6ba784ee() {
        // Reproduces a mainnet storage-trie scenario: removing a leaf first
        // fails on a blinded branch child, succeeds after that branch is
        // revealed, and produces the expected compact branch-node update.
        reth_tracing::init_test_tracing();

        // Reveal branch at 0x3 with full state.
        // `mut` because child 1's hash is removed below when building the
        // expected post-removal update.
        let mut branch_0x3_hashes = vec![
            B256::from(hex!("fc11ba8de4b220b8f19a09f0676c69b8e18bae1350788392640069e59b41733d")),
            B256::from(hex!("8afe085cc6685680bd8ba4bac6e65937a4babf737dc5e7413d21cdda958e8f74")),
            B256::from(hex!("c7b6f7c0fc601a27aece6ec178fd9be17cdee77c4884ecfbe1ee459731eb57da")),
            B256::from(hex!("71c1aec60db78a2deb4e10399b979a2ed5be42b4ee0c0a17c614f9ddc9f9072e")),
            B256::from(hex!("e9261302e7c0b77930eaf1851b585210906cd01e015ab6be0f7f3c0cc947c32a")),
            B256::from(hex!("38ce8f369c56bd77fabdf679b27265b1f8d0a54b09ef612c8ee8ddfc6b3fab95")),
            B256::from(hex!("7b507a8936a28c5776b647d1c4bda0bbbb3d0d227f16c5f5ebba58d02e31918d")),
            B256::from(hex!("0f456b9457a824a81e0eb555aa861461acb38674dcf36959b3b26deb24ed0af9")),
            B256::from(hex!("2145420289652722ad199ba932622e3003c779d694fa5a2acfb2f77b0782b38a")),
            B256::from(hex!("2c1a04dce1a9e2f1cfbf8806edce50a356dfa58e7e7c542c848541502613b796")),
            B256::from(hex!("dad7ca55186ac8f40d4450dc874166df8267b44abc07e684d9507260f5712df3")),
            B256::from(hex!("3a8c2a1d7d2423e92965ec29014634e7f0307ded60b1a63d28c86c3222b24236")),
            B256::from(hex!("4e9929e6728b3a7bf0db6a0750ab376045566b556c9c605e606ecb8ec25200d7")),
            B256::from(hex!("1797c36f98922f52292c161590057a1b5582d5503e3370bcfbf6fd939f3ec98b")),
            B256::from(hex!("9e514589a9c9210b783c19fa3f0b384bbfaefe98f10ea189a2bfc58c6bf000a1")),
            B256::from(hex!("85bdaabbcfa583cbd049650e41d3d19356bd833b3ed585cf225a3548557c7fa3")),
        ];
        let branch_0x3_node = create_branch_node(
            Nibbles::from_nibbles([0x3]),
            &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf],
            branch_0x3_hashes.iter().map(RlpNode::word_rlp),
        );

        // Reveal branch at 0x31 (single child at nibble 0xc).
        let branch_0x31_hashes = vec![B256::from(hex!(
            "3ca994ba59ce70b83fee1f01731c8dac4fdd0f70ade79bf9b0695c4c53531aab"
        ))];
        let branch_0x31_node = create_branch_node_with_children(
            &[0xc],
            branch_0x31_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // Reveal leaf at 0x31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81
        let leaf_path = hex!("31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81");
        let leaf_nibbles = Nibbles::unpack(leaf_path.as_slice());
        let leaf_value = hex!("0009ae8ce8245bff").to_vec();

        // Reveal branch at 0x31c (children at nibbles 0x3, 0x7, 0xc).
        let branch_0x31c_hashes = vec![
            B256::from(hex!("1a68fdb36b77e9332b49a977faf800c22d0199e6cecf44032bb083c78943e540")),
            B256::from(hex!("cd4622c6df6fd7172c7fed1b284ef241e0f501b4c77b675ef10c612bd0948a7a")),
            B256::from(hex!("abf3603d2f991787e21f1709ee4c7375d85dfc506995c0435839fccf3fe2add4")),
        ];
        let branch_0x31c_node = create_branch_node_with_children(
            &[0x3, 0x7, 0xc],
            branch_0x31c_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // Reveal the trie structure using ProofTrieNode. Mask values below are
        // captured from the mainnet proof data.
        let mut proof_nodes = vec![ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1]),
            node: branch_0x31_node,
            masks: Some(BranchNodeMasks {
                tree_mask: TrieMask::new(4096),
                hash_mask: TrieMask::new(4096),
            }),
        }];

        // Create a sparse trie rooted at the 0x3 branch and reveal nodes.
        let mut trie = ParallelSparseTrie::default()
            .with_root(
                branch_0x3_node,
                Some(BranchNodeMasks {
                    tree_mask: TrieMask::new(26099),
                    hash_mask: TrieMask::new(65535),
                }),
                true,
            )
            .expect("root revealed");

        trie.reveal_nodes(&mut proof_nodes).unwrap();

        // Update the leaf in order to reveal it in the trie
        trie.update_leaf(leaf_nibbles, leaf_value).unwrap();

        // Removing the leaf must fail while the branch at 0x31c is still
        // blinded (only known by hash).
        let Err(err) = trie.remove_leaf(&leaf_nibbles) else {
            panic!("expected blinded node error");
        };
        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if path == &Nibbles::from_nibbles([0x3, 0x1, 0xc]));

        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1, 0xc]),
            node: branch_0x31c_node,
            masks: Some(BranchNodeMasks { tree_mask: 0.into(), hash_mask: 4096.into() }),
        }])
        .unwrap();

        // Now remove the leaf again, this should succeed
        trie.remove_leaf(&leaf_nibbles).unwrap();

        // Compute the root to trigger updates
        let _ = trie.root();

        // Assert the resulting branch node updates
        let updates = trie.updates_ref();

        // Check that the branch at 0x3 was updated with the expected structure
        let branch_0x3_update = updates
            .updated_nodes
            .get(&Nibbles::from_nibbles([0x3]))
            .expect("Branch at 0x3 should be in updates");

        // We no longer expect to track the hash for child 1
        branch_0x3_hashes.remove(1);

        // Expected compact branch node after the leaf removal under child 1.
        let expected_branch = BranchNodeCompact::new(
            0b1111111111111111,
            0b0110010111110011,
            0b1111111111111101,
            branch_0x3_hashes,
            None,
        );

        assert_eq!(branch_0x3_update, &expected_branch);
    }
7359
7360    #[test]
7361    fn test_get_leaf_value_lower_subtrie() {
7362        // This test demonstrates that get_leaf_value must look in the correct subtrie,
7363        // not always in upper_subtrie.
7364
7365        // Set up a root branch pointing to nibble 0x1, and a branch at [0x1] pointing to
7366        // nibble 0x2, so that the lower subtrie at [0x1, 0x2] is reachable.
7367        let root_branch =
7368            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
7369        let branch_at_1 =
7370            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
7371        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
7372        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
7373            path: Nibbles::from_nibbles([0x1]),
7374            node: branch_at_1,
7375            masks: None,
7376        }])
7377        .unwrap();
7378
7379        // Create a leaf node with path >= 2 nibbles (will go to lower subtrie)
7380        let leaf_path = Nibbles::from_nibbles([0x1, 0x2]);
7381        let leaf_key = Nibbles::from_nibbles([0x3, 0x4]);
7382        let leaf_node = create_leaf_node(leaf_key.to_vec(), 42);
7383
7384        // Reveal the leaf node
7385        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path: leaf_path, node: leaf_node, masks: None }])
7386            .unwrap();
7387
7388        // The full path is leaf_path + leaf_key
7389        let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]);
7390
7391        // Verify the value is stored in the lower subtrie, not upper
7392        let idx = path_subtrie_index_unchecked(&leaf_path);
7393        let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
7394        assert!(
7395            lower_subtrie.inner.values.contains_key(&full_path),
7396            "value should be in lower subtrie"
7397        );
7398        assert!(
7399            !trie.upper_subtrie.inner.values.contains_key(&full_path),
7400            "value should NOT be in upper subtrie"
7401        );
7402
7403        // get_leaf_value should find the value
7404        assert!(
7405            trie.get_leaf_value(&full_path).is_some(),
7406            "get_leaf_value should find the value in lower subtrie"
7407        );
7408    }
7409
7410    /// Test that `get_leaf_value` correctly returns values stored via `update_leaf`
7411    /// when the leaf node ends up in the upper subtrie (depth < 2).
7412    ///
7413    /// This can happen when the trie is sparse and the leaf is inserted at the root level.
7414    /// Previously, `get_leaf_value` only checked the lower subtrie based on the full path,
7415    /// missing values stored in `upper_subtrie.inner.values`.
7416    #[test]
7417    fn test_get_leaf_value_upper_subtrie_via_update_leaf() {
7418        // Create an empty trie with an empty root
7419        let mut trie = ParallelSparseTrie::default()
7420            .with_root(TrieNodeV2::EmptyRoot, None, false)
7421            .expect("root revealed");
7422
7423        // Create a full 64-nibble path (like a real account hash)
7424        let full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA, 0xB, 0xC]));
7425        let value = encode_account_value(42);
7426
7427        // Insert the leaf - since the trie is empty, the leaf node will be created
7428        // at the root level (depth 0), which is in the upper subtrie
7429        trie.update_leaf(full_path, value.clone()).unwrap();
7430
7431        // Verify the value is stored in upper_subtrie (where update_leaf puts it)
7432        assert!(
7433            trie.upper_subtrie.inner.values.contains_key(&full_path),
7434            "value should be in upper subtrie after update_leaf"
7435        );
7436
7437        // Verify the value can be retrieved via get_leaf_value
7438        // Before the fix, this would return None because get_leaf_value only
7439        // checked the lower subtrie based on the path length
7440        let retrieved = trie.get_leaf_value(&full_path);
7441        assert_eq!(retrieved, Some(&value));
7442    }
7443
7444    /// Test that `get_leaf_value` works for values in both upper and lower subtries.
7445    #[test]
7446    fn test_get_leaf_value_upper_and_lower_subtries() {
7447        // Create an empty trie
7448        let mut trie = ParallelSparseTrie::default()
7449            .with_root(TrieNodeV2::EmptyRoot, None, false)
7450            .expect("root revealed");
7451
7452        // Insert first leaf - will be at root level (upper subtrie)
7453        let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA]));
7454        let value1 = encode_account_value(1);
7455        trie.update_leaf(path1, value1.clone()).unwrap();
7456
7457        // Insert second leaf with different prefix - creates a branch
7458        let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0xB]));
7459        let value2 = encode_account_value(2);
7460        trie.update_leaf(path2, value2.clone()).unwrap();
7461
7462        // Both values should be retrievable
7463        assert_eq!(trie.get_leaf_value(&path1), Some(&value1));
7464        assert_eq!(trie.get_leaf_value(&path2), Some(&value2));
7465    }
7466
7467    /// Test that `get_leaf_value` works for storage tries which are often very sparse.
7468    #[test]
7469    fn test_get_leaf_value_sparse_storage_trie() {
7470        // Simulate a storage trie with a single slot
7471        let mut trie = ParallelSparseTrie::default()
7472            .with_root(TrieNodeV2::EmptyRoot, None, false)
7473            .expect("root revealed");
7474
7475        // Single storage slot - leaf will be at root (depth 0)
7476        let slot_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x9]));
7477        let slot_value = alloy_rlp::encode(U256::from(12345));
7478        trie.update_leaf(slot_path, slot_value.clone()).unwrap();
7479
7480        // Value should be retrievable
7481        assert_eq!(trie.get_leaf_value(&slot_path), Some(&slot_value));
7482    }
7483
7484    #[test]
7485    fn test_prune_empty_suffix_key_regression() {
7486        // Regression test: when a leaf has an empty suffix key (full path == node path),
7487        // the value must be removed when that path becomes a pruned root.
7488        // This catches the bug where is_strict_descendant fails to remove p == pruned_root.
7489        let mut parallel = ParallelSparseTrie::default();
7490
7491        // Large value to ensure nodes have hashes (RLP >= 32 bytes)
7492        let value = {
7493            let account = Account {
7494                nonce: 0x123456789abcdef,
7495                balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
7496                ..Default::default()
7497            };
7498            let mut buf = Vec::new();
7499            account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
7500            buf
7501        };
7502
7503        // Create a trie with multiple leaves to force a branch at root
7504        for i in 0..16u8 {
7505            parallel
7506                .update_leaf(
7507                    pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5])),
7508                    value.clone(),
7509                )
7510                .unwrap();
7511        }
7512
7513        // Compute root to get hashes
7514        let root_before = parallel.root();
7515
7516        // Prune with no retained leaves: all children of root become pruned roots
7517        parallel.prune(&[]);
7518
7519        let root_after = parallel.root();
7520        assert_eq!(root_before, root_after, "root hash must be preserved");
7521
7522        // Key assertion: values under pruned paths must be removed
7523        // With the bug, values at pruned_root paths (not strict descendants) would remain
7524        for i in 0..16u8 {
7525            let path = pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5]));
7526            assert!(
7527                parallel.get_leaf_value(&path).is_none(),
7528                "value at {:?} should be removed after prune",
7529                path
7530            );
7531        }
7532    }
7533
7534    #[test]
7535    fn test_prune_empty_trie() {
7536        let mut trie = ParallelSparseTrie::default();
7537        trie.prune(&[]);
7538        let root = trie.root();
7539        assert_eq!(root, EMPTY_ROOT_HASH, "empty trie should have empty root hash");
7540    }
7541
7542    #[test]
7543    fn test_prune_preserves_root_hash() {
7544        let mut trie = ParallelSparseTrie::default();
7545
7546        let value = large_account_value();
7547
7548        for i in 0..8u8 {
7549            for j in 0..4u8 {
7550                trie.update_leaf(
7551                    pad_nibbles_right(Nibbles::from_nibbles([i, j, 0x3, 0x4, 0x5, 0x6])),
7552                    value.clone(),
7553                )
7554                .unwrap();
7555            }
7556        }
7557
7558        let root_before = trie.root();
7559        trie.prune(&[]);
7560        let root_after = trie.root();
7561        assert_eq!(root_before, root_after, "root hash must be preserved after prune");
7562    }
7563
7564    #[test]
7565    fn test_prune_single_leaf_trie() {
7566        let mut trie = ParallelSparseTrie::default();
7567
7568        let value = large_account_value();
7569        trie.update_leaf(pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])), value)
7570            .unwrap();
7571
7572        let root_before = trie.root();
7573        let nodes_before = trie.size_hint();
7574
7575        trie.prune(&[]);
7576
7577        let root_after = trie.root();
7578        assert_eq!(root_before, root_after, "root hash should be preserved");
7579        assert_eq!(trie.size_hint(), nodes_before, "single leaf trie should not change");
7580    }
7581
7582    #[test]
7583    fn test_prune_root_hash_preserved() {
7584        let mut trie = ParallelSparseTrie::default();
7585
7586        // Create two 64-nibble paths that differ only in the first nibble
7587        let key1 = Nibbles::unpack(B256::repeat_byte(0x00));
7588        let key2 = Nibbles::unpack(B256::repeat_byte(0x11));
7589
7590        let large_value = large_account_value();
7591        trie.update_leaf(key1, large_value.clone()).unwrap();
7592        trie.update_leaf(key2, large_value).unwrap();
7593
7594        let root_before = trie.root();
7595
7596        trie.prune(&[]);
7597
7598        assert_eq!(root_before, trie.root(), "root hash must be preserved after pruning");
7599    }
7600
7601    #[test]
7602    fn test_prune_mixed_embedded_and_hashed() {
7603        let mut trie = ParallelSparseTrie::default();
7604
7605        let large_value = large_account_value();
7606        let small_value = vec![0x80];
7607
7608        for i in 0..8u8 {
7609            let value = if i < 4 { large_value.clone() } else { small_value.clone() };
7610            trie.update_leaf(pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3])), value)
7611                .unwrap();
7612        }
7613
7614        let root_before = trie.root();
7615        trie.prune(&[]);
7616        assert_eq!(root_before, trie.root(), "root hash must be preserved");
7617    }
7618
7619    #[test]
7620    fn test_prune_all_lower_subtries() {
7621        let large_value = large_account_value();
7622
7623        let mut keys = Vec::new();
7624        for first in 0..16u8 {
7625            for second in 0..16u8 {
7626                keys.push(pad_nibbles_right(Nibbles::from_nibbles([
7627                    first, second, 0x1, 0x2, 0x3, 0x4,
7628                ])));
7629            }
7630        }
7631
7632        let mut trie = ParallelSparseTrie::default();
7633
7634        for key in &keys {
7635            trie.update_leaf(*key, large_value.clone()).unwrap();
7636        }
7637
7638        let root_before = trie.root();
7639
7640        let total_pruned = trie.prune(&[]);
7641
7642        assert!(total_pruned > 0, "should have pruned some nodes");
7643        assert_eq!(root_before, trie.root(), "root hash should be preserved");
7644
7645        for key in &keys {
7646            assert!(trie.get_leaf_value(key).is_none(), "value should be pruned");
7647        }
7648    }
7649
7650    #[test]
7651    fn test_prune_keeps_only_hot_paths() {
7652        let mut trie = ParallelSparseTrie::default();
7653
7654        let key_keep = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7655        let key_drop_1 = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x3, 0x4]));
7656        let key_drop_2 = pad_nibbles_right(Nibbles::from_nibbles([0x9, 0x2, 0x3, 0x4]));
7657
7658        let value = large_account_value();
7659        trie.update_leaf(key_keep, value.clone()).unwrap();
7660        trie.update_leaf(key_drop_1, value.clone()).unwrap();
7661        trie.update_leaf(key_drop_2, value).unwrap();
7662
7663        let root_before = trie.root();
7664
7665        let pruned = trie.prune(&[key_keep]);
7666        assert!(pruned > 0, "expected some nodes to be pruned");
7667        assert_eq!(root_before, trie.root(), "root hash should be preserved after LFU prune");
7668
7669        assert!(trie.get_leaf_value(&key_keep).is_some(), "retained key must remain revealed");
7670        assert!(trie.get_leaf_value(&key_drop_1).is_none(), "non-retained key should be pruned");
7671        assert!(trie.get_leaf_value(&key_drop_2).is_none(), "non-retained key should be pruned");
7672    }
7673
7674    #[test]
7675    fn test_prune_update_after() {
7676        // After pruning, we should be able to update leaves without panic.
7677        let mut trie = ParallelSparseTrie::default();
7678
7679        let value = large_account_value();
7680
7681        // Create keys that span into lower subtries (path.len() >= UPPER_TRIE_MAX_DEPTH)
7682        for first in 0..4u8 {
7683            for second in 0..4u8 {
7684                trie.update_leaf(
7685                    pad_nibbles_right(Nibbles::from_nibbles([
7686                        first, second, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6,
7687                    ])),
7688                    value.clone(),
7689                )
7690                .unwrap();
7691            }
7692        }
7693
7694        let root_before = trie.root();
7695
7696        trie.prune(&[]);
7697
7698        let root_after = trie.root();
7699        assert_eq!(root_before, root_after, "root hash should be preserved");
7700
7701        // Now try to update a leaf - this should not panic even though lower subtries
7702        // were replaced with Blind(None)
7703        let new_path =
7704            pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x5, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7705        trie.update_leaf(new_path, value).unwrap();
7706
7707        // The trie should still be functional
7708        let _ = trie.root();
7709    }
7710
7711    // update_leaves tests
7712
7713    #[test]
7714    fn test_update_leaves_successful_update() {
7715        use crate::LeafUpdate;
7716        use alloy_primitives::map::B256Map;
7717        use std::cell::RefCell;
7718        let mut trie = ParallelSparseTrie::default();
7719
7720        // Create a leaf in the trie using a full-length key
7721        let b256_key = B256::with_last_byte(42);
7722        let key = Nibbles::unpack(b256_key);
7723        let value = encode_account_value(1);
7724        trie.update_leaf(key, value).unwrap();
7725
7726        // Create update map with a new value for the same key
7727        let new_value = encode_account_value(2);
7728
7729        let mut updates: B256Map<LeafUpdate> = B256Map::default();
7730        updates.insert(b256_key, LeafUpdate::Changed(new_value));
7731
7732        let proof_targets = RefCell::new(Vec::new());
7733        trie.update_leaves(&mut updates, |path, min_len| {
7734            proof_targets.borrow_mut().push((path, min_len));
7735        })
7736        .unwrap();
7737
7738        // Update should succeed: map empty, callback not invoked
7739        assert!(updates.is_empty(), "Update map should be empty after successful update");
7740        assert!(
7741            proof_targets.borrow().is_empty(),
7742            "Callback should not be invoked for revealed paths"
7743        );
7744    }
7745
7746    #[test]
7747    fn test_update_leaves_insert_new_leaf() {
7748        use crate::LeafUpdate;
7749        use alloy_primitives::map::B256Map;
7750        use std::cell::RefCell;
7751
7752        let mut trie = ParallelSparseTrie::default();
7753
7754        // Insert a NEW leaf (key doesn't exist yet) via update_leaves
7755        let b256_key = B256::with_last_byte(99);
7756        let new_value = encode_account_value(42);
7757
7758        let mut updates: B256Map<LeafUpdate> = B256Map::default();
7759        updates.insert(b256_key, LeafUpdate::Changed(new_value.clone()));
7760
7761        let proof_targets = RefCell::new(Vec::new());
7762        trie.update_leaves(&mut updates, |path, min_len| {
7763            proof_targets.borrow_mut().push((path, min_len));
7764        })
7765        .unwrap();
7766
7767        // Insert should succeed: map empty, callback not invoked
7768        assert!(updates.is_empty(), "Update map should be empty after successful insert");
7769        assert!(
7770            proof_targets.borrow().is_empty(),
7771            "Callback should not be invoked for new leaf insert"
7772        );
7773
7774        // Verify the leaf was actually inserted
7775        let full_path = Nibbles::unpack(b256_key);
7776        assert_eq!(
7777            trie.get_leaf_value(&full_path),
7778            Some(&new_value),
7779            "New leaf value should be retrievable"
7780        );
7781    }
7782
    #[test]
    fn test_update_leaves_blinded_node() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // A `Changed` update targeting a path behind a blinded (Hash) node must
        // not apply: the update stays queued, the proof callback fires with the
        // blinded node's path length, and the prefix set is left untouched.

        // Create a trie with a blinded node.
        // Use a small value that fits in RLP encoding (embedded, < 32 bytes).
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        // Root branch: blinded Hash child at nibble 0, embedded leaf at nibble 1.
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal only the branch and one child, leaving child 0 as a Hash node
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // The path 0x0... is blinded (Hash node)
        // Create an update targeting the blinded path using a full B256 key
        let b256_key = B256::ZERO; // starts with 0x0...

        let new_value = encode_account_value(42);
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(new_value));

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Update should remain in map (blinded node)
        assert!(!updates.is_empty(), "Update should remain in map when hitting blinded node");

        // prefix_set should be unchanged after failed update
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed update on blinded node"
        );

        // Callback should be invoked
        let targets = proof_targets.borrow();
        assert!(!targets.is_empty(), "Callback should be invoked for blinded path");

        // min_len should equal the blinded node's path length (1 nibble)
        assert_eq!(targets[0].1, 1, "min_len should equal blinded node path length");
    }
7860
7861    #[test]
7862    fn test_update_leaves_removal() {
7863        use crate::LeafUpdate;
7864        use alloy_primitives::map::B256Map;
7865        use std::cell::RefCell;
7866        let mut trie = ParallelSparseTrie::default();
7867
7868        // Create two leaves so removal doesn't result in empty trie issues
7869        // Use full-length keys
7870        let b256_key1 = B256::with_last_byte(1);
7871        let b256_key2 = B256::with_last_byte(2);
7872        let key1 = Nibbles::unpack(b256_key1);
7873        let key2 = Nibbles::unpack(b256_key2);
7874        let value = encode_account_value(1);
7875        trie.update_leaf(key1, value.clone()).unwrap();
7876        trie.update_leaf(key2, value).unwrap();
7877
7878        // Create an update to remove key1 (empty value = removal)
7879        let mut updates: B256Map<LeafUpdate> = B256Map::default();
7880        updates.insert(b256_key1, LeafUpdate::Changed(vec![])); // empty = removal
7881
7882        let proof_targets = RefCell::new(Vec::new());
7883        trie.update_leaves(&mut updates, |path, min_len| {
7884            proof_targets.borrow_mut().push((path, min_len));
7885        })
7886        .unwrap();
7887
7888        // Removal should succeed: map empty
7889        assert!(updates.is_empty(), "Update map should be empty after successful removal");
7890    }
7891
    #[test]
    fn test_update_leaves_removal_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Removing a leaf that sits behind a blinded (Hash) node must fail
        // atomically: the update stays queued, the proof callback fires, the
        // pre-existing value is preserved, and the prefix set is unchanged.

        // Create a trie with a blinded node.
        // Use a small value that fits in RLP encoding (embedded, < 32 bytes).
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        // Root branch: blinded Hash child at nibble 0, embedded leaf at nibble 1.
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the branch and the leaf at nibble 1, leaving nibble 0 blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Simulate having a known value behind the blinded node
        let b256_key = B256::ZERO; // starts with 0x0...
        let full_path = Nibbles::unpack(b256_key);

        // Insert the value into the trie's values map (simulating we know about it)
        let old_value = encode_account_value(99);
        trie.upper_subtrie.inner.values.insert(full_path, old_value.clone());

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); // empty = removal

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Callback should be invoked
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked when removal hits blinded node"
        );

        // Update should remain in map
        assert!(!updates.is_empty(), "Update should remain in map when removal hits blinded node");

        // Original value should be preserved (reverted)
        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&old_value),
            "Original value should be preserved after failed removal"
        );

        // prefix_set should be unchanged after failed removal
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed removal on blinded node"
        );
    }
7977
    #[test]
    fn test_update_leaves_removal_branch_collapse_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Removing the revealed leaf would collapse its parent branch into the
        // remaining child — but that sibling is blinded, so the removal must fail
        // atomically: update queued, callback invoked, node count, prefix set and
        // leaf value all unchanged.

        // Create a branch node at root with two children:
        // - Child at nibble 0: a blinded Hash node
        // - Child at nibble 1: a revealed Leaf node
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(Nibbles::default(), small_value);
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at nibble 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), /* leaf at nibble 1 */
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01), // nibble 0 is hashed
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the branch and the leaf at nibble 1, leaving nibble 0 as Hash node
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Insert the leaf's value into the values map for the revealed leaf
        // Use B256 key that starts with nibble 1 (0x10 has first nibble = 1)
        let b256_key = B256::with_last_byte(0x10);
        let full_path = Nibbles::unpack(b256_key);
        let leaf_value = encode_account_value(42);
        trie.upper_subtrie.inner.values.insert(full_path, leaf_value.clone());

        // Record state before update_leaves
        let prefix_set_len_before = trie.prefix_set.len();
        let node_count_before = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); // removal

        let proof_targets = RefCell::new(Vec::new());
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Assert: update remains in map (removal blocked by blinded sibling)
        assert!(
            !updates.is_empty(),
            "Update should remain in map when removal would collapse branch with blinded sibling"
        );

        // Assert: callback was invoked for the blinded path
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked for blinded sibling path"
        );

        // Assert: prefix_set unchanged (atomic failure)
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after atomic failure"
        );

        // Assert: node count unchanged
        let node_count_after = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();
        assert_eq!(
            node_count_before, node_count_after,
            "Node count should be unchanged after atomic failure"
        );

        // Assert: the leaf value still exists (not removed)
        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&leaf_value),
            "Leaf value should still exist after failed removal"
        );
    }
8084
8085    #[test]
8086    fn test_update_leaves_touched() {
8087        use crate::LeafUpdate;
8088        use alloy_primitives::map::B256Map;
8089        use std::cell::RefCell;
8090        let mut trie = ParallelSparseTrie::default();
8091
8092        // Create a leaf in the trie using a full-length key
8093        let b256_key = B256::with_last_byte(42);
8094        let key = Nibbles::unpack(b256_key);
8095        let value = encode_account_value(1);
8096        trie.update_leaf(key, value).unwrap();
8097
8098        // Create a Touched update for the existing key
8099        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8100        updates.insert(b256_key, LeafUpdate::Touched);
8101
8102        let proof_targets = RefCell::new(Vec::new());
8103        let prefix_set_len_before = trie.prefix_set.len();
8104
8105        trie.update_leaves(&mut updates, |path, min_len| {
8106            proof_targets.borrow_mut().push((path, min_len));
8107        })
8108        .unwrap();
8109
8110        // Update should be removed (path is accessible)
8111        assert!(updates.is_empty(), "Touched update should be removed for accessible path");
8112
8113        // No callback
8114        assert!(
8115            proof_targets.borrow().is_empty(),
8116            "Callback should not be invoked for accessible path"
8117        );
8118
8119        // prefix_set should be unchanged since Touched is read-only
8120        assert_eq!(
8121            trie.prefix_set.len(),
8122            prefix_set_len_before,
8123            "prefix_set should be unchanged for Touched update (read-only)"
8124        );
8125    }
8126
8127    #[test]
8128    fn test_update_leaves_touched_nonexistent() {
8129        use crate::LeafUpdate;
8130        use alloy_primitives::map::B256Map;
8131        use std::cell::RefCell;
8132
8133        let mut trie = ParallelSparseTrie::default();
8134
8135        // Create a Touched update for a key that doesn't exist
8136        let b256_key = B256::with_last_byte(99);
8137        let full_path = Nibbles::unpack(b256_key);
8138
8139        let prefix_set_len_before = trie.prefix_set.len();
8140
8141        let mut updates: B256Map<LeafUpdate> = B256Map::default();
8142        updates.insert(b256_key, LeafUpdate::Touched);
8143
8144        let proof_targets = RefCell::new(Vec::new());
8145        trie.update_leaves(&mut updates, |path, min_len| {
8146            proof_targets.borrow_mut().push((path, min_len));
8147        })
8148        .unwrap();
8149
8150        // Update should be removed (path IS accessible - it's just empty)
8151        assert!(updates.is_empty(), "Touched update should be removed for accessible (empty) path");
8152
8153        // No callback should be invoked (path is revealed, just empty)
8154        assert!(
8155            proof_targets.borrow().is_empty(),
8156            "Callback should not be invoked for accessible path"
8157        );
8158
8159        // prefix_set should NOT be modified (Touched is read-only)
8160        assert_eq!(
8161            trie.prefix_set.len(),
8162            prefix_set_len_before,
8163            "prefix_set should not be modified by Touched update"
8164        );
8165
8166        // No value should be inserted
8167        assert!(
8168            trie.get_leaf_value(&full_path).is_none(),
8169            "No value should exist for non-existent key after Touched update"
8170        );
8171    }
8172
    #[test]
    fn test_update_leaves_touched_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // A `Touched` update aimed at a path behind a blinded (Hash) node cannot
        // be satisfied: the update stays queued, the proof callback fires, and —
        // since Touched is read-only — the prefix set is left unchanged.

        // Create a trie with a blinded node.
        // Use a small value that fits in RLP encoding (embedded, < 32 bytes).
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        // Root branch: blinded Hash child at nibble 0, embedded leaf at nibble 1.
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the branch and the leaf at nibble 1, leaving nibble 0 blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Create a Touched update targeting the blinded path using full B256 key
        let b256_key = B256::ZERO; // starts with 0x0...

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Touched);

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Callback should be invoked
        assert!(!proof_targets.borrow().is_empty(), "Callback should be invoked for blinded path");

        // Update should remain in map
        assert!(!updates.is_empty(), "Touched update should remain in map for blinded path");

        // prefix_set should be unchanged since Touched is read-only
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged for Touched update on blinded path"
        );
    }
8243
    #[test]
    fn test_update_leaves_deduplication() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Several updates whose keys all resolve to the same blinded node: the
        // proof callback is invoked once per unique full path (dedup is by
        // (full_path, min_len), not by blinded node), each with the blinded
        // node's path length as min_len.

        // Create a trie with a blinded node.
        // Use a small value that fits in RLP encoding (embedded, < 32 bytes).
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), // short key for RLP encoding
            small_value,
        );
        // Root branch: blinded Hash child at nibble 0, embedded leaf at nibble 1.
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // revealed at 1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the branch and the leaf at nibble 1, leaving nibble 0 blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Create multiple updates that would all hit the same blinded node at path 0x0
        // Use full B256 keys that all start with 0x0
        let b256_key1 = B256::ZERO;
        let b256_key2 = B256::with_last_byte(1); // still starts with 0x0
        let b256_key3 = B256::with_last_byte(2); // still starts with 0x0

        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        let value = encode_account_value(42);

        updates.insert(b256_key1, LeafUpdate::Changed(value.clone()));
        updates.insert(b256_key2, LeafUpdate::Changed(value.clone()));
        updates.insert(b256_key3, LeafUpdate::Changed(value));

        let proof_targets = RefCell::new(Vec::new());
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The callback should be invoked 3 times - once for each unique full_path
        // The deduplication is by (full_path, min_len), not by blinded node
        let targets = proof_targets.borrow();
        assert_eq!(targets.len(), 3, "Callback should be invoked for each unique key");

        // All should have the same min_len (1) since they all hit blinded node at path 0x0
        for (_, min_len) in targets.iter() {
            assert_eq!(*min_len, 1, "All should have min_len 1 from blinded node at 0x0");
        }
    }
8317
8318    #[test]
8319    fn test_nibbles_to_padded_b256() {
8320        // Empty nibbles should produce all zeros
8321        let empty = Nibbles::default();
8322        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&empty), B256::ZERO);
8323
8324        // Full 64-nibble path should round-trip through B256
8325        let full_key = b256!("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef");
8326        let full_nibbles = Nibbles::unpack(full_key);
8327        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&full_nibbles), full_key);
8328
8329        // Partial nibbles should be left-aligned with zero padding on the right
8330        // 4 nibbles [0x1, 0x2, 0x3, 0x4] should pack to 0x1234...00
8331        let partial = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
8332        let expected = b256!("1234000000000000000000000000000000000000000000000000000000000000");
8333        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&partial), expected);
8334
8335        // Single nibble
8336        let single = Nibbles::from_nibbles_unchecked([0xf]);
8337        let expected_single =
8338            b256!("f000000000000000000000000000000000000000000000000000000000000000");
8339        assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&single), expected_single);
8340    }
8341
    #[test]
    fn test_memory_size() {
        // `memory_size` must grow as subtries are populated: an empty trie is at
        // least as large as the struct itself, and a trie with revealed lower
        // subtries reports strictly more.

        // Test that memory_size returns a reasonable value for an empty trie
        let trie = ParallelSparseTrie::default();
        let empty_size = trie.memory_size();

        // Should at least be the size of the struct itself
        assert!(empty_size >= core::mem::size_of::<ParallelSparseTrie>());

        // Create a trie with some data. Set up a root branch with children at 0x1 and
        // 0x5, and branches at [0x1] and [0x5] pointing to 0x2 and 0x6 respectively,
        // so the lower subtries at [0x1, 0x2] and [0x5, 0x6] are reachable.
        let root_branch = create_branch_node_with_children(
            &[0x1, 0x5],
            [
                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
            ],
        );
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Upper-trie branches routing into the two lower subtries.
        let branch_at_1 =
            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xCC))]);
        let branch_at_5 =
            create_branch_node_with_children(&[0x6], [RlpNode::word_rlp(&B256::repeat_byte(0xDD))]);
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles_unchecked([0x1]),
                node: branch_at_1,
                masks: None,
            },
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles_unchecked([0x5]),
                node: branch_at_5,
                masks: None,
            },
        ])
        .unwrap();

        // Leaves at depth 2, i.e. inside the lower subtries.
        let mut nodes = vec![
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles_unchecked([0x1, 0x2]),
                node: TrieNodeV2::Leaf(LeafNode {
                    key: Nibbles::from_nibbles_unchecked([0x3, 0x4]),
                    value: vec![1, 2, 3],
                }),
                masks: None,
            },
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles_unchecked([0x5, 0x6]),
                node: TrieNodeV2::Leaf(LeafNode {
                    key: Nibbles::from_nibbles_unchecked([0x7, 0x8]),
                    value: vec![4, 5, 6],
                }),
                masks: None,
            },
        ];
        trie.reveal_nodes(&mut nodes).unwrap();

        let populated_size = trie.memory_size();

        // Populated trie should use more memory than an empty one
        assert!(populated_size > empty_size);
    }
8406
8407    #[test]
8408    fn test_reveal_extension_branch_leaves_then_root() {
8409        // Test structure:
8410        // - 0x (root): extension node with key of 63 zeroes
8411        // - 0x000...000 (63 zeroes): branch node with children at 1 and 2
8412        // - 0x000...0001 (62 zeroes + 01): leaf with value 1
8413        // - 0x000...0002 (62 zeroes + 02): leaf with value 2
8414        //
8415        // The leaves and branch are small enough to be embedded (< 32 bytes),
8416        // so we manually RLP encode them and use those encodings in parent nodes.
8417
8418        // Create the extension key (63 zero nibbles)
8419        let ext_key: [u8; 63] = [0; 63];
8420
8421        // The branch is at the end of the extension (63 zeroes)
8422        let branch_path = Nibbles::from_nibbles(ext_key);
8423
8424        // Leaf paths: 63 zeroes + 1, 63 zeroes + 2
8425        let mut leaf1_path_bytes = [0u8; 64];
8426        leaf1_path_bytes[63] = 1;
8427        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);
8428
8429        let mut leaf2_path_bytes = [0u8; 64];
8430        leaf2_path_bytes[63] = 2;
8431        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);
8432
8433        // Create leaves with empty keys (full path consumed by extension + branch)
8434        // and simple values
8435        let leaf1_node = LeafNode::new(Nibbles::default(), vec![0x1]);
8436        let leaf2_node = LeafNode::new(Nibbles::default(), vec![0x2]);
8437
8438        // RLP encode the leaves to get their RlpNode representations
8439        let leaf1_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf1_node.clone())));
8440        let leaf2_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf2_node.clone())));
8441
8442        // Create the branch node with children at indices 1 and 2, using the RLP-encoded leaves.
8443        // In V2, branch and extension are combined: the key holds the extension prefix.
8444        let state_mask = TrieMask::new(0b0000_0110); // bits 1 and 2 set
8445        let stack = vec![leaf1_rlp, leaf2_rlp];
8446
8447        // First encode the bare branch (empty key) to get its RlpNode
8448        let bare_branch = BranchNodeV2::new(Nibbles::new(), stack.clone(), state_mask, None);
8449        let branch_rlp = RlpNode::from_rlp(&alloy_rlp::encode(&bare_branch));
8450
8451        // Create the combined extension+branch node as the root.
8452        let root_node = TrieNodeV2::Branch(BranchNodeV2::new(
8453            Nibbles::from_nibbles(ext_key),
8454            stack.clone(),
8455            state_mask,
8456            Some(branch_rlp),
8457        ));
8458
8459        // Initialize trie with the extension+branch as root
8460        let mut trie = ParallelSparseTrie::from_root(root_node, None, false).unwrap();
8461
8462        // Reveal the branch and leaves
8463        let mut nodes = vec![
8464            ProofTrieNodeV2 {
8465                path: branch_path,
8466                node: TrieNodeV2::Branch(BranchNodeV2::new(
8467                    Nibbles::new(),
8468                    stack,
8469                    state_mask,
8470                    None,
8471                )),
8472                masks: None,
8473            },
8474            ProofTrieNodeV2 { path: leaf1_path, node: TrieNodeV2::Leaf(leaf1_node), masks: None },
8475            ProofTrieNodeV2 { path: leaf2_path, node: TrieNodeV2::Leaf(leaf2_node), masks: None },
8476        ];
8477        trie.reveal_nodes(&mut nodes).unwrap();
8478
8479        // Add the leaf paths to prefix_set so that root() will update their hashes
8480        trie.prefix_set.insert(leaf1_path);
8481        trie.prefix_set.insert(leaf2_path);
8482
8483        // Call root() to compute the trie root hash
8484        let _root = trie.root();
8485    }
8486
8487    #[test]
8488    fn test_update_leaf_creates_embedded_nodes_then_root() {
8489        // Similar structure to test_reveal_extension_branch_leaves_then_root, but created
8490        // via update_leaf calls on an empty trie instead of revealing pre-built nodes.
8491        //
8492        // Two leaves with paths that share a long common prefix will create:
8493        // - Extension node at root with the shared prefix
8494        // - Branch node where the paths diverge
8495        // - Two leaf nodes (embedded in the branch since they're small)
8496
8497        // Create two paths that share 63 nibbles and differ only at the 64th
8498        let mut leaf1_path_bytes = [0u8; 64];
8499        leaf1_path_bytes[63] = 1;
8500        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);
8501
8502        let mut leaf2_path_bytes = [0u8; 64];
8503        leaf2_path_bytes[63] = 2;
8504        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);
8505
8506        // Create an empty trie and update with two leaves
8507        let mut trie = ParallelSparseTrie::default();
8508        trie.update_leaf(leaf1_path, vec![0x1]).unwrap();
8509        trie.update_leaf(leaf2_path, vec![0x2]).unwrap();
8510
8511        // Call root() to compute the trie root hash
8512        let _root = trie.root();
8513    }
8514}