1#[cfg(feature = "trie-debug")]
2use crate::debug_recorder::{LeafUpdateRecord, ProofTrieNodeRecord, RecordedOp, TrieDebugRecorder};
3use crate::{
4 lower::LowerSparseSubtrie, provider::TrieNodeProvider, LeafLookup, LeafLookupError,
5 RlpNodeStackItem, SparseNode, SparseNodeState, SparseNodeType, SparseTrie, SparseTrieUpdates,
6};
7use alloc::{borrow::Cow, boxed::Box, vec, vec::Vec};
8use alloy_primitives::{
9 map::{Entry, HashMap, HashSet},
10 B256, U256,
11};
12use alloy_rlp::Decodable;
13use alloy_trie::{BranchNodeCompact, TrieMask, EMPTY_ROOT_HASH};
14use core::cmp::{Ord, Ordering, PartialOrd};
15use reth_execution_errors::{SparseTrieError, SparseTrieErrorKind, SparseTrieResult};
16#[cfg(feature = "metrics")]
17use reth_primitives_traits::FastInstant as Instant;
18use reth_trie_common::{
19 prefix_set::{PrefixSet, PrefixSetMut},
20 BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles,
21 ProofTrieNodeV2, RlpNode, TrieNodeV2,
22};
23use smallvec::SmallVec;
24use tracing::{instrument, trace};
25
/// Maximum path length (in nibbles) of nodes stored in the upper subtrie; nodes with
/// longer paths live in one of the lower subtries.
pub const UPPER_TRIE_MAX_DEPTH: usize = 2;

/// Number of lower subtries: one for every possible `UPPER_TRIE_MAX_DEPTH`-nibble path
/// prefix (16^2 = 256).
pub const NUM_LOWER_SUBTRIES: usize = 16usize.pow(UPPER_TRIE_MAX_DEPTH as u32);

/// Thresholds controlling when [`ParallelSparseTrie`] switches from sequential to
/// parallel processing.
///
/// With the `Default` value (both thresholds zero) parallelism is always used on `std`
/// builds; on no-`std` builds parallelism is disabled regardless of these values (see
/// `is_reveal_parallelism_enabled` / `is_update_parallelism_enabled`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct ParallelismThresholds {
    /// Minimum number of lower-subtrie nodes in a `reveal_nodes` call before the
    /// reveal is performed in parallel.
    pub min_revealed_nodes: usize,
    /// Minimum number of changed keys in the prefix set before lower-subtrie hash
    /// updates are performed in parallel.
    pub min_updated_nodes: usize,
}
44
/// A sparse trie split into one upper subtrie (paths shorter than
/// [`UPPER_TRIE_MAX_DEPTH`] nibbles) and [`NUM_LOWER_SUBTRIES`] lower subtries (one per
/// two-nibble path prefix), so that reveal and hash-update work can be distributed
/// across lower subtries in parallel.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ParallelSparseTrie {
    /// Subtrie holding nodes whose paths are shorter than `UPPER_TRIE_MAX_DEPTH`.
    upper_subtrie: Box<SparseSubtrie>,
    /// Lower subtries indexed by the leading `UPPER_TRIE_MAX_DEPTH` nibbles of the
    /// path (see `path_subtrie_index_unchecked`); blind until revealed.
    lower_subtries: Box<[LowerSparseSubtrie; NUM_LOWER_SUBTRIES]>,
    /// Paths of leaves changed since the last root/hash computation.
    prefix_set: PrefixSetMut,
    /// Retained branch-node updates; `None` when update retention is disabled.
    updates: Option<SparseTrieUpdates>,
    /// Tree/hash masks for branch nodes, keyed by node path.
    branch_node_masks: BranchNodeMasksMap,
    /// Reusable buffers for collecting update actions from subtrie hash updates.
    update_actions_buffers: Vec<Vec<SparseTrieUpdatesAction>>,
    /// Thresholds deciding when to parallelize reveals and hash updates.
    parallelism_thresholds: ParallelismThresholds,
    /// Trie metrics (only compiled with the `metrics` feature).
    #[cfg(feature = "metrics")]
    metrics: crate::metrics::ParallelSparseTrieMetrics,
    /// Recorder of trie operations for debugging (only with `trie-debug`).
    #[cfg(feature = "trie-debug")]
    debug_recorder: TrieDebugRecorder,
}
135
136impl Default for ParallelSparseTrie {
137 fn default() -> Self {
138 Self {
139 upper_subtrie: Box::new(SparseSubtrie {
140 nodes: HashMap::from_iter([(Nibbles::default(), SparseNode::Empty)]),
141 ..Default::default()
142 }),
143 lower_subtries: Box::new(
144 [const { LowerSparseSubtrie::Blind(None) }; NUM_LOWER_SUBTRIES],
145 ),
146 prefix_set: PrefixSetMut::default(),
147 updates: None,
148 branch_node_masks: BranchNodeMasksMap::default(),
149 update_actions_buffers: Vec::default(),
150 parallelism_thresholds: Default::default(),
151 #[cfg(feature = "metrics")]
152 metrics: Default::default(),
153 #[cfg(feature = "trie-debug")]
154 debug_recorder: Default::default(),
155 }
156 }
157}
158
159impl SparseTrie for ParallelSparseTrie {
    /// Replaces the initial `Empty` root with a revealed root node.
    ///
    /// Expects the trie to be in its freshly-constructed state (the `Empty` root is
    /// asserted and removed). Stores the branch masks, if provided, under the path of
    /// the root's embedded branch (its `key` for a `Branch` node, otherwise the empty
    /// path), then reveals the node in the upper subtrie.
    fn set_root(
        &mut self,
        root: TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        retain_updates: bool,
    ) -> SparseTrieResult<()> {
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::SetRoot {
            node: ProofTrieNodeRecord::from_proof_trie_node_v2(&ProofTrieNodeV2 {
                path: Nibbles::default(),
                node: root.clone(),
                masks,
            }),
        });

        // The default trie has exactly one `Empty` node at the root path; remove it so
        // the revealed root can take its place.
        let path = Nibbles::default();
        let _removed_root = self.upper_subtrie.nodes.remove(&path).expect("root node should exist");
        debug_assert_eq!(_removed_root, SparseNode::Empty);

        self.set_updates(retain_updates);

        if let Some(masks) = masks {
            // Masks belong to the branch node itself; for a `Branch` root the branch
            // sits at the end of its key, for anything else at the empty path.
            let branch_path = if let TrieNodeV2::Branch(branch) = &root {
                branch.key
            } else {
                Nibbles::default()
            };

            self.branch_node_masks.insert(branch_path, masks);
        }

        self.reveal_upper_node(Nibbles::default(), &root, masks)
    }
195
196 fn set_updates(&mut self, retain_updates: bool) {
197 self.updates = retain_updates.then(Default::default);
198 }
199
    /// Reveals a batch of proof nodes, splitting them between the upper subtrie and the
    /// lower subtries and optionally revealing the lower portion in parallel.
    ///
    /// Steps:
    /// 1. Sort nodes by subtrie, then by path, so lower-subtrie nodes form contiguous runs.
    /// 2. Record branch masks (keyed by the branch's full path, including its key).
    /// 3. Reveal upper nodes sequentially.
    /// 4. Collect hashes of boundary nodes that were blinded in their upper-subtrie
    ///    parents, unblinding those parent slots.
    /// 5. Reveal lower nodes either sequentially or via rayon, skipping nodes whose
    ///    subtrie is unreachable or whose boundary leaf is not reachable from above.
    fn reveal_nodes(&mut self, nodes: &mut [ProofTrieNodeV2]) -> SparseTrieResult<()> {
        if nodes.is_empty() {
            return Ok(())
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::RevealNodes {
            nodes: nodes.iter().map(ProofTrieNodeRecord::from_proof_trie_node_v2).collect(),
        });

        // Sort by subtrie first so `chunk_by` below yields one run per lower subtrie,
        // with all upper-subtrie nodes at the front.
        nodes.sort_unstable_by(
            |ProofTrieNodeV2 { path: path_a, .. }, ProofTrieNodeV2 { path: path_b, .. }| {
                let subtrie_type_a = SparseSubtrieType::from_path(path_a);
                let subtrie_type_b = SparseSubtrieType::from_path(path_b);
                subtrie_type_a.cmp(&subtrie_type_b).then_with(|| path_a.cmp(path_b))
            },
        );

        self.branch_node_masks.reserve(nodes.len());
        for ProofTrieNodeV2 { path, masks, node } in nodes.iter() {
            if let Some(branch_masks) = masks {
                // The masks are stored under the branch's own path: a branch node with
                // a non-empty key sits at `path + key`.
                let path = if let TrieNodeV2::Branch(branch) = node &&
                    !branch.key.is_empty()
                {
                    let mut path = *path;
                    path.extend(&branch.key);
                    path
                } else {
                    *path
                };
                self.branch_node_masks.insert(path, *branch_masks);
            }
        }

        // After sorting, upper nodes form a prefix of the slice.
        let num_upper_nodes = nodes
            .iter()
            .position(|n| !SparseSubtrieType::path_len_is_upper(n.path.len()))
            .unwrap_or(nodes.len());
        let (upper_nodes, lower_nodes) = nodes.split_at(num_upper_nodes);

        self.upper_subtrie.nodes.reserve(upper_nodes.len());
        for node in upper_nodes {
            self.reveal_upper_node(node.path, &node.node, node.masks)?;
        }

        let reachable_subtries = self.reachable_subtries();

        // Boundary nodes (paths exactly UPPER_TRIE_MAX_DEPTH long) may be blinded in
        // their upper-subtrie parent branch. Collect those hashes and unset the
        // blinded bit, so the lower subtrie can be revealed with the known hash.
        let hashes_from_upper = nodes
            .iter()
            .filter_map(|node| {
                if node.path.len() != UPPER_TRIE_MAX_DEPTH ||
                    !reachable_subtries.get(path_subtrie_index_unchecked(&node.path))
                {
                    return None;
                }

                let parent_path = node.path.slice(0..UPPER_TRIE_MAX_DEPTH - 1);
                let Some(SparseNode::Branch { blinded_mask, blinded_hashes, .. }) =
                    self.upper_subtrie.nodes.get_mut(&parent_path)
                else {
                    return None;
                };

                let nibble = node.path.last().unwrap();
                blinded_mask.is_bit_set(nibble).then(|| {
                    blinded_mask.unset_bit(nibble);
                    (node.path, blinded_hashes[nibble as usize])
                })
            })
            .collect::<HashMap<_, _>>();

        // Sequential path: reveal lower nodes one by one in sorted order.
        if !self.is_reveal_parallelism_enabled(lower_nodes.len()) {
            for node in lower_nodes {
                let idx = path_subtrie_index_unchecked(&node.path);
                if !reachable_subtries.get(idx) {
                    trace!(
                        target: "trie::parallel_sparse",
                        reveal_path = ?node.path,
                        "Node's lower subtrie is not reachable, skipping",
                    );
                    continue;
                }
                if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                    !Self::is_boundary_leaf_reachable(
                        &self.upper_subtrie.nodes,
                        &node.path,
                        &node.node,
                    )
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        path = ?node.path,
                        "Boundary leaf not reachable from upper subtrie, skipping",
                    );
                    continue;
                }
                self.lower_subtries[idx].reveal(&node.path);
                self.lower_subtries[idx].as_revealed_mut().expect("just revealed").reveal_node(
                    node.path,
                    &node.node,
                    node.masks,
                    hashes_from_upper.get(&node.path).copied(),
                )?;
            }
            return Ok(())
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_reveal_parallelism_enabled");

        // Parallel path: group the sorted lower nodes into per-subtrie runs, take each
        // subtrie out of `self`, reveal its run on a rayon worker, then put it back.
        #[cfg(feature = "std")]
        {
            use rayon::iter::{IntoParallelIterator, ParallelIterator};
            use tracing::Span;

            // Propagate the current tracing span into the rayon workers.
            let parent_span = Span::current();

            let upper_nodes = &self.upper_subtrie.nodes;

            let results = lower_nodes
                .chunk_by(|node_a, node_b| {
                    SparseSubtrieType::from_path(&node_a.path) ==
                        SparseSubtrieType::from_path(&node_b.path)
                })
                .filter_map(|nodes| {
                    // Drop unreachable boundary leaves up front, mirroring the
                    // sequential path's check.
                    let mut nodes = nodes
                        .iter()
                        .filter(|node| {
                            if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                                !Self::is_boundary_leaf_reachable(
                                    upper_nodes,
                                    &node.path,
                                    &node.node,
                                )
                            {
                                trace!(
                                    target: "trie::parallel_sparse",
                                    path = ?node.path,
                                    "Boundary leaf not reachable from upper subtrie, skipping",
                                );
                                false
                            } else {
                                true
                            }
                        })
                        .peekable();

                    let node = nodes.peek()?;
                    let idx =
                        SparseSubtrieType::from_path(&node.path).lower_index().unwrap_or_else(
                            || panic!("upper subtrie node {node:?} found amongst lower nodes"),
                        );

                    if !reachable_subtries.get(idx) {
                        trace!(
                            target: "trie::parallel_sparse",
                            nodes = ?nodes,
                            "Lower subtrie is not reachable, skipping reveal",
                        );
                        return None;
                    }

                    // Take ownership of the subtrie so it can be moved to a worker.
                    self.lower_subtries[idx].reveal(&node.path);
                    Some((
                        idx,
                        self.lower_subtries[idx].take_revealed().expect("just revealed"),
                        nodes,
                    ))
                })
                .collect::<Vec<_>>()
                .into_par_iter()
                .map(|(subtrie_idx, mut subtrie, nodes)| {
                    let _guard = parent_span.enter();

                    subtrie.nodes.reserve(nodes.size_hint().1.unwrap_or(0));

                    for node in nodes {
                        let res = subtrie.reveal_node(
                            node.path,
                            &node.node,
                            node.masks,
                            hashes_from_upper.get(&node.path).copied(),
                        );
                        // Stop at the first error, but still return the subtrie so it
                        // can be reinstalled below.
                        if res.is_err() {
                            return (subtrie_idx, subtrie, res.map(|_| ()))
                        }
                    }
                    (subtrie_idx, subtrie, Ok(()))
                })
                .collect::<Vec<_>>();

            // Reinstall every subtrie (even on error) before reporting the failure.
            let mut any_err = Ok(());
            for (subtrie_idx, subtrie, res) in results {
                self.lower_subtries[subtrie_idx] = LowerSparseSubtrie::Revealed(subtrie);
                if res.is_err() {
                    any_err = res;
                }
            }

            any_err
        }
    }
434
    /// Inserts or updates the leaf value at `full_path` (must be 64 nibbles).
    ///
    /// Fast path: if the value already exists in the upper subtrie or in the leaf's
    /// lower subtrie, it is overwritten in place. Otherwise the leaf is inserted by
    /// walking node-by-node from the root through the upper subtrie; any structural
    /// nodes the walk creates that belong below `UPPER_TRIE_MAX_DEPTH` are migrated
    /// (together with their values) into the appropriate lower subtrie, and the walk
    /// continues inside that subtrie. On error the tentatively-inserted value is
    /// rolled back.
    fn update_leaf<P: TrieNodeProvider>(
        &mut self,
        full_path: Nibbles,
        value: Vec<u8>,
        _provider: P,
    ) -> SparseTrieResult<()> {
        debug_assert_eq!(
            full_path.len(),
            B256::len_bytes() * 2,
            "update_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
            full_path.len()
        );

        trace!(
            target: "trie::parallel_sparse",
            ?full_path,
            value_len = value.len(),
            "Updating leaf",
        );

        // Fast path 1: value already lives in the upper subtrie.
        if self.upper_subtrie.inner.values.contains_key(&full_path) {
            self.prefix_set.insert(full_path);
            self.upper_subtrie.inner.values.insert(full_path, value);
            return Ok(());
        }
        // Fast path 2: value already lives in the leaf's lower subtrie.
        if let Some(subtrie) = self.lower_subtrie_for_path(&full_path) &&
            subtrie.inner.values.contains_key(&full_path)
        {
            self.prefix_set.insert(full_path);
            self.lower_subtrie_for_path_mut(&full_path)
                .expect("subtrie exists")
                .inner
                .values
                .insert(full_path, value);
            return Ok(());
        }

        // Tentatively park the value in the upper subtrie; it is moved or rolled back
        // below depending on where the walk ends.
        self.upper_subtrie.inner.values.insert(full_path, value.clone());

        let mut new_nodes = Vec::new();
        let mut next = Some(Nibbles::default());

        // Walk from the root while still within upper-subtrie depth.
        while let Some(current) =
            next.as_mut().filter(|next| SparseSubtrieType::path_len_is_upper(next.len()))
        {
            let step_result = self.upper_subtrie.update_next_node(current, &full_path);

            // Roll back the tentative value on failure.
            if step_result.is_err() {
                self.upper_subtrie.inner.values.remove(&full_path);
                return step_result.map(|_| ());
            }

            match step_result? {
                LeafUpdateStep::Continue => {}
                LeafUpdateStep::Complete { inserted_nodes } => {
                    new_nodes.extend(inserted_nodes);
                    next = None;
                }
                LeafUpdateStep::NodeNotFound => {
                    next = None;
                }
            }
        }

        // Any newly-created node whose path is too deep for the upper subtrie must be
        // migrated into its lower subtrie, along with its value if it is a leaf.
        for node_path in &new_nodes {
            if SparseSubtrieType::path_len_is_upper(node_path.len()) {
                continue
            }

            let node =
                self.upper_subtrie.nodes.remove(node_path).expect("node belongs to upper subtrie");

            let leaf_value = if let SparseNode::Leaf { key, .. } = &node {
                let mut leaf_full_path = *node_path;
                leaf_full_path.extend(key);
                Some((
                    leaf_full_path,
                    self.upper_subtrie
                        .inner
                        .values
                        .remove(&leaf_full_path)
                        .expect("leaf nodes have associated values entries"),
                ))
            } else {
                None
            };

            let subtrie = self.subtrie_for_path_mut(node_path);

            if let Some((leaf_full_path, value)) = leaf_value {
                subtrie.inner.values.insert(leaf_full_path, value);
            }

            subtrie.nodes.insert(*node_path, node);
        }

        // If the walk crossed into lower-subtrie territory, finish the insertion there.
        if let Some(next_path) = next.filter(|n| !SparseSubtrieType::path_len_is_upper(n.len())) {
            // The value will belong to the lower subtrie, not the upper one.
            self.upper_subtrie.inner.values.remove(&full_path);

            let subtrie = self.subtrie_for_path_mut(&next_path);

            // A freshly-created lower subtrie needs a root node at its own path.
            if subtrie.nodes.is_empty() {
                subtrie.nodes.insert(subtrie.path, SparseNode::Empty);
            }

            if let Err(e) = subtrie.update_leaf(full_path, value) {
                // Roll back the value from the lower subtrie as well.
                if let Some(lower) = self.lower_subtrie_for_path_mut(&full_path) {
                    lower.inner.values.remove(&full_path);
                }
                return Err(e);
            }
        }

        self.prefix_set.insert(full_path);

        Ok(())
    }
590
    /// Removes the leaf at `full_path` (must be 64 nibbles), restructuring the trie
    /// around the removal.
    ///
    /// Phase 1 walks from the root to the leaf, remembering the leaf's closest branch
    /// ancestor ("parent") and the extension directly above that branch
    /// ("grandparent"), and collecting every traversed branch/extension to mark dirty.
    /// Phase 2 bails early if collapsing the parent branch would require a blinded
    /// sibling. Phase 3 deletes the value and the leaf node, then collapses the parent
    /// branch (if it is left with a single child) and shortens/merges the grandparent
    /// extension as needed.
    fn remove_leaf<P: TrieNodeProvider>(
        &mut self,
        full_path: &Nibbles,
        _provider: P,
    ) -> SparseTrieResult<()> {
        debug_assert_eq!(
            full_path.len(),
            B256::len_bytes() * 2,
            "remove_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
            full_path.len()
        );

        trace!(
            target: "trie::parallel_sparse",
            ?full_path,
            "Removing leaf",
        );

        let leaf_path;
        let leaf_subtrie_type;

        // Closest branch ancestor of the leaf, if any.
        let mut branch_parent_path: Option<Nibbles> = None;
        let mut branch_parent_node: Option<SparseNode> = None;

        // Extension node sitting directly above that branch, if any.
        let mut ext_grandparent_path: Option<Nibbles> = None;
        let mut ext_grandparent_node: Option<SparseNode> = None;

        let mut curr_path = Nibbles::new();
        let mut curr_subtrie_type = SparseSubtrieType::Upper;

        let mut paths_to_mark_dirty = Vec::new();

        // Phase 1: traverse from the root down to the leaf.
        loop {
            let curr_subtrie = match curr_subtrie_type {
                SparseSubtrieType::Upper => &mut self.upper_subtrie,
                SparseSubtrieType::Lower(idx) => {
                    self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
                }
            };
            let curr_node = curr_subtrie.nodes.get_mut(&curr_path).unwrap();

            match Self::find_next_to_leaf(&curr_path, curr_node, full_path) {
                // Leaf doesn't exist: removal is a no-op.
                FindNextToLeafOutcome::NotFound => return Ok(()),
                FindNextToLeafOutcome::BlindedNode(path) => {
                    return Err(SparseTrieErrorKind::BlindedNode(path).into())
                }
                FindNextToLeafOutcome::Found => {
                    leaf_path = curr_path;
                    leaf_subtrie_type = curr_subtrie_type;
                    break;
                }
                FindNextToLeafOutcome::ContinueFrom(next_path) => {
                    match curr_node {
                        SparseNode::Branch { .. } => {
                            paths_to_mark_dirty
                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));

                            // A recorded extension only matters if it is directly above
                            // the new parent branch; a shallower one is stale.
                            match (&branch_parent_path, &ext_grandparent_path) {
                                (Some(branch), Some(ext)) if branch.len() > ext.len() => {
                                    ext_grandparent_path = None;
                                    ext_grandparent_node = None;
                                }
                                _ => (),
                            };
                            branch_parent_path = Some(curr_path);
                            branch_parent_node = Some(curr_node.clone());
                        }
                        SparseNode::Extension { .. } => {
                            paths_to_mark_dirty
                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));

                            ext_grandparent_path = Some(curr_path);
                            ext_grandparent_node = Some(curr_node.clone());
                        }
                        SparseNode::Empty | SparseNode::Leaf { .. } => {
                            unreachable!(
                                "find_next_to_leaf only continues to a branch or extension"
                            )
                        }
                    }

                    curr_path = next_path;

                    // Once the walk crosses into a lower subtrie it never returns to
                    // the upper one.
                    let next_subtrie_type = SparseSubtrieType::from_path(&curr_path);
                    if matches!(curr_subtrie_type, SparseSubtrieType::Upper) &&
                        matches!(next_subtrie_type, SparseSubtrieType::Lower(_))
                    {
                        curr_subtrie_type = next_subtrie_type;
                    }
                }
            };
        }

        // Phase 2: if removing this leaf would collapse the parent branch onto a
        // blinded sibling, fail before mutating anything.
        if let (Some(branch_path), Some(SparseNode::Branch { state_mask, blinded_mask, .. })) =
            (&branch_parent_path, &branch_parent_node)
        {
            let mut check_mask = *state_mask;
            let child_nibble = leaf_path.get_unchecked(branch_path.len());
            check_mask.unset_bit(child_nibble);

            if check_mask.count_bits() == 1 {
                let remaining_nibble =
                    check_mask.first_set_bit_index().expect("state mask is not empty");

                if blinded_mask.is_bit_set(remaining_nibble) {
                    let mut path = *branch_path;
                    path.push_unchecked(remaining_nibble);
                    return Err(SparseTrieErrorKind::BlindedNode(path).into());
                }
            }
        }

        // Phase 3: actually remove the value and leaf, then restructure ancestors.
        self.prefix_set.insert(*full_path);
        let leaf_subtrie = match leaf_subtrie_type {
            SparseSubtrieType::Upper => &mut self.upper_subtrie,
            SparseSubtrieType::Lower(idx) => {
                self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
            }
        };
        leaf_subtrie.inner.values.remove(full_path);
        // Invalidate cached state on every branch/extension traversed on the way down.
        for (subtrie_type, path) in paths_to_mark_dirty {
            let node = match subtrie_type {
                SparseSubtrieType::Upper => self.upper_subtrie.nodes.get_mut(&path),
                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx]
                    .as_revealed_mut()
                    .expect("lower subtrie is revealed")
                    .nodes
                    .get_mut(&path),
            }
            .expect("node exists");

            match node {
                SparseNode::Extension { state, .. } | SparseNode::Branch { state, .. } => {
                    *state = SparseNodeState::Dirty
                }
                SparseNode::Empty | SparseNode::Leaf { .. } => {
                    unreachable!(
                        "only branch and extension nodes can be marked dirty when removing a leaf"
                    )
                }
            }
        }
        self.remove_node(&leaf_path);

        // Removing the root leaf empties the trie; restore the Empty root invariant.
        if leaf_path.is_empty() {
            self.upper_subtrie.nodes.insert(leaf_path, SparseNode::Empty);
            return Ok(())
        }

        // Rebuild the parent branch: either collapse it onto its single remaining
        // child, or just clear the removed child's bit and mark it dirty.
        if let (
            Some(branch_path),
            &Some(SparseNode::Branch { mut state_mask, blinded_mask, ref blinded_hashes, .. }),
        ) = (&branch_parent_path, &branch_parent_node)
        {
            let child_nibble = leaf_path.get_unchecked(branch_path.len());
            state_mask.unset_bit(child_nibble);

            let new_branch_node = if state_mask.count_bits() == 1 {
                let remaining_child_nibble =
                    state_mask.first_set_bit_index().expect("state mask is not empty");
                let mut remaining_child_path = *branch_path;
                remaining_child_path.push_unchecked(remaining_child_nibble);

                trace!(
                    target: "trie::parallel_sparse",
                    ?leaf_path,
                    ?branch_path,
                    ?remaining_child_path,
                    "Branch node has only one child",
                );

                // Re-checked here because Phase 2 ran against the pre-removal node.
                if blinded_mask.is_bit_set(remaining_child_nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(remaining_child_path).into());
                }

                let remaining_child_node = self
                    .subtrie_for_path_mut(&remaining_child_path)
                    .nodes
                    .get(&remaining_child_path)
                    .unwrap();

                let (new_branch_node, remove_child) = Self::branch_changes_on_leaf_removal(
                    branch_path,
                    &remaining_child_path,
                    remaining_child_node,
                );

                if remove_child {
                    self.move_value_on_leaf_removal(
                        branch_path,
                        &new_branch_node,
                        &remaining_child_path,
                    );
                    self.remove_node(&remaining_child_path);
                }

                // A collapsed branch no longer exists as a branch on-disk.
                if let Some(updates) = self.updates.as_mut() {
                    updates.updated_nodes.remove(branch_path);
                    updates.removed_nodes.insert(*branch_path);
                }

                new_branch_node
            } else {
                SparseNode::Branch {
                    state_mask,
                    blinded_mask,
                    blinded_hashes: blinded_hashes.clone(),
                    state: SparseNodeState::Dirty,
                }
            };

            let branch_subtrie = self.subtrie_for_path_mut(branch_path);
            branch_subtrie.nodes.insert(*branch_path, new_branch_node.clone());
            branch_parent_node = Some(new_branch_node);
        };

        // If the branch above was replaced, the extension above it may need to merge
        // with (or absorb) the replacement node.
        if let (Some(ext_path), Some(SparseNode::Extension { key: shortkey, .. })) =
            (ext_grandparent_path, &ext_grandparent_node)
        {
            let ext_subtrie = self.subtrie_for_path_mut(&ext_path);
            let branch_path = branch_parent_path.as_ref().unwrap();

            if let Some(new_ext_node) = Self::extension_changes_on_leaf_removal(
                &ext_path,
                shortkey,
                branch_path,
                branch_parent_node.as_ref().unwrap(),
            ) {
                ext_subtrie.nodes.insert(ext_path, new_ext_node.clone());
                self.move_value_on_leaf_removal(&ext_path, &new_ext_node, branch_path);
                self.remove_node(branch_path);
            }
        }

        Ok(())
    }
869
    /// Computes and returns the trie's root hash.
    ///
    /// If nothing changed since the last computation and the root node still has a
    /// cached RLP encoding, that hash is returned directly. Otherwise lower-subtrie
    /// hashes are recomputed first, then the upper subtrie is re-hashed with the
    /// remaining prefix set.
    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
    fn root(&mut self) -> B256 {
        trace!(target: "trie::parallel_sparse", "Calculating trie root hash");

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::Root);

        // Fast path: no pending changes and a cached root RLP.
        if self.prefix_set.is_empty() &&
            let Some(rlp_node) = self
                .upper_subtrie
                .nodes
                .get(&Nibbles::default())
                .and_then(|node| node.cached_rlp_node())
        {
            return rlp_node
                .as_hash()
                .expect("RLP-encoding of the root node cannot be less than 32 bytes")
        }

        // Recompute changed lower subtries first (possibly in parallel); this also
        // leaves the upper-subtrie portion of the prefix set in `self.prefix_set`.
        self.update_subtrie_hashes();

        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
        let root_rlp = self.update_upper_subtrie_hashes(&mut prefix_set);

        // A root RLP shorter than 32 bytes is not a hash; fall back to the empty root.
        root_rlp.as_hash().unwrap_or(EMPTY_ROOT_HASH)
    }
900
901 fn is_root_cached(&self) -> bool {
902 self.prefix_set.is_empty() &&
903 self.upper_subtrie
904 .nodes
905 .get(&Nibbles::default())
906 .is_some_and(|node| node.cached_rlp_node().is_some())
907 }
908
    /// Recomputes hashes of all lower subtries touched by the current prefix set,
    /// sequentially or in parallel depending on the number of changed keys.
    ///
    /// The unchanged (upper-subtrie) portion of the prefix set is put back into
    /// `self.prefix_set` for the subsequent upper-subtrie hash pass.
    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
    fn update_subtrie_hashes(&mut self) {
        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateSubtrieHashes);

        // Take the prefix set and split out the lower subtries it touches, each paired
        // with its slice of the prefix set and a reusable update-actions buffer.
        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
        let num_changed_keys = prefix_set.len();
        let (mut changed_subtries, unchanged_prefix_set) =
            self.take_changed_lower_subtries(&mut prefix_set);

        #[cfg(feature = "metrics")]
        self.metrics.subtries_updated.record(changed_subtries.len() as f64);

        // Keep the untouched portion for the upper-subtrie hash pass.
        self.prefix_set = unchanged_prefix_set;

        // Sequential path.
        if !self.is_update_parallelism_enabled(num_changed_keys) {
            for changed_subtrie in &mut changed_subtries {
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
            }

            self.insert_changed_subtries(changed_subtries);
            return
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_update_parallelism_enabled");

        // Parallel path: each changed subtrie is hashed on its own rayon task.
        #[cfg(feature = "std")]
        {
            use rayon::prelude::*;

            changed_subtries.par_iter_mut().for_each(|changed_subtrie| {
                #[cfg(feature = "metrics")]
                let start = Instant::now();
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
                #[cfg(feature = "metrics")]
                self.metrics.subtrie_hash_update_latency.record(start.elapsed());
            });

            self.insert_changed_subtries(changed_subtries);
        }
    }
966
967 fn get_leaf_value(&self, full_path: &Nibbles) -> Option<&Vec<u8>> {
968 if let Some(subtrie) = self.subtrie_for_path(full_path) &&
973 !subtrie.is_empty()
974 {
975 return subtrie.inner.values.get(full_path);
976 }
977
978 self.upper_subtrie.inner.values.get(full_path)
979 }
980
981 fn updates_ref(&self) -> Cow<'_, SparseTrieUpdates> {
982 self.updates.as_ref().map_or(Cow::Owned(SparseTrieUpdates::default()), Cow::Borrowed)
983 }
984
985 fn take_updates(&mut self) -> SparseTrieUpdates {
986 match self.updates.take() {
987 Some(updates) => {
988 self.updates = Some(SparseTrieUpdates::with_capacity(
990 updates.updated_nodes.len(),
991 updates.removed_nodes.len(),
992 ));
993 updates
994 }
995 None => SparseTrieUpdates::default(),
996 }
997 }
998
999 fn wipe(&mut self) {
1000 self.upper_subtrie.wipe();
1001 for trie in &mut *self.lower_subtries {
1002 trie.wipe();
1003 }
1004 self.prefix_set = PrefixSetMut::all();
1005 self.updates = self.updates.is_some().then(SparseTrieUpdates::wiped);
1006 }
1007
1008 fn clear(&mut self) {
1009 self.upper_subtrie.clear();
1010 self.upper_subtrie.nodes.insert(Nibbles::default(), SparseNode::Empty);
1011 for subtrie in &mut *self.lower_subtries {
1012 subtrie.clear();
1013 }
1014 self.prefix_set.clear();
1015 self.updates = None;
1016 self.branch_node_masks.clear();
1017 #[cfg(feature = "trie-debug")]
1018 self.debug_recorder.reset();
1019 }
1022
    /// Looks up the leaf at `full_path`, optionally verifying its value.
    ///
    /// Returns `Ok(LeafLookup::Exists)` when the leaf is present (and matches
    /// `expected_value`, if given), `Ok(LeafLookup::NonExistent)` when the revealed
    /// trie structure proves the leaf is absent, and an error when the value
    /// mismatches or the walk hits a blinded node and cannot decide.
    fn find_leaf(
        &self,
        full_path: &Nibbles,
        expected_value: Option<&Vec<u8>>,
    ) -> Result<LeafLookup, LeafLookupError> {
        // Fast path: the value is stored directly in the upper subtrie or the leaf's
        // lower subtrie; just compare it against the expectation.
        if let Some(actual_value) = core::iter::once(self.upper_subtrie.as_ref())
            .chain(self.lower_subtrie_for_path(full_path))
            .filter_map(|subtrie| subtrie.inner.values.get(full_path))
            .next()
        {
            return expected_value
                .is_none_or(|v| v == actual_value)
                .then_some(LeafLookup::Exists)
                .ok_or_else(|| LeafLookupError::ValueMismatch {
                    path: *full_path,
                    expected: expected_value.cloned(),
                    actual: actual_value.clone(),
                })
        }

        // Slow path: walk the trie from the root towards the target, proving either
        // exclusion or hitting a blinded node we cannot see past.
        let mut curr_path = Nibbles::new();
        let mut curr_subtrie = self.upper_subtrie.as_ref();
        let mut curr_subtrie_is_upper = true;

        loop {
            match curr_subtrie.nodes.get(&curr_path).unwrap() {
                SparseNode::Empty => return Ok(LeafLookup::NonExistent),
                SparseNode::Leaf { key, .. } => {
                    // A different leaf occupying the target's path proves exclusion;
                    // the target itself cannot be here (fast path would have hit).
                    let mut found_full_path = curr_path;
                    found_full_path.extend(key);
                    assert!(&found_full_path != full_path, "target leaf {full_path:?} found, even though value wasn't in values hashmap");
                    return Ok(LeafLookup::NonExistent)
                }
                SparseNode::Extension { key, .. } => {
                    // The target diverges from the extension's key: proven absent.
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.extend(key);
                    if !full_path.starts_with(&curr_path) {
                        return Ok(LeafLookup::NonExistent)
                    }
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    let nibble = full_path.get_unchecked(curr_path.len());
                    // No child on the target's nibble: proven absent.
                    if !state_mask.is_bit_set(nibble) {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.push_unchecked(nibble);
                    // The child exists but is blinded: cannot decide from here.
                    if blinded_mask.is_bit_set(nibble) {
                        return Err(LeafLookupError::BlindedNode {
                            path: curr_path,
                            hash: blinded_hashes[nibble as usize],
                        })
                    }
                }
            }

            // Once the path is deep enough, switch from the upper subtrie to the
            // corresponding lower subtrie (at most once).
            if curr_subtrie_is_upper &&
                let Some(lower_subtrie) = self.lower_subtrie_for_path(&curr_path)
            {
                curr_subtrie = lower_subtrie;
                curr_subtrie_is_upper = false;
            }
        }
    }
1105
1106 fn shrink_nodes_to(&mut self, size: usize) {
1107 let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1111 let size_per_subtrie = size / total_subtries;
1112
1113 self.upper_subtrie.shrink_nodes_to(size_per_subtrie);
1115
1116 for subtrie in &mut *self.lower_subtries {
1118 subtrie.shrink_nodes_to(size_per_subtrie);
1119 }
1120
1121 self.branch_node_masks.shrink_to(size);
1123 }
1124
1125 fn shrink_values_to(&mut self, size: usize) {
1126 let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1130 let size_per_subtrie = size / total_subtries;
1131
1132 self.upper_subtrie.shrink_values_to(size_per_subtrie);
1134
1135 for subtrie in &mut *self.lower_subtries {
1137 subtrie.shrink_values_to(size_per_subtrie);
1138 }
1139 }
1140
1141 fn size_hint(&self) -> usize {
1143 let upper_count = self.upper_subtrie.nodes.len();
1144 let lower_count: usize = self
1145 .lower_subtries
1146 .iter()
1147 .filter_map(|s| s.as_revealed_ref())
1148 .map(|s| s.nodes.len())
1149 .sum();
1150 upper_count + lower_count
1151 }
1152
    fn memory_size(&self) -> usize {
        // Delegates to the same-named inherent method on `ParallelSparseTrie`
        // (inherent methods take precedence over trait methods during resolution).
        // NOTE(review): that inherent method is not visible in this chunk — confirm it
        // exists elsewhere in the file; without it this call would recurse forever.
        self.memory_size()
    }
1156
    /// Prunes the trie down to the subtrees needed by `retained_leaves`, replacing
    /// every other hashed subtree with a blinded entry (hash bit + stored hash) in its
    /// parent branch. Returns the result of `finalize_pruned_roots` on the collected
    /// pruned subtree roots.
    ///
    /// Nodes without a cached hash cannot be blinded and are left in place.
    fn prune(&mut self, retained_leaves: &[Nibbles]) -> usize {
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.reset();

        // Sorted copy; presumably `has_retained_descendant` relies on the ordering for
        // its prefix search — it is defined outside this chunk.
        let mut retained_leaves = retained_leaves.to_vec();
        retained_leaves.sort_unstable();

        let mut effective_pruned_roots = Vec::<Nibbles>::new();
        // Iterative DFS over node paths, starting at the root.
        let mut stack: SmallVec<[Nibbles; 32]> = SmallVec::new();
        stack.push(Nibbles::default());

        while let Some(path) = stack.pop() {
            let Some(node) =
                self.subtrie_for_path(&path).and_then(|subtrie| subtrie.nodes.get(&path).cloned())
            else {
                continue;
            };

            match node {
                // Nothing below these to prune.
                SparseNode::Empty | SparseNode::Leaf { .. } => {}
                SparseNode::Extension { key, state, .. } => {
                    let mut child = path;
                    child.extend(&key);

                    // A retained leaf lives below: keep descending instead of pruning.
                    if has_retained_descendant(&retained_leaves, &child) {
                        stack.push(child);
                        continue;
                    }

                    // The root extension has no parent branch to blind into.
                    if path.is_empty() {
                        continue;
                    }

                    // Only prunable if its hash is cached; otherwise leave in place.
                    let Some(hash) = state.cached_hash() else { continue };
                    self.subtrie_for_path_mut_untracked(&path)
                        .expect("node subtrie exists")
                        .nodes
                        .remove(&path);

                    // Record the removed extension as a blinded child of its parent
                    // branch (which sits one nibble above).
                    let parent_path = path.slice(0..path.len() - 1);
                    let SparseNode::Branch { blinded_mask, blinded_hashes, .. } = self
                        .subtrie_for_path_mut_untracked(&parent_path)
                        .expect("parent subtrie exists")
                        .nodes
                        .get_mut(&parent_path)
                        .expect("expected parent branch node")
                    else {
                        panic!("expected branch node at path {parent_path:?}");
                    };

                    let nibble = path.last().unwrap();
                    blinded_mask.set_bit(nibble);
                    blinded_hashes[nibble as usize] = hash;
                    effective_pruned_roots.push(path);
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    // Work on local copies of the masks while children are removed;
                    // the node itself is patched once afterwards.
                    let mut blinded_mask = blinded_mask;
                    let mut blinded_hashes = blinded_hashes;
                    for nibble in state_mask.iter() {
                        // Already blinded: nothing to prune.
                        if blinded_mask.is_bit_set(nibble) {
                            continue;
                        }

                        let mut child = path;
                        child.push_unchecked(nibble);
                        // Retained leaf below this child: descend instead.
                        if has_retained_descendant(&retained_leaves, &child) {
                            stack.push(child);
                            continue;
                        }

                        let Entry::Occupied(entry) =
                            self.subtrie_for_path_mut_untracked(&child).unwrap().nodes.entry(child)
                        else {
                            panic!("expected node at path {child:?}");
                        };

                        // Only prunable with a cached hash.
                        let Some(hash) = entry.get().cached_hash() else {
                            continue;
                        };
                        entry.remove();
                        blinded_mask.set_bit(nibble);
                        blinded_hashes[nibble as usize] = hash;
                        effective_pruned_roots.push(child);
                    }

                    // Write the accumulated blinded masks/hashes back into the branch.
                    let SparseNode::Branch {
                        blinded_mask: old_blinded_mask,
                        blinded_hashes: old_blinded_hashes,
                        ..
                    } = self
                        .subtrie_for_path_mut_untracked(&path)
                        .unwrap()
                        .nodes
                        .get_mut(&path)
                        .unwrap()
                    else {
                        unreachable!("expected branch node at path {path:?}");
                    };
                    *old_blinded_mask = blinded_mask;
                    *old_blinded_hashes = blinded_hashes;
                }
            }
        }

        Self::finalize_pruned_roots(self, effective_pruned_roots)
    }
1266
    /// Applies a batch of leaf updates, re-queuing any that hit blinded/missing nodes.
    ///
    /// For each drained entry: an empty `Changed` value is a removal, a non-empty one
    /// an upsert, and `Touched` only checks the leaf is resolvable. Whenever an
    /// operation fails on a blinded or provider-missing node (see
    /// `get_retriable_path`), the needed proof target is reported through
    /// `proof_required_fn` and the update is put back into `updates` so the caller
    /// can retry after revealing more of the trie. Any other error aborts.
    fn update_leaves(
        &mut self,
        updates: &mut alloy_primitives::map::B256Map<crate::LeafUpdate>,
        mut proof_required_fn: impl FnMut(B256, u8),
    ) -> SparseTrieResult<()> {
        use crate::{provider::NoRevealProvider, LeafUpdate};

        #[cfg(feature = "trie-debug")]
        let recorded_updates: Vec<_> =
            updates.iter().map(|(k, v)| (*k, LeafUpdateRecord::from(v))).collect();
        #[cfg(feature = "trie-debug")]
        let mut recorded_proof_targets: Vec<(B256, u8)> = Vec::new();

        // Drain the map up front so retriable entries can be re-inserted while iterating.
        let drained: Vec<_> = updates.drain().collect();

        for (key, update) in drained {
            let full_path = Nibbles::unpack(key);

            match update {
                LeafUpdate::Changed(value) => {
                    if value.is_empty() {
                        // Empty value means the leaf is being deleted.
                        match self.remove_leaf(&full_path, NoRevealProvider) {
                            Ok(()) => {}
                            Err(e) => {
                                if let Some(path) = Self::get_retriable_path(&e) {
                                    // Request the proof needed to unblind, re-queue.
                                    let (target_key, min_len) =
                                        Self::proof_target_for_path(key, &full_path, &path);
                                    proof_required_fn(target_key, min_len);
                                    #[cfg(feature = "trie-debug")]
                                    recorded_proof_targets.push((target_key, min_len));
                                    updates.insert(key, LeafUpdate::Changed(value));
                                } else {
                                    return Err(e);
                                }
                            }
                        }
                    } else {
                        // Non-empty value: insert or overwrite the leaf.
                        if let Err(e) = self.update_leaf(full_path, value.clone(), NoRevealProvider)
                        {
                            if let Some(path) = Self::get_retriable_path(&e) {
                                let (target_key, min_len) =
                                    Self::proof_target_for_path(key, &full_path, &path);
                                proof_required_fn(target_key, min_len);
                                #[cfg(feature = "trie-debug")]
                                recorded_proof_targets.push((target_key, min_len));
                                updates.insert(key, LeafUpdate::Changed(value));
                            } else {
                                return Err(e);
                            }
                        }
                    }
                }
                LeafUpdate::Touched => {
                    // Only verify the leaf is resolvable; re-queue on blinded nodes.
                    match self.find_leaf(&full_path, None) {
                        Err(LeafLookupError::BlindedNode { path, .. }) => {
                            let (target_key, min_len) =
                                Self::proof_target_for_path(key, &full_path, &path);
                            proof_required_fn(target_key, min_len);
                            #[cfg(feature = "trie-debug")]
                            recorded_proof_targets.push((target_key, min_len));
                            updates.insert(key, LeafUpdate::Touched);
                        }
                        Ok(_) | Err(LeafLookupError::ValueMismatch { .. }) => {}
                    }
                }
            }
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateLeaves {
            updates: recorded_updates,
            proof_targets: recorded_proof_targets,
        });

        Ok(())
    }
1350
1351 #[cfg(feature = "trie-debug")]
1352 fn take_debug_recorder(&mut self) -> TrieDebugRecorder {
1353 core::mem::take(&mut self.debug_recorder)
1354 }
1355
1356 fn commit_updates(
1357 &mut self,
1358 updated: &HashMap<Nibbles, BranchNodeCompact>,
1359 removed: &HashSet<Nibbles>,
1360 ) {
1361 self.branch_node_masks.reserve(updated.len());
1365 for (path, node) in updated {
1366 self.branch_node_masks.insert(
1367 *path,
1368 BranchNodeMasks { tree_mask: node.tree_mask, hash_mask: node.hash_mask },
1369 );
1370 }
1371 for path in removed {
1372 self.branch_node_masks.remove(path);
1373 }
1374 }
1375}
1376
1377impl ParallelSparseTrie {
    /// Builder-style setter: returns `self` with the given parallelism thresholds applied.
    pub const fn with_parallelism_thresholds(mut self, thresholds: ParallelismThresholds) -> Self {
        self.parallelism_thresholds = thresholds;
        self
    }
1383
    /// Returns `true` if this trie was configured to retain a set of trie updates.
    const fn updates_enabled(&self) -> bool {
        self.updates.is_some()
    }
1388
    /// Returns whether revealing `num_nodes` nodes should be parallelized.
    ///
    /// Always `false` in `no_std` builds, where no threading runtime is available.
    const fn is_reveal_parallelism_enabled(&self, num_nodes: usize) -> bool {
        #[cfg(not(feature = "std"))]
        {
            let _ = num_nodes;
            return false;
        }

        #[cfg(feature = "std")]
        {
            num_nodes >= self.parallelism_thresholds.min_revealed_nodes
        }
    }
1403
    /// Returns whether hashing/updating with `num_changed_keys` changed keys should be
    /// parallelized.
    ///
    /// Always `false` in `no_std` builds, where no threading runtime is available.
    const fn is_update_parallelism_enabled(&self, num_changed_keys: usize) -> bool {
        #[cfg(not(feature = "std"))]
        {
            let _ = num_changed_keys;
            return false;
        }

        #[cfg(feature = "std")]
        {
            num_changed_keys >= self.parallelism_thresholds.min_updated_nodes
        }
    }
1418
    /// Extracts the blinded-node path from an error when the failed operation can be
    /// retried after that path is revealed via a proof.
    ///
    /// Returns `None` for all non-retriable error kinds.
    const fn get_retriable_path(e: &SparseTrieError) -> Option<Nibbles> {
        match e.kind() {
            SparseTrieErrorKind::BlindedNode(path) |
            SparseTrieErrorKind::NodeNotFoundInProvider { path } => Some(*path),
            _ => None,
        }
    }
1432
1433 fn nibbles_to_padded_b256(path: &Nibbles) -> B256 {
1435 let mut bytes = [0u8; 32];
1436 path.pack_to(&mut bytes);
1437 B256::from(bytes)
1438 }
1439
1440 fn proof_target_for_path(full_key: B256, full_path: &Nibbles, path: &Nibbles) -> (B256, u8) {
1446 let min_len = (path.len() as u8).min(64);
1447 let target_key =
1448 if full_path.starts_with(path) { full_key } else { Self::nibbles_to_padded_b256(path) };
1449 (target_key, min_len)
1450 }
1451
    /// Creates a new trie revealed from the given root node.
    ///
    /// `masks` are the root's branch masks, if known; `retain_updates` controls whether
    /// a trie-updates set is kept while mutating the trie. Delegates to `with_root` on a
    /// default instance.
    pub fn from_root(
        root: TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        retain_updates: bool,
    ) -> SparseTrieResult<Self> {
        Self::default().with_root(root, masks, retain_updates)
    }
1473
    /// Removes all nodes, values, and branch masks that lie strictly below the given
    /// prune roots, clearing lower subtries that become empty. Returns the number of
    /// prune roots processed.
    ///
    /// The roots are expected to be prefix-free (checked via `debug_assert`).
    fn finalize_pruned_roots(&mut self, mut effective_pruned_roots: Vec<Nibbles>) -> usize {
        if effective_pruned_roots.is_empty() {
            return 0;
        }

        let nodes_converted = effective_pruned_roots.len();

        // Sort so that all upper-trie roots come first, then by path; this enables the
        // upper/lower split below and binary searches over each slice.
        effective_pruned_roots.sort_unstable_by(|path_a, path_b| {
            let subtrie_type_a = SparseSubtrieType::from_path(path_a);
            let subtrie_type_b = SparseSubtrieType::from_path(path_b);
            subtrie_type_a.cmp(&subtrie_type_b).then(path_a.cmp(path_b))
        });

        // Index of the first root that belongs to a lower subtrie.
        let num_upper_roots = effective_pruned_roots
            .iter()
            .position(|p| !SparseSubtrieType::path_len_is_upper(p.len()))
            .unwrap_or(effective_pruned_roots.len());

        let roots_upper = &effective_pruned_roots[..num_upper_roots];
        let roots_lower = &effective_pruned_roots[num_upper_roots..];

        debug_assert!(
            {
                let mut all_roots: Vec<_> = effective_pruned_roots.clone();
                all_roots.sort_unstable();
                all_roots.windows(2).all(|w| !w[1].starts_with(&w[0]))
            },
            "prune roots must be prefix-free"
        );

        // An upper prune root may cover entire lower subtries: clear every revealed
        // lower subtrie whose root path is prefixed by some upper prune root.
        if !roots_upper.is_empty() {
            for subtrie in &mut *self.lower_subtries {
                let should_clear = subtrie.as_revealed_ref().is_some_and(|s| {
                    // The candidate covering root is the greatest root <= s.path.
                    let search_idx = roots_upper.partition_point(|root| root <= &s.path);
                    search_idx > 0 && s.path.starts_with(&roots_upper[search_idx - 1])
                });
                if should_clear {
                    subtrie.clear();
                }
            }
        }

        // Drop upper nodes strictly below upper roots (the roots themselves stay), and
        // upper-held values below any prune root.
        self.upper_subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_upper, p));
        self.upper_subtrie.inner.values.retain(|p, _| {
            !starts_with_pruned_in(roots_upper, p) && !starts_with_pruned_in(roots_lower, p)
        });

        // Process lower roots grouped by the subtrie they fall into.
        for roots_group in roots_lower.chunk_by(|path_a, path_b| {
            SparseSubtrieType::from_path(path_a) == SparseSubtrieType::from_path(path_b)
        }) {
            let subtrie_idx = path_subtrie_index_unchecked(&roots_group[0]);

            let should_clear = {
                let Some(subtrie) = self.lower_subtries[subtrie_idx].as_revealed_mut() else {
                    continue;
                };

                subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_group, p));
                subtrie.inner.values.retain(|p, _| !starts_with_pruned_in(roots_group, p));

                // If the subtrie no longer contains its own root node, it is defunct.
                !subtrie.nodes.contains_key(&subtrie.path)
            };

            if should_clear {
                self.lower_subtries[subtrie_idx].clear();
            }
        }

        // Branch masks below any prune root are stale; drop them as well.
        self.branch_node_masks.retain(|p, _| {
            if SparseSubtrieType::path_len_is_upper(p.len()) {
                !starts_with_pruned_in(roots_upper, p)
            } else {
                !starts_with_pruned_in(roots_lower, p) && !starts_with_pruned_in(roots_upper, p)
            }
        });

        nodes_converted
    }
1563
1564 fn lower_subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1568 match SparseSubtrieType::from_path(path) {
1569 SparseSubtrieType::Upper => None,
1570 SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_ref(),
1571 }
1572 }
1573
1574 fn lower_subtrie_for_path_mut(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1581 match SparseSubtrieType::from_path(path) {
1582 SparseSubtrieType::Upper => None,
1583 SparseSubtrieType::Lower(idx) => {
1584 self.lower_subtries[idx].reveal(path);
1585 Some(self.lower_subtries[idx].as_revealed_mut().expect("just revealed"))
1586 }
1587 }
1588 }
1589
1590 fn subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1595 if SparseSubtrieType::path_len_is_upper(path.len()) {
1596 Some(&self.upper_subtrie)
1597 } else {
1598 self.lower_subtrie_for_path(path)
1599 }
1600 }
1601
1602 fn subtrie_for_path_mut(&mut self, path: &Nibbles) -> &mut SparseSubtrie {
1609 if SparseSubtrieType::path_len_is_upper(path.len()) {
1612 &mut self.upper_subtrie
1613 } else {
1614 self.lower_subtrie_for_path_mut(path).unwrap()
1615 }
1616 }
1617
1618 fn subtrie_for_path_mut_untracked(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1621 if SparseSubtrieType::path_len_is_upper(path.len()) {
1622 Some(&mut self.upper_subtrie)
1623 } else {
1624 match SparseSubtrieType::from_path(path) {
1625 SparseSubtrieType::Upper => None,
1626 SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_mut(),
1627 }
1628 }
1629 }
1630
    /// Performs a single step of walking from `from_node` (at `from_path`) towards the
    /// leaf with full path `leaf_full_path`.
    ///
    /// Returns whether the leaf was found at this node, the child path to continue
    /// from, a blinded child blocking the walk, or that the leaf cannot exist here.
    fn find_next_to_leaf(
        from_path: &Nibbles,
        from_node: &SparseNode,
        leaf_full_path: &Nibbles,
    ) -> FindNextToLeafOutcome {
        debug_assert!(leaf_full_path.len() >= from_path.len());
        debug_assert!(leaf_full_path.starts_with(from_path));

        match from_node {
            // An empty node holds no leaves.
            SparseNode::Empty => FindNextToLeafOutcome::NotFound,
            SparseNode::Leaf { key, .. } => {
                // The leaf matches only if its full path equals the target exactly.
                let mut found_full_path = *from_path;
                found_full_path.extend(key);

                if &found_full_path == leaf_full_path {
                    return FindNextToLeafOutcome::Found
                }
                FindNextToLeafOutcome::NotFound
            }
            SparseNode::Extension { key, .. } => {
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.extend(key);

                // The target must lie under the extension's child.
                if !leaf_full_path.starts_with(&child_path) {
                    return FindNextToLeafOutcome::NotFound
                }
                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                // The branch must have a child on the nibble the target takes next.
                let nibble = leaf_full_path.get_unchecked(from_path.len());
                if !state_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.push_unchecked(nibble);

                // A blinded child must be revealed before the walk can continue.
                if blinded_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::BlindedNode(child_path);
                }

                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
        }
    }
1693
    /// After a leaf removal collapses a node, moves the surviving leaf's value from the
    /// lower subtrie it used to live in up into the upper subtrie, when the collapsed
    /// parent now resides in the upper trie.
    ///
    /// No-op when the parent is in a lower subtrie, when the new parent is not a leaf,
    /// or when the previous child path has no revealed lower subtrie.
    fn move_value_on_leaf_removal(
        &mut self,
        parent_path: &Nibbles,
        new_parent_node: &SparseNode,
        prev_child_path: &Nibbles,
    ) {
        // Only relevant when the parent lives in the upper subtrie.
        if SparseSubtrieType::from_path(parent_path).lower_index().is_some() {
            return;
        }

        if let SparseNode::Leaf { key, .. } = new_parent_node {
            let Some(prev_child_subtrie) = self.lower_subtrie_for_path_mut(prev_child_path) else {
                return;
            };

            // Full key of the leaf that was merged into the parent.
            let mut leaf_full_path = *parent_path;
            leaf_full_path.extend(key);

            let val = prev_child_subtrie.inner.values.remove(&leaf_full_path).expect("ParallelSparseTrie is in an inconsistent state, expected value on subtrie which wasn't found");
            self.upper_subtrie.inner.values.insert(leaf_full_path, val);
        }
    }
1722
    /// Removes the node at `path`, which must be a leaf or an extension, and performs
    /// the lower-subtrie bookkeeping that removal requires.
    ///
    /// # Panics
    ///
    /// Panics if the removed node exists but is neither a leaf nor an extension.
    fn remove_node(&mut self, path: &Nibbles) {
        let subtrie = self.subtrie_for_path_mut(path);
        let node = subtrie.nodes.remove(path);

        // Upper-subtrie removals need no further bookkeeping.
        let Some(idx) = SparseSubtrieType::from_path(path).lower_index() else {
            return;
        };

        match node {
            Some(SparseNode::Leaf { .. }) => {
                // Removing the last node leaves the lower subtrie defunct; clear it.
                if subtrie.nodes.is_empty() {
                    self.lower_subtries[idx].clear();
                }
            }
            Some(SparseNode::Extension { key, .. }) => {
                // Removing the subtrie's root extension moves its effective root down
                // to the extension's child.
                if &subtrie.path == path {
                    subtrie.path.extend(&key);
                }
            }
            _ => panic!("Expected to remove a leaf or extension, but removed {node:?}"),
        }
    }
1763
1764 fn branch_changes_on_leaf_removal(
1773 parent_path: &Nibbles,
1774 remaining_child_path: &Nibbles,
1775 remaining_child_node: &SparseNode,
1776 ) -> (SparseNode, bool) {
1777 debug_assert!(remaining_child_path.len() > parent_path.len());
1778 debug_assert!(remaining_child_path.starts_with(parent_path));
1779
1780 let remaining_child_nibble = remaining_child_path.get_unchecked(parent_path.len());
1781
1782 match remaining_child_node {
1785 SparseNode::Empty => {
1786 panic!("remaining child must have been revealed already")
1787 }
1788 SparseNode::Leaf { key, .. } => {
1792 let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1793 new_key.extend(key);
1794 (SparseNode::new_leaf(new_key), true)
1795 }
1796 SparseNode::Extension { key, .. } => {
1800 let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1801 new_key.extend(key);
1802 (SparseNode::new_ext(new_key), true)
1803 }
1804 SparseNode::Branch { .. } => (
1807 SparseNode::new_ext(Nibbles::from_nibbles_unchecked([remaining_child_nibble])),
1808 false,
1809 ),
1810 }
1811 }
1812
1813 fn extension_changes_on_leaf_removal(
1822 parent_path: &Nibbles,
1823 parent_key: &Nibbles,
1824 child_path: &Nibbles,
1825 child: &SparseNode,
1826 ) -> Option<SparseNode> {
1827 debug_assert!(child_path.len() > parent_path.len());
1828 debug_assert!(child_path.starts_with(parent_path));
1829
1830 match child {
1833 SparseNode::Empty => {
1834 panic!("child must be revealed")
1835 }
1836 SparseNode::Leaf { key, .. } => {
1842 let mut new_key = *parent_key;
1843 new_key.extend(key);
1844 Some(SparseNode::new_leaf(new_key))
1845 }
1846 SparseNode::Extension { key, .. } => {
1849 let mut new_key = *parent_key;
1850 new_key.extend(key);
1851 Some(SparseNode::new_ext(new_key))
1852 }
1853 SparseNode::Branch { .. } => None,
1855 }
1856 }
1857
1858 #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
1861 fn apply_subtrie_update_actions(
1862 &mut self,
1863 update_actions: impl Iterator<Item = SparseTrieUpdatesAction>,
1864 ) {
1865 if let Some(updates) = self.updates.as_mut() {
1866 let additional = update_actions.size_hint().0;
1867 updates.updated_nodes.reserve(additional);
1868 updates.removed_nodes.reserve(additional);
1869 for action in update_actions {
1870 match action {
1871 SparseTrieUpdatesAction::InsertRemoved(path) => {
1872 updates.updated_nodes.remove(&path);
1873 updates.removed_nodes.insert(path);
1874 }
1875 SparseTrieUpdatesAction::RemoveUpdated(path) => {
1876 updates.updated_nodes.remove(&path);
1877 }
1878 SparseTrieUpdatesAction::InsertUpdated(path, branch_node) => {
1879 updates.updated_nodes.insert(path, branch_node);
1880 updates.removed_nodes.remove(&path);
1881 }
1882 }
1883 }
1884 };
1885 }
1886
    /// Recomputes the RLP nodes/hashes of the upper subtrie (consuming lower-subtrie
    /// roots' cached RLP nodes at the boundary) and returns the trie root's RLP node.
    ///
    /// Lower subtrie hashes must already be up to date: their root nodes are expected
    /// to carry a cached RLP node (checked via `debug_assert`).
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, ret)]
    fn update_upper_subtrie_hashes(&mut self, prefix_set: &mut PrefixSet) -> RlpNode {
        trace!(target: "trie::parallel_sparse", "Updating upper subtrie hashes");

        // Seed the DFS with the trie root.
        debug_assert!(self.upper_subtrie.inner.buffers.path_stack.is_empty());
        self.upper_subtrie.inner.buffers.path_stack.push(RlpNodePathStackItem {
            path: Nibbles::default(), is_in_prefix_set: None,
        });

        #[cfg(feature = "metrics")]
        let start = Instant::now();

        // Reuse a pooled action buffer when update tracking is enabled.
        let mut update_actions_buf =
            self.updates_enabled().then(|| self.update_actions_buffers.pop().unwrap_or_default());

        while let Some(stack_item) = self.upper_subtrie.inner.buffers.path_stack.pop() {
            let path = stack_item.path;
            let node = if path.len() < UPPER_TRIE_MAX_DEPTH {
                self.upper_subtrie.nodes.get_mut(&path).expect("upper subtrie node must exist")
            } else {
                // Boundary path: the node is the root of a lower subtrie, whose RLP
                // node must have been computed beforehand.
                let index = path_subtrie_index_unchecked(&path);
                let node = self.lower_subtries[index]
                    .as_revealed_mut()
                    .expect("lower subtrie must exist")
                    .nodes
                    .get_mut(&path)
                    .expect("lower subtrie node must exist");
                debug_assert!(
                    node.cached_rlp_node().is_some(),
                    "Lower subtrie root node {node:?} at path {path:?} has no cached RLP node"
                );
                node
            };

            self.upper_subtrie.inner.rlp_node(
                prefix_set,
                &mut update_actions_buf,
                stack_item,
                node,
                &self.branch_node_masks,
            );
        }

        // Flush recorded actions into the retained updates and recycle the buffer.
        if let Some(mut update_actions_buf) = update_actions_buf {
            self.apply_subtrie_update_actions(
                #[allow(clippy::iter_with_drain)]
                update_actions_buf.drain(..),
            );
            self.update_actions_buffers.push(update_actions_buf);
        }

        #[cfg(feature = "metrics")]
        self.metrics.subtrie_upper_hash_latency.record(start.elapsed());

        // Exactly the root's RLP node must remain on the stack.
        debug_assert_eq!(self.upper_subtrie.inner.buffers.rlp_node_stack.len(), 1);
        self.upper_subtrie.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
    }
1951
    /// Takes ownership of every revealed lower subtrie touched by `prefix_set` (or
    /// whose root lacks a cached RLP node), pairing each with the slice of the prefix
    /// set that belongs to it and, when tracking is enabled, a pooled action buffer.
    ///
    /// Returns the changed subtries plus the prefix set of keys that target the upper
    /// subtrie or unrevealed subtries (to be processed by the upper-trie hash pass).
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(prefix_set_len = prefix_set.len()))]
    fn take_changed_lower_subtries(
        &mut self,
        prefix_set: &mut PrefixSet,
    ) -> (Vec<ChangedSubtrie>, PrefixSetMut) {
        if prefix_set.is_empty() {
            return Default::default();
        }

        // Iterate the (sorted) prefix-set keys once, in lockstep with the subtries,
        // which are visited in ascending path order.
        let prefix_set_clone = prefix_set.clone();
        let mut prefix_set_iter = prefix_set_clone.into_iter().copied().peekable();
        let mut changed_subtries = Vec::new();
        let mut unchanged_prefix_set = PrefixSetMut::default();
        let updates_enabled = self.updates_enabled();

        for (index, subtrie) in self.lower_subtries.iter_mut().enumerate() {
            if let Some(subtrie) = subtrie.take_revealed_if(|subtrie| {
                prefix_set.contains(&subtrie.path) ||
                    subtrie
                        .nodes
                        .get(&subtrie.path)
                        .is_some_and(|n| n.cached_rlp_node().is_none())
            }) {
                let prefix_set = if prefix_set.all() {
                    unchanged_prefix_set = PrefixSetMut::all();
                    PrefixSetMut::all()
                } else {
                    // Split off the keys that fall under this subtrie; keys sorting
                    // before it (only possible before any match) go to the upper set.
                    let mut new_prefix_set = Vec::new();
                    while let Some(key) = prefix_set_iter.peek() {
                        if key.starts_with(&subtrie.path) {
                            new_prefix_set.push(prefix_set_iter.next().unwrap());
                        } else if new_prefix_set.is_empty() && key < &subtrie.path {
                            unchanged_prefix_set.insert(prefix_set_iter.next().unwrap());
                        } else {
                            break
                        }
                    }
                    PrefixSetMut::from(new_prefix_set)
                }
                .freeze();

                // The subtrie's root will produce a fresh RLP node, so mark the path
                // the upper trie sees for it as changed.
                match subtrie.nodes.get(&subtrie.path) {
                    Some(SparseNode::Extension { key, .. } | SparseNode::Leaf { key, .. }) => {
                        unchanged_prefix_set.insert(subtrie.path.join(key));
                    }
                    Some(SparseNode::Branch { .. }) => {
                        unchanged_prefix_set.insert(subtrie.path);
                    }
                    _ => {}
                }

                let update_actions_buf =
                    updates_enabled.then(|| self.update_actions_buffers.pop().unwrap_or_default());

                changed_subtries.push(ChangedSubtrie {
                    index,
                    subtrie,
                    prefix_set,
                    update_actions_buf,
                });
            }
        }

        // Whatever keys remain did not match any taken subtrie.
        unchanged_prefix_set.extend_keys(prefix_set_iter);

        (changed_subtries, unchanged_prefix_set)
    }
2049
2050 #[cfg(test)]
2052 fn all_nodes(&self) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
2053 let mut nodes = vec![];
2054 for subtrie in self.lower_subtries.iter().filter_map(LowerSparseSubtrie::as_revealed_ref) {
2055 nodes.extend(subtrie.nodes.iter())
2056 }
2057 nodes.extend(self.upper_subtrie.nodes.iter());
2058 nodes
2059 }
2060
    /// Reveals a node in the upper subtrie, forwarding any children that cross the
    /// upper/lower boundary into the appropriate lower subtries.
    ///
    /// Unreachable paths are silently ignored. Branch nodes here carry an embedded
    /// extension key (`branch.key`), so the effective branch may land below the path
    /// at which the node is revealed.
    fn reveal_upper_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
    ) -> SparseTrieResult<()> {
        if !self.is_path_reachable_from_upper(&path) {
            return Ok(())
        }

        // If the upper subtrie refused the reveal (already present/unreachable), the
        // only remaining work is for branches whose key crosses into a lower subtrie.
        if !self.upper_subtrie.reveal_node(path, node, masks, None)? {
            if let TrieNodeV2::Branch(branch) = node {
                if branch.key.is_empty() {
                    return Ok(());
                }

                if SparseSubtrieType::path_len_is_upper(path.len() + branch.key.len()) {
                    return Ok(())
                }
            } else {
                return Ok(());
            }
        }

        match node {
            TrieNodeV2::Branch(branch) => {
                // Effective position of the branch after its embedded extension key.
                let mut branch_path = path;
                branch_path.extend(&branch.key);

                if !SparseSubtrieType::path_len_is_upper(branch_path.len()) {
                    // The branch itself lies in a lower subtrie: reveal it there.
                    self.lower_subtrie_for_path_mut(&branch_path)
                        .expect("branch_path must have a lower subtrie")
                        .reveal_branch(
                            branch_path,
                            branch.state_mask,
                            &branch.stack,
                            masks,
                            branch.branch_rlp_node.clone(),
                        )?
                } else if !SparseSubtrieType::path_len_is_upper(branch_path.len() + 1) {
                    // The branch sits right at the boundary: its non-hash children
                    // belong to lower subtries and are revealed individually.
                    for (stack_ptr, idx) in branch.state_mask.iter().enumerate() {
                        let mut child_path = branch_path;
                        child_path.push_unchecked(idx);
                        let child = &branch.stack[stack_ptr];

                        if !child.is_hash() {
                            self.lower_subtrie_for_path_mut(&child_path)
                                .expect("child_path must have a lower subtrie")
                                .reveal_node(
                                    child_path,
                                    &TrieNodeV2::decode(&mut branch.stack[stack_ptr].as_ref())?,
                                    None,
                                    None,
                                )?;
                        }
                    }
                }
            }
            TrieNodeV2::Extension(ext) => {
                // An extension's child may land in a lower subtrie; reveal it there.
                let mut child_path = path;
                child_path.extend(&ext.key);
                if let Some(subtrie) = self.lower_subtrie_for_path_mut(&child_path) {
                    subtrie.reveal_node(
                        child_path,
                        &TrieNodeV2::decode(&mut ext.child.as_ref())?,
                        None,
                        None,
                    )?;
                }
            }
            // Empty roots and leaves have no children to forward.
            TrieNodeV2::EmptyRoot | TrieNodeV2::Leaf(_) => (),
        }

        Ok(())
    }
2169
2170 #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
2173 fn insert_changed_subtries(
2174 &mut self,
2175 changed_subtries: impl IntoIterator<Item = ChangedSubtrie>,
2176 ) {
2177 for ChangedSubtrie { index, subtrie, update_actions_buf, .. } in changed_subtries {
2178 if let Some(mut update_actions_buf) = update_actions_buf {
2179 self.apply_subtrie_update_actions(
2180 #[allow(clippy::iter_with_drain)]
2181 update_actions_buf.drain(..),
2182 );
2183 self.update_actions_buffers.push(update_actions_buf);
2184 }
2185
2186 self.lower_subtries[index] = LowerSparseSubtrie::Revealed(subtrie);
2187 }
2188 }
2189
2190 pub fn memory_size(&self) -> usize {
2202 let mut size = core::mem::size_of::<Self>();
2203
2204 size += self.upper_subtrie.memory_size();
2206
2207 for subtrie in self.lower_subtries.iter() {
2209 size += subtrie.memory_size();
2210 }
2211
2212 size += self.prefix_set.len() * core::mem::size_of::<Nibbles>();
2214
2215 size += self.branch_node_masks.len() *
2217 (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeMasks>());
2218
2219 if let Some(updates) = &self.updates {
2221 size += updates.updated_nodes.len() *
2222 (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeCompact>());
2223 size += updates.removed_nodes.len() * core::mem::size_of::<Nibbles>();
2224 }
2225
2226 for buf in &self.update_actions_buffers {
2228 size += buf.capacity() * core::mem::size_of::<SparseTrieUpdatesAction>();
2229 }
2230
2231 size
2232 }
2233
    /// Returns `true` if `path` can be reached from the trie root by walking revealed
    /// upper-subtrie nodes, i.e. every node on the way exists and routes towards it.
    fn is_path_reachable_from_upper(&self, path: &Nibbles) -> bool {
        let mut current = Nibbles::default();
        while current.len() < path.len() {
            // A missing node means the walk dead-ends before reaching `path`.
            let Some(node) = self.upper_subtrie.nodes.get(&current) else { return false };
            match node {
                SparseNode::Branch { state_mask, .. } => {
                    // The branch must have a child on the nibble `path` takes next.
                    if !state_mask.is_bit_set(path.get_unchecked(current.len())) {
                        return false
                    }

                    current.push_unchecked(path.get_unchecked(current.len()));
                }
                SparseNode::Extension { key, .. } => {
                    // The extension key must match the corresponding slice of `path`.
                    // NOTE(review): this assumes the key never extends past `path`'s
                    // length here — confirm `slice` bounds cannot be exceeded.
                    if *key != path.slice(current.len()..current.len() + key.len()) {
                        return false
                    }
                    current.extend(key);
                }
                // Empty and leaf nodes cannot route any further down.
                SparseNode::Empty | SparseNode::Leaf { .. } => return false,
            }
        }
        true
    }
2258
2259 fn is_boundary_leaf_reachable(
2265 upper_nodes: &HashMap<Nibbles, SparseNode>,
2266 path: &Nibbles,
2267 node: &TrieNodeV2,
2268 ) -> bool {
2269 debug_assert_eq!(path.len(), UPPER_TRIE_MAX_DEPTH);
2270
2271 if !matches!(node, TrieNodeV2::Leaf(_)) {
2272 return true
2273 }
2274
2275 let parent_path = path.slice(..path.len() - 1);
2276 let leaf_nibble = path.get_unchecked(path.len() - 1);
2277
2278 match upper_nodes.get(&parent_path) {
2279 Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2280 _ => false,
2281 }
2282 }
2283
2284 fn reachable_subtries(&self) -> SubtriesBitmap {
2287 let mut reachable = SubtriesBitmap::default();
2288
2289 let mut stack = Vec::new();
2290 stack.push(Nibbles::default());
2291
2292 while let Some(current) = stack.pop() {
2293 let Some(node) = self.upper_subtrie.nodes.get(¤t) else { continue };
2294 match node {
2295 SparseNode::Branch { state_mask, .. } => {
2296 for idx in state_mask.iter() {
2297 let mut next = current;
2298 next.push_unchecked(idx);
2299 if next.len() >= UPPER_TRIE_MAX_DEPTH {
2300 reachable.set(path_subtrie_index_unchecked(&next));
2301 } else {
2302 stack.push(next);
2303 }
2304 }
2305 }
2306 SparseNode::Extension { key, .. } => {
2307 let mut next = current;
2308 next.extend(key);
2309 if next.len() >= UPPER_TRIE_MAX_DEPTH {
2310 reachable.set(path_subtrie_index_unchecked(&next));
2311 } else {
2312 stack.push(next);
2313 }
2314 }
2315 SparseNode::Empty | SparseNode::Leaf { .. } => {}
2316 };
2317 }
2318
2319 reachable
2320 }
2321}
2322
/// A bitmap with one bit per lower subtrie ([`NUM_LOWER_SUBTRIES`] = 256 bits, backed
/// by a single [`U256`]), used to track which lower subtries are of interest.
#[derive(Clone, Default, PartialEq, Eq, Debug)]
struct SubtriesBitmap(U256);
2326
2327impl SubtriesBitmap {
2328 #[inline]
2330 fn set(&mut self, idx: usize) {
2331 debug_assert!(idx < NUM_LOWER_SUBTRIES);
2332 self.0.set_bit(idx, true);
2333 }
2334
2335 #[inline]
2337 fn get(&self, idx: usize) -> bool {
2338 debug_assert!(idx < NUM_LOWER_SUBTRIES);
2339 self.0.bit(idx)
2340 }
2341}
2342
/// A subtrie of the parallel sparse trie: either the upper trie or one of the lower
/// tries rooted below the boundary depth.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrie {
    // Absolute path of this subtrie's root within the full trie.
    pub(crate) path: Nibbles,
    // Revealed nodes, keyed by their absolute path.
    nodes: HashMap<Nibbles, SparseNode>,
    // Leaf values and reusable hash-computation buffers.
    inner: SparseSubtrieInner,
}
2360
/// Outcome of a single step when walking from a node towards a target leaf.
enum FindNextToLeafOutcome {
    /// The node itself is the target leaf.
    Found,
    /// The walk continues from the given child path.
    ContinueFrom(Nibbles),
    /// The target leaf cannot exist under this node.
    NotFound,
    /// Traversal hit a blinded child at the given path; it must be revealed first.
    BlindedNode(Nibbles),
}
2375
2376impl SparseSubtrie {
2377 pub(crate) fn new(path: Nibbles) -> Self {
2379 Self { path, ..Default::default() }
2380 }
2381
    /// Returns `true` when no nodes have been revealed in this subtrie.
    pub(crate) fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }
2386
2387 fn is_child_same_level(current_path: &Nibbles, child_path: &Nibbles) -> bool {
2389 let current_level = core::mem::discriminant(&SparseSubtrieType::from_path(current_path));
2390 let child_level = core::mem::discriminant(&SparseSubtrieType::from_path(child_path));
2391 current_level == child_level
2392 }
2393
2394 fn is_leaf_reachable_from_parent(&self, path: &Nibbles) -> bool {
2403 if path.is_empty() {
2404 return true
2405 }
2406
2407 let parent_path = path.slice(..path.len() - 1);
2408 let leaf_nibble = path.get_unchecked(path.len() - 1);
2409
2410 match self.nodes.get(&parent_path) {
2411 Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2412 _ => false,
2413 }
2414 }
2415
2416 pub fn update_leaf(&mut self, full_path: Nibbles, value: Vec<u8>) -> SparseTrieResult<()> {
2432 debug_assert!(full_path.starts_with(&self.path));
2433
2434 if let Entry::Occupied(mut e) = self.inner.values.entry(full_path) {
2436 e.insert(value);
2437 return Ok(())
2438 }
2439
2440 let mut current = Some(self.path);
2442
2443 while let Some(current_path) = current.as_mut() {
2444 match self.update_next_node(current_path, &full_path)? {
2445 LeafUpdateStep::Continue => {}
2446 LeafUpdateStep::NodeNotFound | LeafUpdateStep::Complete { .. } => break,
2447 }
2448 }
2449
2450 self.inner.values.insert(full_path, value);
2452
2453 Ok(())
2454 }
2455
    /// Performs one step of a leaf insertion: inspects the node at `current`,
    /// restructures it if necessary, and advances `current` towards `path`.
    ///
    /// Returns whether to continue the walk, that it completed (with the paths of any
    /// newly inserted nodes), or that no node exists at `current`. Errors if the
    /// insertion would descend through a blinded branch child.
    fn update_next_node(
        &mut self,
        current: &mut Nibbles,
        path: &Nibbles,
    ) -> SparseTrieResult<LeafUpdateStep> {
        debug_assert!(path.starts_with(&self.path));
        debug_assert!(current.starts_with(&self.path));
        debug_assert!(path.starts_with(current));
        let Some(node) = self.nodes.get_mut(current) else {
            return Ok(LeafUpdateStep::NodeNotFound);
        };

        match node {
            SparseNode::Empty => {
                // An empty (sub)trie becomes a single leaf holding the whole key.
                let path = path.slice(self.path.len()..);
                *node = SparseNode::new_leaf(path);
                Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
            }
            SparseNode::Leaf { key: current_key, .. } => {
                current.extend(current_key);

                debug_assert!(current != path, "we already checked leaf presence in the beginning");

                // Split the existing leaf: it becomes an extension down to the common
                // prefix, followed by a branch that separates old and new leaves.
                let common = current.common_prefix_length(path);

                let new_ext_key = current.slice(current.len() - current_key.len()..common);
                *node = SparseNode::new_ext(new_ext_key);

                self.nodes.reserve(3);
                let branch_path = current.slice(..common);
                let new_leaf_path = path.slice(..=common);
                let existing_leaf_path = current.slice(..=common);

                self.nodes.insert(
                    branch_path,
                    SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    ),
                );
                self.nodes.insert(new_leaf_path, SparseNode::new_leaf(path.slice(common + 1..)));
                self.nodes
                    .insert(existing_leaf_path, SparseNode::new_leaf(current.slice(common + 1..)));

                Ok(LeafUpdateStep::complete_with_insertions(vec![
                    branch_path,
                    new_leaf_path,
                    existing_leaf_path,
                ]))
            }
            SparseNode::Extension { key, .. } => {
                current.extend(key);

                if !path.starts_with(current) {
                    // The new leaf diverges inside the extension: shorten the
                    // extension to the common prefix and insert a split branch.
                    let common = current.common_prefix_length(path);
                    *key = current.slice(current.len() - key.len()..common);

                    self.nodes.reserve(3);
                    let branch_path = current.slice(..common);
                    let new_leaf_path = path.slice(..=common);
                    let branch = SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    );

                    self.nodes.insert(branch_path, branch);

                    let new_leaf = SparseNode::new_leaf(path.slice(common + 1..));
                    self.nodes.insert(new_leaf_path, new_leaf);

                    let mut inserted_nodes = vec![branch_path, new_leaf_path];

                    // Any remainder of the old extension key becomes a child
                    // extension under the new branch.
                    let key = current.slice(common + 1..);
                    if !key.is_empty() {
                        let ext_path = current.slice(..=common);
                        self.nodes.insert(ext_path, SparseNode::new_ext(key));
                        inserted_nodes.push(ext_path);
                    }

                    return Ok(LeafUpdateStep::complete_with_insertions(inserted_nodes))
                }

                Ok(LeafUpdateStep::Continue)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                let nibble = path.get_unchecked(current.len());
                current.push_unchecked(nibble);

                if !state_mask.is_bit_set(nibble) {
                    // Free slot on the branch: attach the new leaf directly.
                    state_mask.set_bit(nibble);
                    let new_leaf = SparseNode::new_leaf(path.slice(current.len()..));
                    self.nodes.insert(*current, new_leaf);
                    return Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
                }

                // Cannot descend through a blinded child; it must be revealed first.
                if blinded_mask.is_bit_set(nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(*current).into());
                }

                Ok(LeafUpdateStep::Continue)
            }
        }
    }
2577
    /// Reveals a branch node at `path` with the given children stack, then recursively
    /// reveals every embedded (non-hash) child that stays on this subtrie's level.
    ///
    /// An already-revealed node at `path` is left untouched. Children present only as
    /// hashes are recorded as blinded (mask + hash) on the branch itself.
    fn reveal_branch(
        &mut self,
        path: Nibbles,
        state_mask: TrieMask,
        children: &[RlpNode],
        masks: Option<BranchNodeMasks>,
        rlp_node: Option<RlpNode>,
    ) -> SparseTrieResult<()> {
        match self.nodes.entry(path) {
            Entry::Occupied(_) => {
                // Never overwrite an existing node.
                return Ok(());
            }
            Entry::Vacant(entry) => {
                // A known RLP node lets us cache the hash; `store_in_db_trie` is
                // derived from whether any mask bit is set.
                let state =
                    match rlp_node.as_ref() {
                        Some(rlp_node) => SparseNodeState::Cached {
                            rlp_node: rlp_node.clone(),
                            store_in_db_trie: Some(masks.is_some_and(|m| {
                                !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
                            })),
                        },
                        None => SparseNodeState::Dirty,
                    };

                // Record hash-only children as blinded slots on the branch.
                let mut blinded_mask = TrieMask::default();
                let mut blinded_hashes = Box::new([B256::ZERO; 16]);

                for (stack_ptr, idx) in state_mask.iter().enumerate() {
                    let mut child_path = path;
                    child_path.push_unchecked(idx);
                    let child = &children[stack_ptr];

                    if let Some(hash) = child.as_hash() {
                        blinded_mask.set_bit(idx);
                        blinded_hashes[idx as usize] = hash;
                    }
                }

                entry.insert(SparseNode::Branch {
                    state_mask,
                    state,
                    blinded_mask,
                    blinded_hashes,
                });
            }
        }

        // Recursively reveal embedded children, but only those on the same subtrie
        // level — boundary-crossing children are handled by the caller.
        for (stack_ptr, idx) in state_mask.iter().enumerate() {
            let mut child_path = path;
            child_path.push_unchecked(idx);
            let child = &children[stack_ptr];
            if !child.is_hash() && Self::is_child_same_level(&path, &child_path) {
                self.reveal_node(
                    child_path,
                    &TrieNodeV2::decode(&mut child.as_ref())?,
                    None,
                    None,
                )?;
            }
        }

        Ok(())
    }
2647
    /// Reveals a decoded trie node at `path` in this subtrie.
    ///
    /// Returns `Ok(true)` when the node was (or is being) revealed and `Ok(false)`
    /// when it was skipped: already present, unreachable from its parent, or its
    /// value already known.
    ///
    /// `hash_from_upper` carries the node's hash when the caller (the upper subtrie)
    /// already knows it; otherwise the hash is recovered from the parent branch's
    /// blinded slot, which is consumed (unset) in the process.
    fn reveal_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        hash_from_upper: Option<B256>,
    ) -> SparseTrieResult<bool> {
        debug_assert!(path.starts_with(&self.path));

        // Never overwrite an existing node.
        if self.nodes.contains_key(&path) {
            return Ok(false);
        }

        let hash = if let Some(hash) = hash_from_upper {
            Some(hash)
        } else if path.len() != UPPER_TRIE_MAX_DEPTH && !path.is_empty() {
            // Non-root, non-boundary nodes must hang off a revealed parent branch;
            // take the hash from (and clear) the parent's blinded slot.
            let Some(SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. }) =
                self.nodes.get_mut(&path.slice(0..path.len() - 1))
            else {
                return Ok(false);
            };
            let nibble = path.last().unwrap();
            if !state_mask.is_bit_set(nibble) {
                return Ok(false);
            }

            blinded_mask.is_bit_set(nibble).then(|| {
                blinded_mask.unset_bit(nibble);
                blinded_hashes[nibble as usize]
            })
        } else {
            None
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            ?masks,
            "Revealing node",
        );

        match node {
            TrieNodeV2::EmptyRoot => {
                // Only valid at the root of the whole trie.
                debug_assert!(path.is_empty());
                debug_assert!(self.path.is_empty());
                self.nodes.insert(path, SparseNode::Empty);
            }
            TrieNodeV2::Branch(branch) => {
                if branch.key.is_empty() {
                    // Plain branch with no embedded extension key.
                    self.reveal_branch(
                        path,
                        branch.state_mask,
                        &branch.stack,
                        masks,
                        hash.as_ref().map(RlpNode::word_rlp),
                    )?;
                    return Ok(true);
                }

                // A keyed branch is modeled as an extension node followed by the
                // branch at the extended path.
                self.nodes.insert(
                    path,
                    SparseNode::Extension {
                        key: branch.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                store_in_db_trie: Some(masks.is_some_and(|m| {
                                    !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
                                })),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );

                let mut branch_path = path;
                branch_path.extend(&branch.key);

                // If the branch lands in a different level, the caller reveals it.
                if !Self::is_child_same_level(&path, &branch_path) {
                    return Ok(true);
                }

                self.reveal_branch(
                    branch_path,
                    branch.state_mask,
                    &branch.stack,
                    masks,
                    branch.branch_rlp_node.clone(),
                )?;
            }
            // Standalone extensions never occur here: they arrive embedded in
            // branch nodes (`branch.key`).
            TrieNodeV2::Extension(_) => unreachable!(),
            TrieNodeV2::Leaf(leaf) => {
                // A non-boundary leaf must be linked from its parent branch.
                if path.len() != UPPER_TRIE_MAX_DEPTH && !self.is_leaf_reachable_from_parent(&path)
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        "Leaf not reachable from parent branch, skipping",
                    );
                    return Ok(false)
                }

                let mut full_key = path;
                full_key.extend(&leaf.key);

                // Do not clobber an existing value for the same full key.
                match self.inner.values.entry(full_key) {
                    Entry::Occupied(_) => {
                        trace!(
                            target: "trie::parallel_sparse",
                            ?path,
                            ?full_key,
                            "Leaf full key value already present, skipping",
                        );
                        return Ok(false)
                    }
                    Entry::Vacant(entry) => {
                        entry.insert(leaf.value.clone());
                    }
                }

                self.nodes.insert(
                    path,
                    SparseNode::Leaf {
                        key: leaf.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                store_in_db_trie: Some(false),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );
            }
        }

        Ok(true)
    }
2804
    /// Recomputes the RLP/hashes for every node in this subtrie touched by `prefix_set`,
    /// returning the RLP node of the subtrie root (`self.path`).
    ///
    /// Traverses iteratively using the explicit path stack in the shared buffers; see
    /// [`SparseSubtrieInner::rlp_node`] for the per-node step.
    ///
    /// # Panics
    ///
    /// Panics if a path popped from the stack has no corresponding node, or (debug only)
    /// if the traversal does not end with exactly one entry on the RLP node stack.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(root = ?self.path), ret)]
    fn update_hashes(
        &mut self,
        prefix_set: &mut PrefixSet,
        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
        branch_node_masks: &BranchNodeMasksMap,
    ) -> RlpNode {
        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");

        // All changed paths must live under this subtrie's root path.
        debug_assert!(prefix_set.iter().all(|path| path.starts_with(&self.path)));

        debug_assert!(self.inner.buffers.path_stack.is_empty());
        self.inner
            .buffers
            .path_stack
            .push(RlpNodePathStackItem { path: self.path, is_in_prefix_set: None });

        // `rlp_node` may push more paths onto the stack when a node's children are not
        // yet computed; the loop runs until the traversal is complete.
        while let Some(stack_item) = self.inner.buffers.path_stack.pop() {
            let path = stack_item.path;
            let node = self
                .nodes
                .get_mut(&path)
                .unwrap_or_else(|| panic!("node at path {path:?} does not exist"));

            self.inner.rlp_node(prefix_set, update_actions, stack_item, node, branch_node_masks);
        }

        // Once the path stack drains, exactly one RLP node remains: the subtrie root.
        debug_assert_eq!(self.inner.buffers.rlp_node_stack.len(), 1);
        self.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
    }
2856
2857 fn wipe(&mut self) {
2860 self.nodes.clear();
2861 self.nodes.insert(Nibbles::default(), SparseNode::Empty);
2862 self.inner.clear();
2863 }
2864
2865 pub(crate) fn clear(&mut self) {
2867 self.nodes.clear();
2868 self.inner.clear();
2869 }
2870
    /// Shrinks the capacity of the node map down to at least `size` entries.
    pub(crate) fn shrink_nodes_to(&mut self, size: usize) {
        self.nodes.shrink_to(size);
    }
2875
    /// Shrinks the capacity of the leaf value map down to at least `size` entries.
    pub(crate) fn shrink_values_to(&mut self, size: usize) {
        self.inner.values.shrink_to(size);
    }
2880
2881 pub(crate) fn memory_size(&self) -> usize {
2883 let mut size = core::mem::size_of::<Self>();
2884
2885 for (path, node) in &self.nodes {
2887 size += core::mem::size_of::<Nibbles>();
2888 size += path.len(); size += node.memory_size();
2890 }
2891
2892 for (path, value) in &self.inner.values {
2894 size += core::mem::size_of::<Nibbles>();
2895 size += path.len(); size += core::mem::size_of::<Vec<u8>>() + value.capacity();
2897 }
2898
2899 size += self.inner.buffers.memory_size();
2901
2902 size
2903 }
2904}
2905
/// Inner state shared by a subtrie's hashing machinery: leaf values plus reusable
/// scratch buffers for the iterative RLP computation.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
struct SparseSubtrieInner {
    /// Map from a leaf's full key (node path + leaf key nibbles) to its RLP-encoded value.
    values: HashMap<Nibbles, Vec<u8>>,
    /// Reusable stacks and buffers used by `rlp_node`/`update_hashes`.
    buffers: SparseSubtrieBuffers,
}
2916
impl SparseSubtrieInner {
    /// Computes the RLP encoding of a single node, driven by the explicit
    /// `path_stack`/`rlp_node_stack` in [`SparseSubtrieBuffers`] instead of recursion.
    ///
    /// If a child's RLP is not yet available on `rlp_node_stack`, the current path is
    /// re-pushed onto `path_stack` together with the missing children and the function
    /// returns early; the node is re-processed once the children are done.
    ///
    /// On completion the node's RLP is pushed onto `rlp_node_stack` and its state is
    /// replaced with [`SparseNodeState::Cached`]. When `update_actions` is `Some` and the
    /// node is a changed branch, DB-trie update actions are recorded as well.
    fn rlp_node(
        &mut self,
        prefix_set: &mut PrefixSet,
        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
        mut stack_item: RlpNodePathStackItem,
        node: &mut SparseNode,
        branch_node_masks: &BranchNodeMasksMap,
    ) {
        let path = stack_item.path;
        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            "Calculating node RLP"
        );

        // Lazily query the prefix set and memoize the answer on the stack item, so
        // repeated checks for this node do not re-search the prefix set.
        let mut prefix_set_contains = |path: &Nibbles| {
            *stack_item.is_in_prefix_set.get_or_insert_with(|| prefix_set.contains(path))
        };

        let (rlp_node, node_type) = match node {
            SparseNode::Empty => (RlpNode::word_rlp(&EMPTY_ROOT_HASH), SparseNodeType::Empty),
            SparseNode::Leaf { key, state } => {
                // Full key of the leaf = node path + leaf key.
                let mut path = path;
                path.extend(key);
                let value = self.values.get(&path);

                // Reuse the cached RLP when the leaf is untouched, or when no value is
                // present (presumably a leaf revealed only by hash — confirm).
                let cached_rlp_node = state.cached_rlp_node();
                let use_cached =
                    cached_rlp_node.is_some() && (!prefix_set_contains(&path) || value.is_none());

                if let Some(rlp_node) = use_cached.then(|| cached_rlp_node.unwrap()) {
                    (rlp_node.clone(), SparseNodeType::Leaf)
                } else {
                    let value = value.expect("leaf value must exist in subtrie");
                    self.buffers.rlp_buf.clear();
                    let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.buffers.rlp_buf);
                    // Leaves are never stored in the DB trie.
                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: Some(false),
                    };
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?key,
                        value = %alloy_primitives::hex::encode(value),
                        ?rlp_node,
                        "Calculated leaf RLP node",
                    );
                    (rlp_node, SparseNodeType::Leaf)
                }
            }
            SparseNode::Extension { key, state } => {
                let mut child_path = path;
                child_path.extend(key);
                // Fast path: untouched extension with both RLP and DB-trie flag cached.
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    (
                        rlp_node.clone(),
                        SparseNodeType::Extension { store_in_db_trie: Some(store_in_db_trie) },
                    )
                } else if self.buffers.rlp_node_stack.last().is_some_and(|e| e.path == child_path) {
                    // The child's RLP is ready on the stack: consume it and encode this
                    // extension around it.
                    let RlpNodeStackItem { path: _, rlp_node: child, node_type: child_node_type } =
                        self.buffers.rlp_node_stack.pop().unwrap();
                    self.buffers.rlp_buf.clear();
                    let rlp_node =
                        ExtensionNodeRef::new(key, &child).rlp(&mut self.buffers.rlp_buf);

                    // An extension inherits the DB-trie storage decision from its child.
                    let store_in_db_trie_value = child_node_type.store_in_db_trie();

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?child_path,
                        ?child_node_type,
                        "Extension node"
                    );

                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: store_in_db_trie_value,
                    };

                    (
                        rlp_node,
                        SparseNodeType::Extension {
                            store_in_db_trie: store_in_db_trie_value,
                        },
                    )
                } else {
                    // Child not computed yet: revisit this node after the child.
                    self.buffers.path_stack.extend([
                        RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        },
                        RlpNodePathStackItem { path: child_path, is_in_prefix_set: None },
                    ]);
                    return
                }
            }
            SparseNode::Branch { state_mask, state, blinded_mask, blinded_hashes } => {
                // Fast path: untouched branch with both RLP and DB-trie flag cached.
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    let node_type =
                        SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie) };

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?node_type,
                        ?rlp_node,
                        "Adding node to RLP node stack (cached branch)"
                    );

                    self.buffers.rlp_node_stack.push(RlpNodeStackItem {
                        path,
                        rlp_node: rlp_node.clone(),
                        node_type,
                    });
                    return
                }

                // Only collect DB-trie update actions for changed branches when asked to.
                let retain_updates = update_actions.is_some() && prefix_set_contains(&path);

                // Collect the paths of all non-blinded children; pushed onto the path
                // stack below if any of them still need their RLP computed.
                self.buffers.branch_child_buf.clear();
                for bit in state_mask.iter().rev() {
                    let mut child = path;
                    child.push_unchecked(bit);

                    if !blinded_mask.is_bit_set(bit) {
                        self.buffers.branch_child_buf.push(child);
                    }
                }

                self.buffers.branch_value_stack_buf.resize(state_mask.len(), Default::default());

                let mut tree_mask = TrieMask::default();
                let mut hash_mask = TrieMask::default();
                let mut hashes = Vec::new();

                // Lazily look up and memoize this branch's previously-stored masks.
                let mut path_masks_storage = None;
                let mut path_masks =
                    || *path_masks_storage.get_or_insert_with(|| branch_node_masks.get(&path));

                for (i, child_nibble) in state_mask.iter().enumerate().rev() {
                    let mut child_path = path;
                    child_path.push_unchecked(child_nibble);

                    let (child, child_node_type) = if blinded_mask.is_bit_set(child_nibble) {
                        // Blinded child: its hash is known without further computation.
                        (
                            RlpNode::word_rlp(&blinded_hashes[child_nibble as usize]),
                            SparseNodeType::Hash,
                        )
                    } else if self
                        .buffers
                        .rlp_node_stack
                        .last()
                        .is_some_and(|e| e.path == child_path)
                    {
                        let RlpNodeStackItem { path: _, rlp_node, node_type } =
                            self.buffers.rlp_node_stack.pop().unwrap();

                        (rlp_node, node_type)
                    } else {
                        // A child is missing: re-queue this branch, then all pending
                        // children, and retry after they are computed.
                        self.buffers.path_stack.push(RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        });
                        self.buffers.path_stack.extend(
                            self.buffers
                                .branch_child_buf
                                .drain(..)
                                .map(|path| RlpNodePathStackItem { path, is_in_prefix_set: None }),
                        );
                        return
                    };

                    if retain_updates {
                        // Tree mask bit: set when the child itself is stored in the DB
                        // trie, or (for hash-only children) when the previously stored
                        // masks say so.
                        let should_set_tree_mask_bit =
                            if let Some(store_in_db_trie) = child_node_type.store_in_db_trie() {
                                store_in_db_trie
                            } else {
                                child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.tree_mask.is_bit_set(child_nibble)
                                    })
                            };
                        if should_set_tree_mask_bit {
                            tree_mask.set_bit(child_nibble);
                        }
                        // Hash mask bit: branch children always contribute their hash;
                        // hash-only children contribute it when the stored masks say so.
                        let hash = child.as_hash().filter(|_| {
                            child_node_type.is_branch() ||
                                (child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.hash_mask.is_bit_set(child_nibble)
                                    }))
                        });
                        if let Some(hash) = hash {
                            hash_mask.set_bit(child_nibble);
                            hashes.push(hash);
                        }
                    }

                    self.buffers.branch_value_stack_buf[i] = child;
                }

                trace!(
                    target: "trie::parallel_sparse",
                    ?path,
                    ?tree_mask,
                    ?hash_mask,
                    "Branch node masks"
                );

                self.buffers.rlp_buf.clear();
                let branch_node_ref =
                    BranchNodeRef::new(&self.buffers.branch_value_stack_buf, *state_mask);
                let rlp_node = branch_node_ref.rlp(&mut self.buffers.rlp_buf);

                // Record a DB-trie update action for this (non-root) changed branch.
                let store_in_db_trie_value = if let Some(update_actions) =
                    update_actions.as_mut().filter(|_| retain_updates && !path.is_empty())
                {
                    let store_in_db_trie = !tree_mask.is_empty() || !hash_mask.is_empty();
                    if store_in_db_trie {
                        // Hashes were collected in reverse nibble order above.
                        hashes.reverse();
                        let branch_node =
                            BranchNodeCompact::new(*state_mask, tree_mask, hash_mask, hashes, None);
                        update_actions
                            .push(SparseTrieUpdatesAction::InsertUpdated(path, branch_node));
                    } else {
                        // The node no longer needs DB-trie storage; if it previously had
                        // masks, mark it removed, otherwise drop any pending update.
                        let prev_had_masks = path_masks()
                            .is_some_and(|m| !m.tree_mask.is_empty() || !m.hash_mask.is_empty());
                        if prev_had_masks {
                            update_actions.push(SparseTrieUpdatesAction::InsertRemoved(path));
                        } else {
                            update_actions.push(SparseTrieUpdatesAction::RemoveUpdated(path));
                        }
                    }

                    store_in_db_trie
                } else {
                    false
                };

                *state = SparseNodeState::Cached {
                    rlp_node: rlp_node.clone(),
                    store_in_db_trie: Some(store_in_db_trie_value),
                };

                (
                    rlp_node,
                    SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie_value) },
                )
            }
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node_type,
            ?rlp_node,
            "Adding node to RLP node stack"
        );
        self.buffers.rlp_node_stack.push(RlpNodeStackItem { path, rlp_node, node_type });
    }

    /// Clears the values map and all scratch buffers, retaining their allocations.
    fn clear(&mut self) {
        self.values.clear();
        self.buffers.clear();
    }
}
3269
/// Outcome of a single step while applying a leaf update to the trie.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub enum LeafUpdateStep {
    /// The walk should continue with the next node.
    Continue,
    /// The update finished successfully.
    Complete {
        /// Paths of the nodes inserted while applying the update.
        inserted_nodes: Vec<Nibbles>,
    },
    /// The node required for this step was not found.
    #[default]
    NodeNotFound,
}
3284
impl LeafUpdateStep {
    /// Creates a [`Self::Complete`] step carrying the paths of the inserted nodes.
    pub const fn complete_with_insertions(inserted_nodes: Vec<Nibbles>) -> Self {
        Self::Complete { inserted_nodes }
    }
}
3291
/// Identifies which subtrie of a [`ParallelSparseTrie`] a path belongs to.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SparseSubtrieType {
    /// The upper subtrie (paths shorter than [`UPPER_TRIE_MAX_DEPTH`] nibbles).
    Upper,
    /// A lower subtrie, identified by its index in the lower subtries array.
    Lower(usize),
}
3308
3309impl SparseSubtrieType {
3310 pub const fn path_len_is_upper(len: usize) -> bool {
3315 len < UPPER_TRIE_MAX_DEPTH
3316 }
3317
3318 pub fn from_path(path: &Nibbles) -> Self {
3320 if Self::path_len_is_upper(path.len()) {
3321 Self::Upper
3322 } else {
3323 Self::Lower(path_subtrie_index_unchecked(path))
3324 }
3325 }
3326
3327 pub const fn lower_index(&self) -> Option<usize> {
3329 match self {
3330 Self::Upper => None,
3331 Self::Lower(index) => Some(*index),
3332 }
3333 }
3334}
3335
3336impl Ord for SparseSubtrieType {
3337 fn cmp(&self, other: &Self) -> Ordering {
3340 match (self, other) {
3341 (Self::Upper, Self::Upper) => Ordering::Equal,
3342 (Self::Upper, Self::Lower(_)) => Ordering::Less,
3343 (Self::Lower(_), Self::Upper) => Ordering::Greater,
3344 (Self::Lower(idx_a), Self::Lower(idx_b)) if idx_a == idx_b => Ordering::Equal,
3345 (Self::Lower(idx_a), Self::Lower(idx_b)) => idx_a.cmp(idx_b),
3346 }
3347 }
3348}
3349
impl PartialOrd for SparseSubtrieType {
    /// Delegates to [`Ord::cmp`]; ordering is total for this type.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
3355
/// Collection of reusable buffers used while computing subtrie hashes, so allocations
/// survive across updates.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrieBuffers {
    /// Stack of node paths still to be processed.
    path_stack: Vec<RlpNodePathStackItem>,
    /// Stack of computed RLP nodes.
    rlp_node_stack: Vec<RlpNodeStackItem>,
    /// Scratch buffer collecting a branch's non-blinded child paths.
    branch_child_buf: Vec<Nibbles>,
    /// Scratch buffer holding each branch child's RLP, indexed by state-mask position.
    branch_value_stack_buf: Vec<RlpNode>,
    /// Scratch buffer for RLP encoding.
    rlp_buf: Vec<u8>,
}
3372
3373impl SparseSubtrieBuffers {
3374 fn clear(&mut self) {
3376 self.path_stack.clear();
3377 self.rlp_node_stack.clear();
3378 self.branch_child_buf.clear();
3379 self.branch_value_stack_buf.clear();
3380 self.rlp_buf.clear();
3381 }
3382
3383 const fn memory_size(&self) -> usize {
3385 let mut size = core::mem::size_of::<Self>();
3386
3387 size += self.path_stack.capacity() * core::mem::size_of::<RlpNodePathStackItem>();
3388 size += self.rlp_node_stack.capacity() * core::mem::size_of::<RlpNodeStackItem>();
3389 size += self.branch_child_buf.capacity() * core::mem::size_of::<Nibbles>();
3390 size += self.branch_value_stack_buf.capacity() * core::mem::size_of::<RlpNode>();
3391 size += self.rlp_buf.capacity();
3392
3393 size
3394 }
3395}
3396
/// Item of the path stack used during iterative RLP computation.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RlpNodePathStackItem {
    /// Path of the node to process.
    pub path: Nibbles,
    /// Memoized result of the prefix-set membership check; `None` until first queried.
    pub is_in_prefix_set: Option<bool>,
}
3405
/// A lower subtrie taken out of the trie for hash recomputation.
#[derive(Debug)]
struct ChangedSubtrie {
    /// Index of this subtrie in the lower subtries array.
    index: usize,
    /// The subtrie itself, removed from the trie for the duration of the update.
    subtrie: Box<SparseSubtrie>,
    /// Prefix set of changed keys scoped to this subtrie.
    prefix_set: PrefixSet,
    /// Buffer collecting update actions; `Some` only when updates are retained.
    update_actions_buf: Option<Vec<SparseTrieUpdatesAction>>,
}
3419
/// Returns the index of the lower subtrie responsible for `path`.
///
/// Assumes `path` has at least [`UPPER_TRIE_MAX_DEPTH`] (= 2) nibbles; the first two
/// nibbles (one byte) select one of the [`NUM_LOWER_SUBTRIES`] subtries.
fn path_subtrie_index_unchecked(path: &Nibbles) -> usize {
    debug_assert_eq!(UPPER_TRIE_MAX_DEPTH, 2);
    // The first byte packs the first two nibbles, which is exactly the subtrie index.
    let idx = path.get_byte_unchecked(0) as usize;
    // SAFETY: a byte is always < 256 == 16^2 == NUM_LOWER_SUBTRIES, so the hint holds.
    unsafe { core::hint::assert_unchecked(idx < NUM_LOWER_SUBTRIES) };
    idx
}
3433
3434fn is_strict_descendant_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3439 if roots.is_empty() {
3440 return false;
3441 }
3442 debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3443 let idx = roots.partition_point(|root| root <= path);
3444 if idx > 0 {
3445 let candidate = &roots[idx - 1];
3446 if path.starts_with(candidate) && path.len() > candidate.len() {
3447 return true;
3448 }
3449 }
3450 false
3451}
3452
3453fn has_retained_descendant(retained: &[Nibbles], prefix: &Nibbles) -> bool {
3457 if retained.is_empty() {
3458 return false;
3459 }
3460 debug_assert!(retained.windows(2).all(|w| w[0] <= w[1]), "retained must be sorted by path");
3461 let idx = retained.partition_point(|path| path < prefix);
3462 idx < retained.len() && retained[idx].starts_with(prefix)
3463}
3464
3465fn starts_with_pruned_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3470 if roots.is_empty() {
3471 return false;
3472 }
3473 debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3474 let idx = roots.partition_point(|root| root <= path);
3475 if idx > 0 {
3476 let candidate = &roots[idx - 1];
3477 if path.starts_with(candidate) {
3478 return true;
3479 }
3480 }
3481 false
3482}
3483
/// Deferred mutation of [`SparseTrieUpdates`] produced while hashing a subtrie, applied
/// later so subtries can be processed independently.
#[derive(Clone, Debug, Eq, PartialEq)]
enum SparseTrieUpdatesAction {
    /// Record the branch node at this path as removed from the DB trie.
    InsertRemoved(Nibbles),
    /// Drop any pending update for the branch node at this path.
    RemoveUpdated(Nibbles),
    /// Record an updated branch node at this path.
    InsertUpdated(Nibbles, BranchNodeCompact),
}
3495
3496#[cfg(test)]
3497mod tests {
3498 use super::{
3499 path_subtrie_index_unchecked, LowerSparseSubtrie, ParallelSparseTrie, SparseSubtrie,
3500 SparseSubtrieType,
3501 };
3502 use crate::{
3503 parallel::ChangedSubtrie,
3504 provider::{DefaultTrieNodeProvider, NoRevealProvider},
3505 trie::SparseNodeState,
3506 LeafLookup, LeafLookupError, SparseNode, SparseTrie, SparseTrieUpdates,
3507 };
3508 use alloy_primitives::{
3509 b256, hex,
3510 map::{B256Set, HashMap},
3511 B256, U256,
3512 };
3513 use alloy_rlp::{Decodable, Encodable};
3514 use alloy_trie::{proof::AddedRemovedKeys, BranchNodeCompact, Nibbles};
3515 use assert_matches::assert_matches;
3516 use itertools::Itertools;
3517 use proptest::{prelude::*, sample::SizeRange};
3518 use proptest_arbitrary_interop::arb;
3519 use reth_execution_errors::SparseTrieErrorKind;
3520 use reth_primitives_traits::Account;
3521 use reth_provider::{
3522 test_utils::create_test_provider_factory, StorageSettingsCache, TrieWriter,
3523 };
3524 use reth_trie::{
3525 hashed_cursor::{noop::NoopHashedCursor, HashedPostStateCursor},
3526 node_iter::{TrieElement, TrieNodeIter},
3527 trie_cursor::{noop::NoopAccountTrieCursor, TrieCursor, TrieCursorFactory},
3528 walker::TrieWalker,
3529 HashedPostState,
3530 };
3531 use reth_trie_common::{
3532 prefix_set::PrefixSetMut,
3533 proof::{ProofNodes, ProofRetainer},
3534 updates::TrieUpdates,
3535 BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, BranchNodeV2, ExtensionNode,
3536 HashBuilder, LeafNode, ProofTrieNodeV2, RlpNode, TrieMask, TrieNode, TrieNodeV2,
3537 EMPTY_ROOT_HASH,
3538 };
3539 use reth_trie_db::DatabaseTrieCursorFactory;
3540 use std::collections::{BTreeMap, BTreeSet};
3541
3542 fn pad_nibbles_right(mut nibbles: Nibbles) -> Nibbles {
3544 nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![
3545 0;
3546 B256::len_bytes() * 2 - nibbles.len()
3547 ]));
3548 nibbles
3549 }
3550
3551 fn leaf_key(suffix: impl AsRef<[u8]>, total_len: usize) -> Nibbles {
3554 let suffix = suffix.as_ref();
3555 let mut nibbles = Nibbles::from_nibbles(suffix);
3556 nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![0; total_len - suffix.len()]));
3557 nibbles
3558 }
3559
    /// Creates an [`Account`] with the given nonce and default remaining fields.
    fn create_account(nonce: u64) -> Account {
        Account { nonce, ..Default::default() }
    }
3563
3564 fn large_account_value() -> Vec<u8> {
3565 let account = Account {
3566 nonce: 0x123456789abcdef,
3567 balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
3568 ..Default::default()
3569 };
3570 let mut buf = Vec::new();
3571 account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
3572 buf
3573 }
3574
3575 fn encode_account_value(nonce: u64) -> Vec<u8> {
3576 let account = Account { nonce, ..Default::default() };
3577 let trie_account = account.into_trie_account(EMPTY_ROOT_HASH);
3578 let mut buf = Vec::new();
3579 trie_account.encode(&mut buf);
3580 buf
3581 }
3582
    /// Test helper offering convenience constructors and assertions for
    /// [`ParallelSparseTrie`] tests.
    #[derive(Default)]
    struct ParallelSparseTrieTestContext;
3586
    impl ParallelSparseTrieTestContext {
        /// Asserts that the lower subtrie covering `path` has been revealed.
        fn assert_subtrie_exists(&self, trie: &ParallelSparseTrie, path: &Nibbles) {
            let idx = path_subtrie_index_unchecked(path);
            assert!(
                trie.lower_subtries[idx].as_revealed_ref().is_some(),
                "Expected lower subtrie at path {path:?} to exist",
            );
        }

        /// Returns the revealed lower subtrie covering `path`, panicking if unrevealed.
        fn get_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: &Nibbles,
        ) -> &'a SparseSubtrie {
            let idx = path_subtrie_index_unchecked(path);
            trie.lower_subtries[idx]
                .as_revealed_ref()
                .unwrap_or_else(|| panic!("Lower subtrie at path {path:?} should exist"))
        }

        /// Asserts the lower subtrie selected by `subtrie_prefix` has root path
        /// `expected_path`.
        fn assert_subtrie_path(
            &self,
            trie: &ParallelSparseTrie,
            subtrie_prefix: impl AsRef<[u8]>,
            expected_path: impl AsRef<[u8]>,
        ) {
            let subtrie_prefix = Nibbles::from_nibbles(subtrie_prefix);
            let expected_path = Nibbles::from_nibbles(expected_path);
            let idx = path_subtrie_index_unchecked(&subtrie_prefix);

            let subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap_or_else(|| {
                panic!("Lower subtrie at prefix {subtrie_prefix:?} should exist")
            });

            assert_eq!(
                subtrie.path, expected_path,
                "Subtrie at prefix {subtrie_prefix:?} should have path {expected_path:?}, but has {:?}",
                subtrie.path
            );
        }

        /// Creates padded 64-nibble leaf keys with distinct values (nonce = index + 1).
        fn create_test_leaves(&self, paths: &[&[u8]]) -> Vec<(Nibbles, Vec<u8>)> {
            paths
                .iter()
                .enumerate()
                .map(|(i, path)| {
                    (
                        pad_nibbles_right(Nibbles::from_nibbles(path)),
                        encode_account_value(i as u64 + 1),
                    )
                })
                .collect()
        }

        /// Creates a single padded leaf whose value encodes the given `value_nonce`.
        fn create_test_leaf(&self, path: impl AsRef<[u8]>, value_nonce: u64) -> (Nibbles, Vec<u8>) {
            (pad_nibbles_right(Nibbles::from_nibbles(path)), encode_account_value(value_nonce))
        }

        /// Inserts the given leaves into the trie, panicking on any failure.
        fn update_leaves(
            &self,
            trie: &mut ParallelSparseTrie,
            leaves: impl IntoIterator<Item = (Nibbles, Vec<u8>)>,
        ) {
            for (path, value) in leaves {
                trie.update_leaf(path, value, DefaultTrieNodeProvider).unwrap();
            }
        }

        /// Asserts the lower subtrie at `path` exists and returns a fluent assertion
        /// helper for it.
        fn assert_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: Nibbles,
        ) -> SubtrieAssertion<'a> {
            self.assert_subtrie_exists(trie, &path);
            let subtrie = self.get_subtrie(trie, &path);
            SubtrieAssertion::new(subtrie)
        }

        /// Returns a fluent assertion helper for the upper subtrie.
        fn assert_upper_subtrie<'a>(&self, trie: &'a ParallelSparseTrie) -> SubtrieAssertion<'a> {
            SubtrieAssertion::new(&trie.upper_subtrie)
        }

        /// Asserts the trie's root, retained updates, and node layout all agree with the
        /// reference hash builder output.
        fn assert_with_hash_builder(
            &self,
            trie: &mut ParallelSparseTrie,
            hash_builder_root: B256,
            hash_builder_updates: TrieUpdates,
            hash_builder_proof_nodes: ProofNodes,
        ) {
            assert_eq!(trie.root(), hash_builder_root);
            pretty_assertions::assert_eq!(
                BTreeMap::from_iter(trie.updates_ref().updated_nodes.clone()),
                BTreeMap::from_iter(hash_builder_updates.account_nodes)
            );
            assert_eq_parallel_sparse_trie_proof_nodes(trie, hash_builder_proof_nodes);
        }
    }
3693
    /// Fluent assertion helper over a single [`SparseSubtrie`]'s nodes and values.
    struct SubtrieAssertion<'a> {
        subtrie: &'a SparseSubtrie,
    }
3698
    impl<'a> SubtrieAssertion<'a> {
        fn new(subtrie: &'a SparseSubtrie) -> Self {
            Self { subtrie }
        }

        /// Asserts a branch node exists at `path` with at least the given mask bits set.
        fn has_branch(self, path: &Nibbles, expected_mask_bits: &[u8]) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Branch { state_mask, .. }) => {
                    for bit in expected_mask_bits {
                        assert!(
                            state_mask.is_bit_set(*bit),
                            "Expected branch at {path:?} to have bit {bit} set, instead mask is: {state_mask:?}",
                        );
                    }
                }
                node => panic!("Expected branch node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts a leaf node exists at `path` with exactly the expected key.
        fn has_leaf(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Leaf { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected leaf at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected leaf node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts an extension node exists at `path` with exactly the expected key.
        fn has_extension(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Extension { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected extension at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected extension node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts the subtrie stores exactly `expected_value` for the full key `path`.
        fn has_value(self, path: &Nibbles, expected_value: &[u8]) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert_eq!(
                actual.map(|v| v.as_slice()),
                Some(expected_value),
                "Expected value at {path:?} to be {expected_value:?}, found {actual:?}",
            );
            self
        }

        /// Asserts the subtrie stores no value for the full key `path`.
        fn has_no_value(self, path: &Nibbles) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert!(actual.is_none(), "Expected no value at {path:?}, but found {actual:?}");
            self
        }
    }
3761
3762 fn create_leaf_node(key: impl AsRef<[u8]>, value_nonce: u64) -> TrieNodeV2 {
3763 TrieNodeV2::Leaf(LeafNode::new(
3764 Nibbles::from_nibbles(key),
3765 encode_account_value(value_nonce),
3766 ))
3767 }
3768
3769 fn create_branch_node(
3770 key: Nibbles,
3771 children_indices: &[u8],
3772 child_hashes: impl IntoIterator<Item = RlpNode>,
3773 ) -> TrieNodeV2 {
3774 let mut stack = Vec::new();
3775 let mut state_mask = TrieMask::default();
3776
3777 for (&idx, hash) in children_indices.iter().zip(child_hashes) {
3778 state_mask.set_bit(idx);
3779 stack.push(hash);
3780 }
3781
3782 let branch_rlp_node = if key.is_empty() {
3783 None
3784 } else {
3785 Some(RlpNode::from_rlp(&alloy_rlp::encode(BranchNodeRef::new(&stack, state_mask))))
3786 };
3787
3788 TrieNodeV2::Branch(BranchNodeV2::new(key, stack, state_mask, branch_rlp_node))
3789 }
3790
    /// Builds a keyless branch [`TrieNodeV2`] with children at the given nibble indices.
    fn create_branch_node_with_children(
        children_indices: &[u8],
        child_hashes: impl IntoIterator<Item = RlpNode>,
    ) -> TrieNodeV2 {
        create_branch_node(Nibbles::default(), children_indices, child_hashes)
    }
3797
    /// Runs the reference [`HashBuilder`] over `state`, retaining proofs for
    /// `proof_targets`, and returns the resulting root, trie updates, proof nodes, and
    /// per-branch hash/tree masks for comparison against the sparse trie.
    fn run_hash_builder(
        state: impl IntoIterator<Item = (Nibbles, Account)> + Clone,
        trie_cursor: impl TrieCursor,
        destroyed_accounts: B256Set,
        proof_targets: impl IntoIterator<Item = Nibbles>,
    ) -> (B256, TrieUpdates, ProofNodes, HashMap<Nibbles, TrieMask>, HashMap<Nibbles, TrieMask>)
    {
        let mut account_rlp = Vec::new();

        let mut hash_builder = HashBuilder::default()
            .with_updates(true)
            .with_proof_retainer(ProofRetainer::from_iter(proof_targets).with_added_removed_keys(
                Some(AddedRemovedKeys::default().with_assume_added(true)),
            ));

        // Every updated and destroyed key participates in the walk.
        let mut prefix_set = PrefixSetMut::default();
        prefix_set.extend_keys(state.clone().into_iter().map(|(nibbles, _)| nibbles));
        prefix_set.extend_keys(destroyed_accounts.iter().map(Nibbles::unpack));
        let walker = TrieWalker::<_>::state_trie(trie_cursor, prefix_set.freeze())
            .with_deletions_retained(true);
        let hashed_post_state = HashedPostState::default()
            .with_accounts(state.into_iter().map(|(nibbles, account)| {
                (nibbles.pack().into_inner().unwrap().into(), Some(account))
            }))
            .into_sorted();
        let mut node_iter = TrieNodeIter::state_trie(
            walker,
            HashedPostStateCursor::new_account(
                NoopHashedCursor::<Account>::default(),
                &hashed_post_state,
            ),
        );

        // Feed branches and RLP-encoded account leaves to the hash builder in
        // iteration order, reusing one RLP buffer for all leaves.
        while let Some(node) = node_iter.try_next().unwrap() {
            match node {
                TrieElement::Branch(branch) => {
                    hash_builder.add_branch(branch.key, branch.value, branch.children_are_in_trie);
                }
                TrieElement::Leaf(key, account) => {
                    let account = account.into_trie_account(EMPTY_ROOT_HASH);
                    account.encode(&mut account_rlp);

                    hash_builder.add_leaf(Nibbles::unpack(key), &account_rlp);
                    account_rlp.clear();
                }
            }
        }
        let root = hash_builder.root();
        let proof_nodes = hash_builder.take_proof_nodes();
        let branch_node_hash_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.hash_mask))
            .collect();
        let branch_node_tree_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.tree_mask))
            .collect();

        let mut trie_updates = TrieUpdates::default();
        let removed_keys = node_iter.walker.take_removed_keys();
        trie_updates.finalize(hash_builder, removed_keys, destroyed_accounts);

        (root, trie_updates, proof_nodes, branch_node_hash_masks, branch_node_tree_masks)
    }
3872
    /// Builds a [`ParallelSparseTrie`] (with updates retained) directly from the given
    /// `(path, node)` pairs, inserting a placeholder value for every leaf node.
    fn new_test_trie<Nodes>(nodes: Nodes) -> ParallelSparseTrie
    where
        Nodes: Iterator<Item = (Nibbles, SparseNode)>,
    {
        let mut trie = ParallelSparseTrie::default().with_updates(true);

        for (path, node) in nodes {
            let subtrie = trie.subtrie_for_path_mut(&path);
            if let SparseNode::Leaf { key, .. } = &node {
                // Leaves need a value entry under their full key (path + leaf key).
                let mut full_key = path;
                full_key.extend(key);
                subtrie.inner.values.insert(full_key, "LEAF VALUE".into());
            }
            subtrie.nodes.insert(path, node);
        }
        trie
    }
3892
    /// Returns all revealed nodes of the trie — the upper subtrie plus every revealed
    /// lower subtrie — sorted by path.
    fn parallel_sparse_trie_nodes(
        sparse_trie: &ParallelSparseTrie,
    ) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
        let lower_sparse_nodes = sparse_trie
            .lower_subtries
            .iter()
            .filter_map(|subtrie| subtrie.as_revealed_ref())
            .flat_map(|subtrie| subtrie.nodes.iter());

        let upper_sparse_nodes = sparse_trie.upper_subtrie.nodes.iter();

        lower_sparse_nodes.chain(upper_sparse_nodes).sorted_by_key(|(path, _)| *path)
    }
3906
    /// Asserts that the sparse trie's revealed nodes structurally match the proof nodes
    /// produced by the reference hash builder, pairwise in path order.
    fn assert_eq_parallel_sparse_trie_proof_nodes(
        sparse_trie: &ParallelSparseTrie,
        proof_nodes: ProofNodes,
    ) {
        let proof_nodes = proof_nodes
            .into_nodes_sorted()
            .into_iter()
            .map(|(path, node)| (path, TrieNodeV2::decode(&mut node.as_ref()).unwrap()));

        let all_sparse_nodes = parallel_sparse_trie_nodes(sparse_trie);

        for ((proof_node_path, proof_node), (sparse_node_path, sparse_node)) in
            proof_nodes.zip(all_sparse_nodes)
        {
            assert_eq!(&proof_node_path, sparse_node_path);

            // Structural comparison only: branches by state mask, extensions and
            // leaves by key.
            let equals = match (&proof_node, &sparse_node) {
                (TrieNodeV2::EmptyRoot, SparseNode::Empty) => true,
                (
                    TrieNodeV2::Branch(BranchNodeV2 { state_mask: proof_state_mask, .. }),
                    SparseNode::Branch { state_mask: sparse_state_mask, .. },
                ) => proof_state_mask == sparse_state_mask,
                (
                    TrieNodeV2::Extension(ExtensionNode { key: proof_key, .. }),
                    SparseNode::Extension { key: sparse_key, .. },
                ) |
                (
                    TrieNodeV2::Leaf(LeafNode { key: proof_key, .. }),
                    SparseNode::Leaf { key: sparse_key, .. },
                ) => proof_key == sparse_key,
                // An empty sparse node is skipped regardless of the paired proof node.
                (_, SparseNode::Empty) => continue,
                _ => false,
            };
            assert!(
                equals,
                "path: {proof_node_path:?}\nproof node: {proof_node:?}\nsparse node: {sparse_node:?}"
            );
        }
    }
3953
    #[test]
    fn test_get_changed_subtries_empty() {
        // With no lower subtries revealed, nothing is taken as changed and the whole
        // prefix set is reported back as unchanged.
        let mut trie = ParallelSparseTrie::default();
        let mut prefix_set = PrefixSetMut::from([Nibbles::default()]).freeze();

        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
        assert!(subtries.is_empty());
        assert_eq!(unchanged_prefix_set, PrefixSetMut::from(prefix_set.iter().copied()));
    }
3963
3964 #[test]
3965 fn test_get_changed_subtries() {
3966 let mut trie = ParallelSparseTrie::default();
3968 let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
3969 let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
3970 let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
3971 let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
3972 let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
3973 let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
3974
3975 trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
3977 trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
3978 trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3);
3979
3980 let unchanged_prefix_set = PrefixSetMut::from([
3981 Nibbles::from_nibbles([0x0]),
3982 Nibbles::from_nibbles([0x2, 0x0, 0x0]),
3983 ]);
3984 let mut prefix_set = PrefixSetMut::from([
3986 Nibbles::from_nibbles([0x1, 0x0, 0x0]),
3988 Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0]),
3989 ]);
3990 prefix_set.extend(unchanged_prefix_set);
3991 let mut prefix_set = prefix_set.freeze();
3992
3993 let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
3995 assert_eq!(
3996 subtries
3997 .into_iter()
3998 .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
3999 (index, subtrie, prefix_set.iter().copied().collect::<Vec<_>>())
4000 })
4001 .collect::<Vec<_>>(),
4002 vec![(
4003 subtrie_2_index,
4004 subtrie_2,
4005 vec![
4006 Nibbles::from_nibbles([0x1, 0x0, 0x0]),
4007 Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0])
4008 ]
4009 )]
4010 );
4011 assert_eq!(unchanged_prefix_set, unchanged_prefix_set);
4012 assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_none());
4013
4014 assert_eq!(trie.lower_subtries[subtrie_1_index], LowerSparseSubtrie::Revealed(subtrie_1));
4016 }
4017
4018 #[test]
4019 fn test_get_changed_subtries_all() {
4020 let mut trie = ParallelSparseTrie::default();
4022 let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
4023 let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
4024 let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
4025 let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
4026 let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
4027 let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
4028
4029 trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
4031 trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
4032 trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3.clone());
4033
4034 let mut prefix_set = PrefixSetMut::all().freeze();
4036
4037 let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
4039 assert_eq!(
4040 subtries
4041 .into_iter()
4042 .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
4043 (index, subtrie, prefix_set.all())
4044 })
4045 .collect::<Vec<_>>(),
4046 vec![
4047 (subtrie_1_index, subtrie_1, true),
4048 (subtrie_2_index, subtrie_2, true),
4049 (subtrie_3_index, subtrie_3, true)
4050 ]
4051 );
4052 assert_eq!(unchanged_prefix_set, PrefixSetMut::all());
4053
4054 assert!(trie.lower_subtries.iter().all(|subtrie| subtrie.as_revealed_ref().is_none()));
4055 }
4056
4057 #[test]
4058 fn test_sparse_subtrie_type() {
4059 assert_eq!(SparseSubtrieType::from_path(&Nibbles::new()), SparseSubtrieType::Upper);
4060 assert_eq!(
4061 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0])),
4062 SparseSubtrieType::Upper
4063 );
4064 assert_eq!(
4065 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15])),
4066 SparseSubtrieType::Upper
4067 );
4068 assert_eq!(
4069 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0])),
4070 SparseSubtrieType::Lower(0)
4071 );
4072 assert_eq!(
4073 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0, 0])),
4074 SparseSubtrieType::Lower(0)
4075 );
4076 assert_eq!(
4077 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1])),
4078 SparseSubtrieType::Lower(1)
4079 );
4080 assert_eq!(
4081 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1, 0])),
4082 SparseSubtrieType::Lower(1)
4083 );
4084 assert_eq!(
4085 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 15])),
4086 SparseSubtrieType::Lower(15)
4087 );
4088 assert_eq!(
4089 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 0])),
4090 SparseSubtrieType::Lower(240)
4091 );
4092 assert_eq!(
4093 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 1])),
4094 SparseSubtrieType::Lower(241)
4095 );
4096 assert_eq!(
4097 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15])),
4098 SparseSubtrieType::Lower(255)
4099 );
4100 assert_eq!(
4101 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15, 15])),
4102 SparseSubtrieType::Lower(255)
4103 );
4104 }
4105
    #[test]
    fn test_reveal_node_leaves() {
        // Case 1: a leaf revealed at a one-nibble path stays in the upper subtrie.
        //
        // Start from a root branch with a single hashed child at nibble 0x1.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        {
            let path = Nibbles::from_nibbles([0x1]);
            let node = create_leaf_node([0x2, 0x3], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // The leaf lands in the upper subtrie (its path is only one nibble
            // deep) and is revealed in the `Cached` state.
            assert_matches!(
                trie.upper_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                if key == &Nibbles::from_nibbles([0x2, 0x3])
            );

            // The leaf's value is stored under its full path (node path + leaf key).
            let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            assert_eq!(
                trie.upper_subtrie.inner.values.get(&full_path),
                Some(&encode_account_value(42))
            );
        }

        // Case 2: a leaf revealed at a two-nibble path creates a lower subtrie.
        //
        // Rebuild the trie with a branch at [0x1] so the next leaf sits at depth 2.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let branch_at_1 =
            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x1]),
            node: branch_at_1,
            masks: None,
        }])
        .unwrap();

        {
            let path = Nibbles::from_nibbles([0x1, 0x2]);
            let node = create_leaf_node([0x3, 0x4], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // A two-nibble path maps to a lower subtrie slot, which must now be
            // revealed.
            let idx = path_subtrie_index_unchecked(&path);
            assert!(trie.lower_subtries[idx].as_revealed_ref().is_some());

            // The lower subtrie is rooted at the leaf's own path.
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, path);

            // The leaf itself is stored inside the lower subtrie in `Cached` state.
            assert_matches!(
                lower_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                if key == &Nibbles::from_nibbles([0x3, 0x4])
            );
        }

        {
            // Case 3: revealing a deeper node in the same slot must not move the
            // existing lower subtrie's root — it stays at [0x1, 0x2].
            let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            let node = create_leaf_node([0x4, 0x5], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            let idx = path_subtrie_index_unchecked(&path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2]));
        }
    }
4187
4188 #[test]
4189 fn test_reveal_node_branch_all_upper() {
4190 let path = Nibbles::new();
4191 let child_hashes = [
4192 RlpNode::word_rlp(&B256::repeat_byte(0x11)),
4193 RlpNode::word_rlp(&B256::repeat_byte(0x22)),
4194 ];
4195 let node = create_branch_node_with_children(&[0x0, 0x5], child_hashes.clone());
4196 let masks = None;
4197 let trie = ParallelSparseTrie::from_root(node, masks, true).unwrap();
4198
4199 assert_eq!(
4201 trie.upper_subtrie.nodes.get(&path).unwrap(),
4202 &SparseNode::new_branch(
4203 0b0000000000100001.into(),
4204 &[(0, child_hashes[0].as_hash().unwrap()), (5, child_hashes[1].as_hash().unwrap())]
4205 )
4206 );
4207
4208 let child_path_0 = Nibbles::from_nibbles([0x0]);
4210 let child_path_5 = Nibbles::from_nibbles([0x5]);
4211 assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_0));
4212 assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_5));
4213 }
4214
    #[test]
    fn test_reveal_node_branch_cross_level() {
        // Reveal a branch at depth 1 (upper subtrie) whose children sit at depth 2
        // (lower subtries). The branch keeps its children's hashes, and revealing
        // the children later creates one lower subtrie per child.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        let path = Nibbles::from_nibbles([0x1]);
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x33)),
            RlpNode::word_rlp(&B256::repeat_byte(0x44)),
            RlpNode::word_rlp(&B256::repeat_byte(0x55)),
        ];
        let node = create_branch_node_with_children(&[0x0, 0x7, 0xf], child_hashes.clone());
        let masks = None;

        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

        // The branch sits in the upper subtrie, revealed in `Cached` state with
        // the RLP hash its parent already advertised for it (0xAA).
        assert_eq!(
            trie.upper_subtrie.nodes.get(&path).unwrap(),
            &SparseNode::new_branch(
                0b1000000010000001.into(),
                &[
                    (0x0, child_hashes[0].as_hash().unwrap()),
                    (0x7, child_hashes[1].as_hash().unwrap()),
                    (0xf, child_hashes[2].as_hash().unwrap())
                ]
            )
            .with_state(SparseNodeState::Cached {
                rlp_node: RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                store_in_db_trie: Some(false),
            })
        );

        // Two-nibble child paths — each maps to a distinct lower subtrie slot.
        let child_paths = [
            Nibbles::from_nibbles([0x1, 0x0]),
            Nibbles::from_nibbles([0x1, 0x7]),
            Nibbles::from_nibbles([0x1, 0xf]),
        ];

        let mut children = child_paths
            .iter()
            .map(|path| ProofTrieNodeV2 {
                path: *path,
                node: create_leaf_node([0x0], 1),
                masks: None,
            })
            .collect::<Vec<_>>();

        trie.reveal_nodes(&mut children).unwrap();

        // Revealing the children leaves the parent branch untouched: same state
        // mask, still cached with the same RLP node, and no blinded children.
        assert_matches!(
            trie.upper_subtrie.nodes.get(&path),
            Some(&SparseNode::Branch {
                state_mask,
                state: SparseNodeState::Cached { ref rlp_node, store_in_db_trie: Some(false) },
                blinded_mask,
                ..
            }) if state_mask == 0b1000000010000001.into() && blinded_mask.is_empty() && *rlp_node == RlpNode::word_rlp(&B256::repeat_byte(0xAA))
        );

        // Each child leaf landed in its own lower subtrie, rooted at the child's
        // path and cached with the hash the parent branch advertised for it.
        for (i, child_path) in child_paths.iter().enumerate() {
            let idx = path_subtrie_index_unchecked(child_path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(&lower_subtrie.path, child_path);
            assert_eq!(
                lower_subtrie.nodes.get(child_path),
                Some(&SparseNode::Leaf {
                    key: Nibbles::from_nibbles([0x0]),
                    state: SparseNodeState::Cached {
                        rlp_node: child_hashes[i].clone(),
                        store_in_db_trie: Some(false)
                    }
                })
            );
        }
    }
4295
    #[test]
    fn test_update_subtrie_hashes_prefix_set_matching() {
        // Root branch with hashed children at nibbles 0x0 and 0x3.
        let root_branch = create_branch_node_with_children(
            &[0x0, 0x3],
            [
                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
            ],
        );
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Three 64-nibble leaf paths in three different lower subtrie slots:
        // 0x00…, 0x01… and 0x02…. Leaves 1 and 2 are revealed as proof nodes;
        // leaf 3 is inserted via `update_leaf` below.
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..2);
        let leaf_1_key = leaf_1_full_path.slice(2..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 1], vec![0; 62]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..2);
        let leaf_2_key = leaf_2_full_path.slice(2..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 2], vec![0; 62]].concat());
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), 1);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), 2);

        // Intermediate branch at [0x0] pointing at the two revealed leaves.
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x00)),
            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
        ];
        let branch_path = Nibbles::from_nibbles([0x0]);
        let branch_node = create_branch_node_with_children(&[0x0, 0x1], child_hashes);

        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: branch_path, node: branch_node, masks: None },
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Insert the third leaf directly into the trie.
        let provider = NoRevealProvider;
        trie.update_leaf(leaf_3_full_path, encode_account_value(3), provider).unwrap();

        let subtrie_1_index = SparseSubtrieType::from_path(&leaf_1_path).lower_index().unwrap();
        let subtrie_2_index = SparseSubtrieType::from_path(&leaf_2_path).lower_index().unwrap();
        let leaf_3_path = leaf_3_full_path.slice(..2);
        let subtrie_3_index = SparseSubtrieType::from_path(&leaf_3_path).lower_index().unwrap();

        // Keys the test expects to remain in `trie.prefix_set` after hashing.
        // NOTE(review): this includes leaf 2's full path even though its subtrie is
        // revealed — confirm against `take_changed_lower_subtries` semantics.
        let mut unchanged_prefix_set = PrefixSetMut::from([
            Nibbles::from_nibbles([0x0]),
            leaf_2_full_path,
            Nibbles::from_nibbles([0x3, 0x0, 0x0]),
        ]);
        // Keys targeting the revealed subtrie at slot 0x01.
        let mut prefix_set = PrefixSetMut::from([
            Nibbles::from_nibbles([0x0, 0x1, 0x0]),
            Nibbles::from_nibbles([0x0, 0x1, 0x1, 0x0]),
        ]);
        prefix_set.extend(unchanged_prefix_set.clone());
        trie.prefix_set = prefix_set;

        trie.update_subtrie_hashes();

        // Leaf 3's full path is also expected in the leftover set — presumably
        // carried over from the `update_leaf` call above; TODO confirm.
        unchanged_prefix_set.insert(leaf_3_full_path);

        // Only the unmatched keys remain in the trie's prefix set after hashing…
        assert_eq!(
            trie.prefix_set.clone().freeze().into_iter().collect::<Vec<_>>(),
            unchanged_prefix_set.freeze().into_iter().collect::<Vec<_>>()
        );
        // …and the subtries taken out for hashing were put back afterwards.
        assert!(trie.lower_subtries[subtrie_1_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_3_index].as_revealed_ref().is_some());
    }
4381
    #[test]
    fn test_subtrie_update_hashes() {
        // Reveal a small hand-built subtrie rooted at [0x0, 0x0], run
        // `update_hashes` over it, and compare every node's cached hash against
        // the reference hash builder's proof nodes.
        let mut subtrie = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));

        // Three 64-nibble account paths:
        //   leaf_1 = 0x00000…, leaf_2 = 0x00001…, leaf_3 = 0x001…
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..5);
        let leaf_1_key = leaf_1_full_path.slice(5..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 0, 0, 0, 1], vec![0; 59]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..5);
        let leaf_2_key = leaf_2_full_path.slice(5..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 0, 1], vec![0; 61]].concat());
        let leaf_3_path = leaf_3_full_path.slice(..3);
        let leaf_3_key = leaf_3_full_path.slice(3..);

        let account_1 = create_account(1);
        let account_2 = create_account(2);
        let account_3 = create_account(3);
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);
        let leaf_3 = create_leaf_node(leaf_3_key.to_vec(), account_3.nonce);

        // branch_1 at [0,0,0,0] splits leaves 1 and 2 and is reached through an
        // extension at [0,0,0]. NOTE(review): `create_branch_node` takes the
        // extension key [0] and the node is revealed at `extension_path` below, so
        // the helper appears to emit the extension wrapper — confirm against its
        // definition.
        let extension_path = Nibbles::from_nibbles([0, 0, 0]);
        let branch_1_path = Nibbles::from_nibbles([0, 0, 0, 0]);
        let branch_1 = create_branch_node(
            Nibbles::from_nibbles([0]),
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // branch_2 at [0,0] splits the branch_1 side (nibble 0) from leaf_3
        // (nibble 1).
        let branch_2_path = Nibbles::from_nibbles([0, 0]);
        let branch_2 = create_branch_node_with_children(
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&branch_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_3)),
            ],
        );

        // Reveal top-down: parents before children.
        subtrie.reveal_node(branch_2_path, &branch_2, None, None).unwrap();
        subtrie.reveal_node(extension_path, &branch_1, None, None).unwrap();
        subtrie.reveal_node(leaf_1_path, &leaf_1, None, None).unwrap();
        subtrie.reveal_node(leaf_2_path, &leaf_2, None, None).unwrap();
        subtrie.reveal_node(leaf_3_path, &leaf_3, None, None).unwrap();

        // Reference hashes from the standard hash builder, retaining proofs for
        // every path we want to compare below.
        let (_, _, proof_nodes, _, _) = run_hash_builder(
            [
                (leaf_1_full_path, account_1),
                (leaf_2_full_path, account_2),
                (leaf_3_full_path, account_3),
            ],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_2_path, leaf_1_full_path, leaf_2_full_path, leaf_3_full_path],
        );

        // Hash the subtrie with all three leaf paths marked as changed; no update
        // tracking and no pre-existing branch masks.
        subtrie.update_hashes(
            &mut PrefixSetMut::from([leaf_1_full_path, leaf_2_full_path, leaf_3_full_path])
                .freeze(),
            &mut None,
            &BranchNodeMasksMap::default(),
        );

        // Node-by-node comparison of cached hashes against the hash builder.
        let hash_builder_branch_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_1_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_1_hash =
            subtrie.nodes.get(&branch_1_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_1_hash, subtrie_branch_1_hash);

        let hash_builder_extension_hash =
            RlpNode::from_rlp(proof_nodes.get(&extension_path).unwrap().as_ref())
                .as_hash()
                .unwrap();
        let subtrie_extension_hash =
            subtrie.nodes.get(&extension_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_extension_hash, subtrie_extension_hash);

        let hash_builder_branch_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_2_hash =
            subtrie.nodes.get(&branch_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_2_hash, subtrie_branch_2_hash);

        let subtrie_leaf_1_hash = subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().unwrap();
        let hash_builder_leaf_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_1_path).unwrap().as_ref()).as_hash().unwrap();
        assert_eq!(hash_builder_leaf_1_hash, subtrie_leaf_1_hash);

        let hash_builder_leaf_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_2_hash = subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_2_hash, subtrie_leaf_2_hash);

        let hash_builder_leaf_3_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_3_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_3_hash = subtrie.nodes.get(&leaf_3_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_3_hash, subtrie_leaf_3_hash);
    }
4489
    #[test]
    fn test_remove_leaf_branch_becomes_extension() {
        // Layout before removal:
        //   root extension (key 0x5)
        //     -> branch at [0x5] with children at nibbles 0x0 and 0x3 (mask 0b1001)
        //          0x0 -> extension [0x5,0x0] (key 0x23)
        //                   -> branch [0x5,0x0,0x2,0x3] with leaves at 0x1 and 0x3
        //          0x3 -> leaf [0x5,0x3] (removed below)
        // Removing the 0x3 leaf leaves the [0x5] branch with one child, so it
        // collapses and the root extension absorbs the child extension's key.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b1001), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3])),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0101), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_leaf(leaf_key([0x7], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf stored under [0x5, 0x3].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x7]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        let lower_subtrie_50 = trie.lower_subtries[0x50].as_revealed_ref().unwrap();

        // The lower subtrie that held only the removed leaf is gone entirely.
        assert_matches!(trie.lower_subtries[0x53].as_revealed_ref(), None);

        // The root extension now reaches the deep branch directly, its key being
        // the merge of 0x5, the surviving child nibble 0x0, and the key 0x23.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([])),
            Some(SparseNode::Extension{ key, ..})
            if key == &Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])
        );
        // The collapsed branch at [0x5] and the absorbed extension at [0x5, 0x0]
        // no longer exist as standalone nodes.
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
        assert_matches!(lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0])), None);
        // The deep branch itself is untouched.
        assert_matches!(
            lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])),
            Some(SparseNode::Branch{ state_mask, .. })
            if *state_mask == 0b0101.into()
        );
    }
4556
    #[test]
    fn test_remove_leaf_branch_becomes_leaf() {
        // Root branch with two leaf children (nibbles 0x0 and 0x1). Removing one
        // leaf collapses the branch into a single leaf at the root.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
                (Nibbles::from_nibbles([0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 63))),
            ]
            .into_iter(),
        );

        // Pretend the root branch was previously recorded as an updated node, so
        // the removal can be verified to retract it from the pending updates.
        if let Some(updates) = trie.updates.as_mut() {
            updates
                .updated_nodes
                .insert(Nibbles::default(), BranchNodeCompact::new(0b11, 0, 0, vec![], None));
        }

        let provider = NoRevealProvider;

        // Remove the leaf stored under nibble 0x0.
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // The removed leaf's value is gone.
        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);

        // The root is now a leaf whose key is the surviving child's nibble (0x1)
        // followed by that child's own key.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Leaf{ key, ..})
            if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
        );

        // Neither former child exists as a standalone node anymore.
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])), None);

        // NOTE(review): `new_test_trie` is assumed to enable update tracking — the
        // `unwrap` here relies on it.
        let updates = trie.updates.as_ref().unwrap();

        // The collapsed branch is reported as removed from the DB trie…
        assert!(updates.removed_nodes.contains(&Nibbles::default()));

        // …and its stale `updated_nodes` entry was dropped.
        assert!(!updates.updated_nodes.contains_key(&Nibbles::default()));
    }
4614
4615 #[test]
4616 fn test_remove_leaf_extension_becomes_leaf() {
4617 let mut trie = new_test_trie(
4626 [
4627 (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
4628 (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
4629 (Nibbles::from_nibbles([0x5, 0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 62))),
4630 (Nibbles::from_nibbles([0x5, 0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
4631 ]
4632 .into_iter(),
4633 );
4634
4635 let provider = NoRevealProvider;
4636
4637 let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x1, 0x2]));
4639 trie.remove_leaf(&leaf_full_path, provider).unwrap();
4640
4641 let upper_subtrie = &trie.upper_subtrie;
4642
4643 assert_matches!(trie.lower_subtries[0x50].as_revealed_ref(), None);
4647 assert_matches!(trie.lower_subtries[0x51].as_revealed_ref(), None);
4648
4649 let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]));
4651 assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));
4652
4653 assert_matches!(
4655 upper_subtrie.nodes.get(&Nibbles::default()),
4656 Some(SparseNode::Leaf{ key, ..})
4657 if key == &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]))
4658 );
4659
4660 assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
4662 }
4663
    #[test]
    fn test_remove_leaf_branch_on_branch() {
        // Layout:
        //   root branch (children 0x0 and 0x2, mask 0b0101)
        //     0x0 -> leaf
        //     0x2 -> branch (children 0x0 and 0x1, mask 0b0011)
        //              0x0 -> leaf (removed below)
        //              0x1 -> leaf
        // Removing the [0x2, 0x0] leaf collapses the inner branch into a leaf at
        // [0x2], while the root branch keeps its shape.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0101), &[])),
                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
                (Nibbles::from_nibbles([0x2]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x2, 0x0]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
                (Nibbles::from_nibbles([0x2, 0x1]), SparseNode::new_leaf(leaf_key([0x5, 0x6], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf stored under [0x2, 0x0].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x0, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // Both lower subtries under 0x2 were drained by the collapse.
        assert_matches!(trie.lower_subtries[0x20].as_revealed_ref(), None);
        assert_matches!(trie.lower_subtries[0x21].as_revealed_ref(), None);

        // The surviving leaf's value moved into the upper subtrie.
        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x1, 0x5, 0x6]));
        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));

        // The root branch keeps its original state mask…
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch{ state_mask, .. })
            if *state_mask == 0b0101.into()
        );

        // …and [0x2] is now a leaf whose key merges the surviving child's nibble
        // (0x1) with that child's own key.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x2])),
            Some(SparseNode::Leaf{ key, ..})
            if key == &leaf_key([0x1, 0x5, 0x6], 63)
        );
    }
4718
    #[test]
    fn test_remove_leaf_lower_subtrie_root_path_update() {
        // Layout:
        //   root extension (key 0x123)
        //     -> branch at [0x1,0x2,0x3] (children 0x3 and 0x4, mask 0b0011000)
        //          0x3 -> leaf (removed below)
        //          0x4 -> extension (key 0x5)
        //                   -> branch [0x1,0x2,0x3,0x4,0x5] with two leaves
        // The lower subtrie for slot 0x12 is initially rooted at [0x1,0x2,0x3].
        // Removing the 0x3 leaf collapses that branch, so the subtrie's root path
        // must be pushed down to the surviving deep branch.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0011000), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 60)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x5])),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]),
                    SparseNode::new_branch(TrieMask::new(0b0011), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Sanity check: the lower subtrie starts out rooted at [0x1, 0x2, 0x3].
        let lower_subtrie_root_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
        assert_matches!(
            trie.lower_subtrie_for_path_mut(&lower_subtrie_root_path),
            Some(subtrie)
            if subtrie.path == lower_subtrie_root_path
        );

        // Remove the leaf stored under [0x1, 0x2, 0x3, 0x3].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        // After the collapse, the subtrie's root path moved down to the surviving
        // deep branch.
        let lower_subtrie = trie.lower_subtries[0x12].as_revealed_ref().unwrap();
        assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]));

        // The root extension now covers the whole merged key 0x12345.
        assert_matches!(
            trie.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, .. })
            if key == &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])
        );

        // The deep branch itself is untouched.
        assert_matches!(
            lower_subtrie.nodes.get(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            Some(SparseNode::Branch { state_mask, .. })
            if state_mask == &TrieMask::new(0b0011)
        );
    }
4800
4801 #[test]
4802 fn test_remove_leaf_remaining_child_needs_reveal() {
4803 let mut trie = new_test_trie(
4811 [
4812 (
4813 Nibbles::default(),
4814 SparseNode::new_branch(
4815 TrieMask::new(0b0011),
4816 &[(0x1, B256::repeat_byte(0xab))],
4817 ),
4818 ),
4819 (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4820 ]
4821 .into_iter(),
4822 );
4823
4824 let revealed_leaf = create_leaf_node(leaf_key([0x3, 0x4], 63).to_vec(), 42);
4826 let mut encoded = Vec::new();
4827 revealed_leaf.encode(&mut encoded);
4828
4829 let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4832 let Err(err) = trie.remove_leaf(&leaf_full_path, NoRevealProvider) else {
4833 panic!("expected error");
4834 };
4835 assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if *path == Nibbles::from_nibbles([0x1]));
4836
4837 trie.reveal_nodes(&mut [ProofTrieNodeV2 {
4839 path: Nibbles::from_nibbles([0x1]),
4840 node: revealed_leaf,
4841 masks: None,
4842 }])
4843 .unwrap();
4844 trie.remove_leaf(&leaf_full_path, NoRevealProvider).unwrap();
4845
4846 let upper_subtrie = &trie.upper_subtrie;
4847
4848 assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4850
4851 assert_matches!(
4853 upper_subtrie.nodes.get(&Nibbles::default()),
4854 Some(SparseNode::Leaf{ key, ..})
4855 if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4856 );
4857
4858 assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4860 }
4861
4862 #[test]
4863 fn test_remove_leaf_root() {
4864 let mut trie = new_test_trie(core::iter::once((
4870 Nibbles::default(),
4871 SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
4872 )));
4873
4874 let provider = NoRevealProvider;
4875
4876 let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
4878 trie.remove_leaf(&leaf_full_path, provider).unwrap();
4879
4880 let upper_subtrie = &trie.upper_subtrie;
4881
4882 assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4884
4885 assert_matches!(upper_subtrie.nodes.get(&Nibbles::default()), Some(SparseNode::Empty));
4887 }
4888
    #[test]
    fn test_remove_leaf_unsets_hash_along_path() {
        // Every node starts in the `Cached` state (it has a memoized hash).
        // Removing a leaf must flip exactly the ancestors on the path to the
        // removed leaf to `Dirty`, while siblings keep their cached hashes.
        let make_revealed = |hash: B256| SparseNodeState::Cached {
            rlp_node: RlpNode::word_rlp(&hash),
            store_in_db_trie: None,
        };
        // Layout:
        //   root branch (children 0x0, 0x1)
        //     0x0 -> extension (key 0x1) -> branch [0x0,0x1] (children 0x2..0x4)
        //              0x2 -> leaf (removed below)
        //              0x3, 0x4 -> sibling leaves (must stay cached)
        //     0x1 -> sibling leaf (must stay cached)
        let mut trie = new_test_trie(
            [
                (
                    Nibbles::default(),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b0011),
                        state: make_revealed(B256::repeat_byte(0x10)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0]),
                    SparseNode::Extension {
                        key: Nibbles::from_nibbles([0x1]),
                        state: make_revealed(B256::repeat_byte(0x20)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1]),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b11100),
                        state: make_revealed(B256::repeat_byte(0x30)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x2]),
                    SparseNode::Leaf {
                        key: leaf_key([0x3, 0x4], 61),
                        state: make_revealed(B256::repeat_byte(0x40)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x3]),
                    SparseNode::Leaf {
                        key: leaf_key([0x5, 0x6], 61),
                        state: make_revealed(B256::repeat_byte(0x50)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x4]),
                    SparseNode::Leaf {
                        key: leaf_key([0x6, 0x7], 61),
                        state: make_revealed(B256::repeat_byte(0x60)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x1]),
                    SparseNode::Leaf {
                        key: leaf_key([0x7, 0x8], 63),
                        state: make_revealed(B256::repeat_byte(0x70)),
                    },
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Removing a path that does not correspond to any stored leaf (it diverges
        // inside the leaf key) must not dirty anything.
        trie.remove_leaf(
            &pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4, 0xF])),
            provider,
        )
        .unwrap();
        for (path, node) in trie.all_nodes() {
            assert!(node.cached_hash().is_some(), "path {path:?} should still have a hash");
        }

        // Now remove the leaf that actually exists at [0x0, 0x1, 0x2].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        let lower_subtrie_10 = trie.lower_subtries[0x01].as_revealed_ref().unwrap();

        // Every ancestor of the removed leaf is dirty now…
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])),
            Some(SparseNode::Extension { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1])),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );

        // …while nodes off the removal path keep their cached hashes.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x3])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x4])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
    }
5015
    #[test]
    fn test_parallel_sparse_trie_root() {
        // Trie shape under test: root extension -> branch at [0x2] -> two leaves at
        // [0x2, 0x0] and [0x2, 0x1], each with a 62-nibble all-zero remaining key.
        let extension_path = Nibbles::new();
        let extension_key = Nibbles::from_nibbles([0x2]);

        let branch_path = Nibbles::from_nibbles([0x2]);

        // Leaf paths are the positions in the trie; full paths are path + key.
        let leaf_1_path = Nibbles::from_nibbles([0x2, 0x0]);
        let leaf_1_key = Nibbles::from_nibbles(vec![0; 62]); let leaf_1_full_path = Nibbles::from_nibbles([vec![0x2, 0x0], vec![0; 62]].concat());

        let leaf_2_path = Nibbles::from_nibbles([0x2, 0x1]);
        let leaf_2_key = Nibbles::from_nibbles(vec![0; 62]); let leaf_2_full_path = Nibbles::from_nibbles([vec![0x2, 0x1], vec![0; 62]].concat());

        let account_1 = create_account(1);
        let account_2 = create_account(2);

        // Leaf values carry the account nonce.
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);

        let branch = create_branch_node(
            extension_key,
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // Build the trie from the root node, then reveal both leaves.
        let mut trie = ParallelSparseTrie::from_root(branch, None, true).unwrap();
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Force every node dirty so `root()` has to recompute all hashes.
        trie.upper_subtrie
            .nodes
            .get_mut(&extension_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.upper_subtrie.nodes.get_mut(&branch_path).unwrap().set_state(SparseNodeState::Dirty);

        let leaf_1_subtrie_idx = path_subtrie_index_unchecked(&leaf_1_path);
        let leaf_2_subtrie_idx = path_subtrie_index_unchecked(&leaf_2_path);

        trie.lower_subtries[leaf_1_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_1_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.lower_subtries[leaf_2_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_2_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);

        // Mark both leaf paths as changed so the root computation visits them.
        trie.prefix_set.insert(leaf_1_full_path);
        trie.prefix_set.insert(leaf_2_full_path);

        let root = trie.root();

        // Reference root from the hash builder over the same two accounts.
        let (hash_builder_root, _, _proof_nodes, _, _) = run_hash_builder(
            [(leaf_1_full_path, account_1), (leaf_2_full_path, account_2)],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_path, leaf_1_full_path, leaf_2_full_path],
        );

        assert_eq!(root, hash_builder_root);

        // Computing the root should have populated a cached hash on every node.
        let leaf_1_subtrie = trie.lower_subtries[leaf_1_subtrie_idx].as_revealed_ref().unwrap();
        let leaf_2_subtrie = trie.lower_subtries[leaf_2_subtrie_idx].as_revealed_ref().unwrap();
        assert!(trie.upper_subtrie.nodes.get(&extension_path).unwrap().cached_hash().is_some());
        assert!(trie.upper_subtrie.nodes.get(&branch_path).unwrap().cached_hash().is_some());
        assert!(leaf_1_subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().is_some());
        assert!(leaf_2_subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().is_some());
    }
5115
5116 #[test]
5117 fn sparse_trie_empty_update_one() {
5118 let ctx = ParallelSparseTrieTestContext;
5119
5120 let key = Nibbles::unpack(B256::with_last_byte(42));
5121 let value = || Account::default();
5122 let value_encoded = || {
5123 let mut account_rlp = Vec::new();
5124 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5125 account_rlp
5126 };
5127
5128 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5129 run_hash_builder(
5130 [(key, value())],
5131 NoopAccountTrieCursor::default(),
5132 Default::default(),
5133 [key],
5134 );
5135
5136 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5137 ctx.update_leaves(&mut sparse, [(key, value_encoded())]);
5138 ctx.assert_with_hash_builder(
5139 &mut sparse,
5140 hash_builder_root,
5141 hash_builder_updates,
5142 hash_builder_proof_nodes,
5143 );
5144 }
5145
5146 #[test]
5147 fn sparse_trie_empty_update_multiple_lower_nibbles() {
5148 let ctx = ParallelSparseTrieTestContext;
5149
5150 let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::<Vec<_>>();
5151 let value = || Account::default();
5152 let value_encoded = || {
5153 let mut account_rlp = Vec::new();
5154 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5155 account_rlp
5156 };
5157
5158 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5159 run_hash_builder(
5160 paths.iter().copied().zip(core::iter::repeat_with(value)),
5161 NoopAccountTrieCursor::default(),
5162 Default::default(),
5163 paths.clone(),
5164 );
5165
5166 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5167 ctx.update_leaves(
5168 &mut sparse,
5169 paths.into_iter().zip(core::iter::repeat_with(value_encoded)),
5170 );
5171
5172 ctx.assert_with_hash_builder(
5173 &mut sparse,
5174 hash_builder_root,
5175 hash_builder_updates,
5176 hash_builder_proof_nodes,
5177 );
5178 }
5179
5180 #[test]
5181 fn sparse_trie_empty_update_multiple_upper_nibbles() {
5182 let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5183 let value = || Account::default();
5184 let value_encoded = || {
5185 let mut account_rlp = Vec::new();
5186 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5187 account_rlp
5188 };
5189
5190 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5191 run_hash_builder(
5192 paths.iter().copied().zip(core::iter::repeat_with(value)),
5193 NoopAccountTrieCursor::default(),
5194 Default::default(),
5195 paths.clone(),
5196 );
5197
5198 let provider = DefaultTrieNodeProvider;
5199 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5200 for path in &paths {
5201 sparse.update_leaf(*path, value_encoded(), &provider).unwrap();
5202 }
5203 let sparse_root = sparse.root();
5204 let sparse_updates = sparse.take_updates();
5205
5206 assert_eq!(sparse_root, hash_builder_root);
5207 assert_eq!(sparse_updates.updated_nodes, hash_builder_updates.account_nodes);
5208 assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
5209 }
5210
5211 #[test]
5212 fn sparse_trie_empty_update_multiple() {
5213 let ctx = ParallelSparseTrieTestContext;
5214
5215 let paths = (0..=255)
5216 .map(|b| {
5217 Nibbles::unpack(if b % 2 == 0 {
5218 B256::repeat_byte(b)
5219 } else {
5220 B256::with_last_byte(b)
5221 })
5222 })
5223 .collect::<Vec<_>>();
5224 let value = || Account::default();
5225 let value_encoded = || {
5226 let mut account_rlp = Vec::new();
5227 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5228 account_rlp
5229 };
5230
5231 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5232 run_hash_builder(
5233 paths.iter().sorted_unstable().copied().zip(core::iter::repeat_with(value)),
5234 NoopAccountTrieCursor::default(),
5235 Default::default(),
5236 paths.clone(),
5237 );
5238
5239 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5240 ctx.update_leaves(
5241 &mut sparse,
5242 paths.iter().copied().zip(core::iter::repeat_with(value_encoded)),
5243 );
5244 ctx.assert_with_hash_builder(
5245 &mut sparse,
5246 hash_builder_root,
5247 hash_builder_updates,
5248 hash_builder_proof_nodes,
5249 );
5250 }
5251
5252 #[test]
5253 fn sparse_trie_empty_update_repeated() {
5254 let ctx = ParallelSparseTrieTestContext;
5255
5256 let paths = (0..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5257 let old_value = Account { nonce: 1, ..Default::default() };
5258 let old_value_encoded = {
5259 let mut account_rlp = Vec::new();
5260 old_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5261 account_rlp
5262 };
5263 let new_value = Account { nonce: 2, ..Default::default() };
5264 let new_value_encoded = {
5265 let mut account_rlp = Vec::new();
5266 new_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5267 account_rlp
5268 };
5269
5270 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5271 run_hash_builder(
5272 paths.iter().copied().zip(core::iter::repeat_with(|| old_value)),
5273 NoopAccountTrieCursor::default(),
5274 Default::default(),
5275 paths.clone(),
5276 );
5277
5278 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5279 ctx.update_leaves(
5280 &mut sparse,
5281 paths.iter().copied().zip(core::iter::repeat(old_value_encoded)),
5282 );
5283 ctx.assert_with_hash_builder(
5284 &mut sparse,
5285 hash_builder_root,
5286 hash_builder_updates,
5287 hash_builder_proof_nodes,
5288 );
5289
5290 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5291 run_hash_builder(
5292 paths.iter().copied().zip(core::iter::repeat(new_value)),
5293 NoopAccountTrieCursor::default(),
5294 Default::default(),
5295 paths.clone(),
5296 );
5297
5298 ctx.update_leaves(
5299 &mut sparse,
5300 paths.iter().copied().zip(core::iter::repeat(new_value_encoded)),
5301 );
5302 ctx.assert_with_hash_builder(
5303 &mut sparse,
5304 hash_builder_root,
5305 hash_builder_updates,
5306 hash_builder_proof_nodes,
5307 );
5308 }
5309
    #[test]
    fn sparse_trie_remove_leaf() {
        let ctx = ParallelSparseTrieTestContext;
        let provider = DefaultTrieNodeProvider;
        let mut sparse = ParallelSparseTrie::default();

        let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec();

        // Insert six leaves sharing the 0x5 prefix. The resulting structure (asserted
        // below) has branches at [0x5], [0x5,0x0,0x2,0x3], [0x5,0x3] and [0x5,0x3,0x3].
        ctx.update_leaves(
            &mut sparse,
            [
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
                    value.clone(),
                ),
                (pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])), value),
            ],
        );

        // Full expected structure after all inserts.
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x1, 0x3], 62))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove the only leaf under child 2 of the branch at [0x5]: the branch's
        // state mask shrinks from 0b1101 to 0b1001 and the [0x5,0x2] leaf disappears.
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove leaf [0x5,0x0,0x2,0x3,0x1]: branch [0x5,0x0,0x2,0x3] is left with one
        // child, so the extension at [0x5,0x0], the branch and the remaining leaf
        // collapse into a single leaf at [0x5,0x0] with key [0x2,0x3,0x3].
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove leaf [0x5,0x3,0x1,...]: branch [0x5,0x3] is left with only child 3
        // (a branch), so it is replaced by an extension with key [0x3].
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove leaf [0x5,0x3,0x3,0x2,0x0]: branch [0x5,0x3,0x3] is left with one
        // child, so the extension at [0x5,0x3] and the surviving leaf collapse into
        // a leaf at [0x5,0x3] with key [0x3,0x0,0x2].
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_leaf(leaf_key([0x3, 0x0, 0x2], 62))
                ),
            ])
        );

        // Remove leaf [0x5,0x0,...]: only one leaf remains, so the whole trie
        // collapses to a single leaf at the root carrying the full remaining path.
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(
                Nibbles::default(),
                SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([
                    0x5, 0x3, 0x3, 0x0, 0x2
                ])))
            ),])
        );

        // Remove the last leaf: the trie becomes empty.
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty)])
        );
    }
5625
5626 #[test]
5627 fn sparse_trie_remove_leaf_blinded() {
5628 let leaf = LeafNode::new(
5629 Nibbles::default(),
5630 alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5631 );
5632 let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5633 Nibbles::default(),
5634 vec![
5635 RlpNode::word_rlp(&B256::repeat_byte(1)),
5636 RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5637 ],
5638 TrieMask::new(0b11),
5639 None,
5640 ));
5641
5642 let provider = DefaultTrieNodeProvider;
5643 let mut sparse = ParallelSparseTrie::from_root(
5644 branch.clone(),
5645 Some(BranchNodeMasks {
5646 hash_mask: TrieMask::new(0b01),
5647 tree_mask: TrieMask::default(),
5648 }),
5649 false,
5650 )
5651 .unwrap();
5652
5653 sparse
5659 .reveal_nodes(&mut [
5660 ProofTrieNodeV2 {
5661 path: Nibbles::default(),
5662 node: branch,
5663 masks: Some(BranchNodeMasks {
5664 hash_mask: TrieMask::default(),
5665 tree_mask: TrieMask::new(0b01),
5666 }),
5667 },
5668 ProofTrieNodeV2 {
5669 path: Nibbles::from_nibbles([0x1]),
5670 node: TrieNodeV2::Leaf(leaf),
5671 masks: None,
5672 },
5673 ])
5674 .unwrap();
5675
5676 assert_matches!(
5678 sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x0])), &provider).map_err(|e| e.into_kind()),
5679 Err(SparseTrieErrorKind::BlindedNode(path)) if path == Nibbles::from_nibbles([0x0])
5680 );
5681 }
5682
    #[test]
    fn sparse_trie_remove_leaf_non_existent() {
        // Root branch with two children: child 0 is only a hash (blinded), child 1
        // is a fully revealed leaf.
        let leaf = LeafNode::new(
            Nibbles::default(),
            alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)),
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
            ],
            TrieMask::new(0b11),
            None,
        ));

        let provider = DefaultTrieNodeProvider;
        let mut sparse = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Reveal the root branch and its leaf child.
        sparse
            .reveal_nodes(&mut [
                ProofTrieNodeV2 {
                    path: Nibbles::default(),
                    node: branch,
                    masks: Some(BranchNodeMasks {
                        hash_mask: TrieMask::default(),
                        tree_mask: TrieMask::new(0b01),
                    }),
                },
                ProofTrieNodeV2 {
                    path: Nibbles::from_nibbles([0x1]),
                    node: TrieNodeV2::Leaf(leaf),
                    masks: None,
                },
            ])
            .unwrap();

        // Removing a key under nibble 2 — outside the branch's 0b11 state mask —
        // must succeed as a no-op and leave the trie byte-for-byte unchanged.
        let sparse_old = sparse.clone();
        assert_matches!(
            sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x2])), &provider),
            Ok(())
        );
        assert_eq!(sparse, sparse_old);
    }
5741
    #[test]
    fn sparse_trie_fuzz() {
        // Number of random nibbles per generated key (padded to full width by
        // `pad_nibbles_right` in the strategy below).
        const KEY_NIBBLES_LEN: usize = 3;

        // Applies each (insert-batch, delete-set) pair to a sparse trie and checks
        // root, node updates, and proof nodes against a database-backed hash builder
        // after every batch and after every deletion round.
        fn test(updates: Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)>) {
            {
                // `state` mirrors the expected key/account contents of the trie.
                let mut state = BTreeMap::default();
                let default_provider = DefaultTrieNodeProvider;
                let provider_factory = create_test_provider_factory();
                let mut sparse = ParallelSparseTrie::default().with_updates(true);

                for (update, keys_to_delete) in updates {
                    // Apply the batch of inserts/overwrites to the sparse trie.
                    for (key, account) in update.clone() {
                        let account = account.into_trie_account(EMPTY_ROOT_HASH);
                        let mut account_rlp = Vec::new();
                        account.encode(&mut account_rlp);
                        sparse.update_leaf(key, account_rlp, &default_provider).unwrap();
                    }
                    // Compute root/updates on a clone so `sparse` itself keeps
                    // accumulating changes across iterations.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    state.extend(update);
                    // Reference result from the hash builder, reading trie nodes
                    // persisted in previous iterations via the DB cursor.
                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                Default::default(),
                                state.keys().copied(),
                            )
                        });

                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Persist the hash builder's updates so the next round's cursor
                    // sees them.
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    assert_eq!(sparse_root, hash_builder_root);
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );

                    // Delete the randomly chosen keys from both the mirror state and
                    // the sparse trie (every key is guaranteed to exist).
                    for key in &keys_to_delete {
                        state.remove(key).unwrap();
                        sparse.remove_leaf(key, &default_provider).unwrap();
                    }

                    // Re-check against the hash builder after the deletions, passing
                    // the deleted keys as destroyed accounts.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                keys_to_delete
                                    .iter()
                                    .map(|nibbles| B256::from_slice(&nibbles.pack()))
                                    .collect(),
                                state.keys().copied(),
                            )
                        });

                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    assert_eq!(sparse_root, hash_builder_root);
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );
                }
            }
        }

        // Pairs each generated update batch with a random set (half the batch size)
        // of previously-inserted keys to delete afterwards.
        fn transform_updates(
            updates: Vec<BTreeMap<Nibbles, Account>>,
            mut rng: impl rand::Rng,
        ) -> Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)> {
            let mut keys = BTreeSet::new();
            updates
                .into_iter()
                .map(|update| {
                    keys.extend(update.keys().copied());

                    let keys_to_delete_len = update.len() / 2;
                    let keys_to_delete = (0..keys_to_delete_len)
                        .map(|_| {
                            // `take` removes the key from `keys` so it is only ever
                            // scheduled for deletion once.
                            let key =
                                *rand::seq::IteratorRandom::choose(keys.iter(), &mut rng).unwrap();
                            keys.take(&key).unwrap()
                        })
                        .collect();

                    (update, keys_to_delete)
                })
                .collect::<Vec<_>>()
        }

        proptest!(ProptestConfig::with_cases(10), |(
            updates in proptest::collection::vec(
                proptest::collection::btree_map(
                    any_with::<Nibbles>(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles_right),
                    arb::<Account>(),
                    1..50,
                ),
                1..50,
            ).prop_perturb(transform_updates)
        )| {
            test(updates)
        });
    }
5898
5899 #[test]
5900 fn sparse_trie_two_leaves_at_lower_roots() {
5901 let provider = DefaultTrieNodeProvider;
5902 let mut trie = ParallelSparseTrie::default().with_updates(true);
5903 let key_50 = Nibbles::unpack(hex!(
5904 "0x5000000000000000000000000000000000000000000000000000000000000000"
5905 ));
5906 let key_51 = Nibbles::unpack(hex!(
5907 "0x5100000000000000000000000000000000000000000000000000000000000000"
5908 ));
5909
5910 let account = Account::default().into_trie_account(EMPTY_ROOT_HASH);
5911 let mut account_rlp = Vec::new();
5912 account.encode(&mut account_rlp);
5913
5914 trie.update_leaf(key_50, account_rlp.clone(), &provider).unwrap();
5916 trie.root();
5917
5918 trie.update_leaf(key_51, account_rlp.clone(), &provider).unwrap();
5920
5921 let expected_root =
5922 hex!("0xdaf0ef9f91a2f179bb74501209effdb5301db1697bcab041eca2234b126e25de");
5923 let root = trie.root();
5924 assert_eq!(root, expected_root);
5925 assert_eq!(SparseTrieUpdates::default(), trie.take_updates());
5926 }
5927
5928 #[test]
5940 fn sparse_trie_reveal_node_1() {
5941 let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00]));
5942 let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01]));
5943 let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x02]));
5944 let value = || Account::default();
5945 let value_encoded = || {
5946 let mut account_rlp = Vec::new();
5947 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5948 account_rlp
5949 };
5950
5951 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5953 run_hash_builder(
5954 [(key1(), value()), (key3(), value())],
5955 NoopAccountTrieCursor::default(),
5956 Default::default(),
5957 [Nibbles::default()],
5958 );
5959
5960 let provider = DefaultTrieNodeProvider;
5961 let masks = match (
5962 branch_node_hash_masks.get(&Nibbles::default()).copied(),
5963 branch_node_tree_masks.get(&Nibbles::default()).copied(),
5964 ) {
5965 (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
5966 (Some(h), None) => {
5967 Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
5968 }
5969 (None, Some(t)) => {
5970 Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
5971 }
5972 (None, None) => None,
5973 };
5974 let mut sparse = ParallelSparseTrie::from_root(
5975 TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
5976 masks,
5977 false,
5978 )
5979 .unwrap();
5980
5981 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
5983 run_hash_builder(
5984 [(key1(), value()), (key3(), value())],
5985 NoopAccountTrieCursor::default(),
5986 Default::default(),
5987 [key1()],
5988 );
5989 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
5990 .nodes_sorted()
5991 .into_iter()
5992 .map(|(path, node)| {
5993 let hash_mask = branch_node_hash_masks.get(&path).copied();
5994 let tree_mask = branch_node_tree_masks.get(&path).copied();
5995 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
5996 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
5997 })
5998 .collect();
5999 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6000
6001 assert_matches!(
6003 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6004 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b101)
6005 );
6006
6007 sparse.update_leaf(key2(), value_encoded(), &provider).unwrap();
6009
6010 assert_matches!(
6012 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6013 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6014 );
6015
6016 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6018 run_hash_builder(
6019 [(key1(), value()), (key3(), value())],
6020 NoopAccountTrieCursor::default(),
6021 Default::default(),
6022 [key3()],
6023 );
6024 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6025 .nodes_sorted()
6026 .into_iter()
6027 .map(|(path, node)| {
6028 let hash_mask = branch_node_hash_masks.get(&path).copied();
6029 let tree_mask = branch_node_tree_masks.get(&path).copied();
6030 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6031 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6032 })
6033 .collect();
6034 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6035
6036 assert_matches!(
6038 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6039 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6040 );
6041
6042 let (_, _, hash_builder_proof_nodes, _, _) = run_hash_builder(
6045 [(key1(), value()), (key2(), value()), (key3(), value())],
6046 NoopAccountTrieCursor::default(),
6047 Default::default(),
6048 [key1(), key2(), key3()],
6049 );
6050
6051 assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
6052 }
6053
6054 #[test]
6065 fn sparse_trie_reveal_node_2() {
6066 let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x00]));
6067 let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x01]));
6068 let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x02]));
6069 let value = || Account::default();
6070
6071 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6073 run_hash_builder(
6074 [(key1(), value()), (key2(), value()), (key3(), value())],
6075 NoopAccountTrieCursor::default(),
6076 Default::default(),
6077 [Nibbles::default()],
6078 );
6079
6080 let provider = DefaultTrieNodeProvider;
6081 let masks = match (
6082 branch_node_hash_masks.get(&Nibbles::default()).copied(),
6083 branch_node_tree_masks.get(&Nibbles::default()).copied(),
6084 ) {
6085 (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
6086 (Some(h), None) => {
6087 Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
6088 }
6089 (None, Some(t)) => {
6090 Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
6091 }
6092 (None, None) => None,
6093 };
6094 let mut sparse = ParallelSparseTrie::from_root(
6095 TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
6096 masks,
6097 false,
6098 )
6099 .unwrap();
6100
6101 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6104 run_hash_builder(
6105 [(key1(), value()), (key2(), value()), (key3(), value())],
6106 NoopAccountTrieCursor::default(),
6107 Default::default(),
6108 [key1(), Nibbles::from_nibbles_unchecked([0x01])],
6109 );
6110 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6111 .nodes_sorted()
6112 .into_iter()
6113 .map(|(path, node)| {
6114 let hash_mask = branch_node_hash_masks.get(&path).copied();
6115 let tree_mask = branch_node_tree_masks.get(&path).copied();
6116 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6117 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6118 })
6119 .collect();
6120 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6121
6122 assert_matches!(
6124 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6125 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b11)
6126 );
6127
6128 sparse.remove_leaf(&key1(), &provider).unwrap();
6130
6131 assert_eq!(
6133 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6134 Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6135 );
6136
6137 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6139 run_hash_builder(
6140 [(key1(), value()), (key2(), value()), (key3(), value())],
6141 NoopAccountTrieCursor::default(),
6142 Default::default(),
6143 [key2()],
6144 );
6145 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6146 .nodes_sorted()
6147 .into_iter()
6148 .map(|(path, node)| {
6149 let hash_mask = branch_node_hash_masks.get(&path).copied();
6150 let tree_mask = branch_node_tree_masks.get(&path).copied();
6151 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6152 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6153 })
6154 .collect();
6155 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6156
6157 assert_eq!(
6159 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6160 Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6161 );
6162 }
6163
    #[test]
    fn sparse_trie_reveal_node_3() {
        // Two keys sharing the 0x00 first nibble plus one diverging key, all
        // padded to full 64-nibble account paths.
        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x01]));
        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x02]));
        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x00]));
        let value = || Account::default();
        // RLP encoding of the account as stored in a leaf.
        let value_encoded = || {
            let mut account_rlp = Vec::new();
            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
            account_rlp
        };

        // Build a proof for key1/key2 targeting the root path.
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [Nibbles::default()],
            );

        let mut nodes = Vec::new();

        // Attach the recorded hash/tree masks to each proof node.
        for (path, node) in hash_builder_proof_nodes.nodes_sorted() {
            let hash_mask = branch_node_hash_masks.get(&path).copied();
            let tree_mask = branch_node_tree_masks.get(&path).copied();
            let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
            nodes.push((path, TrieNode::decode(&mut &node[..]).unwrap(), masks));
        }

        // `from_sorted_trie_nodes` requires depth-first ordering.
        nodes.sort_unstable_by(|a, b| reth_trie_common::depth_first_cmp(&a.0, &b.0));

        let nodes = ProofTrieNodeV2::from_sorted_trie_nodes(nodes);

        let provider = DefaultTrieNodeProvider;
        let mut sparse =
            ParallelSparseTrie::from_root(nodes[0].node.clone(), nodes[0].masks, false).unwrap();

        // With only key1/key2 present the root is an extension over the
        // shared 0x00 nibble.
        assert_matches!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, state: SparseNodeState::Dirty }) if *key == Nibbles::from_nibbles([0x00])
        );

        // Inserting key3 (first nibble 0x01) splits the extension into a
        // two-child root branch.
        sparse.update_leaf(key3(), value_encoded(), &provider).unwrap();

        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );

        // Reveal a second proof (targeting key1) generated WITHOUT key3; the
        // already-updated root branch must not be overwritten by stale data.
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [key1()],
            );
        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
            .nodes_sorted()
            .into_iter()
            .map(|(path, node)| {
                let hash_mask = branch_node_hash_masks.get(&path).copied();
                let tree_mask = branch_node_tree_masks.get(&path).copied();
                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
            })
            .collect();
        sparse.reveal_nodes(&mut revealed_nodes).unwrap();

        // Root branch survives the reveal unchanged.
        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );
    }
6251
    #[test]
    fn test_update_leaf_cross_level() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Insert the first leaf; a lone entry lives as a leaf at the upper
        // subtrie root.
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x3, 0x4, 0x5], 1);
        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_leaf(
                &Nibbles::default(),
                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4, 0x5])),
            )
            .has_value(&leaf1_path, &value1);

        // A second leaf sharing only the first nibble creates a branch at
        // [0x1]; both values migrate out of the upper subtrie.
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
            .has_no_value(&leaf1_path)
            .has_no_value(&leaf2_path);

        // A third leaf under prefix [0x1, 0x2] populates that lower subtrie
        // with a branch and two leaves.
        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x4, 0x5], 3);
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // A fourth leaf under [0x1, 0x3] fills a second lower subtrie.
        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x3, 0x3, 0x4], 4);
        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x3]))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf4_path, &value4);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // The upper subtrie keeps only structural nodes (extension + branch);
        // every leaf value now lives in a lower subtrie.
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1]))
            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
            .has_no_value(&leaf1_path)
            .has_no_value(&leaf2_path)
            .has_no_value(&leaf3_path)
            .has_no_value(&leaf4_path);
    }
6334
6335 #[test]
6336 fn test_update_leaf_split_at_level_boundary() {
6337 let ctx = ParallelSparseTrieTestContext;
6338 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6339
6340 let (first_leaf_path, first_value) = ctx.create_test_leaf([0x1, 0x2, 0x2, 0x4], 1);
6355
6356 trie.update_leaf(first_leaf_path, first_value.clone(), DefaultTrieNodeProvider).unwrap();
6357
6358 ctx.assert_upper_subtrie(&trie)
6360 .has_leaf(
6361 &Nibbles::default(),
6362 &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x2, 0x4])),
6363 )
6364 .has_value(&first_leaf_path, &first_value);
6365
6366 let (second_leaf_path, second_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
6368
6369 trie.update_leaf(second_leaf_path, second_value.clone(), DefaultTrieNodeProvider).unwrap();
6370
6371 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6373 .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x2, 0x3])
6374 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x2]), &leaf_key([0x4], 61))
6375 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6376 .has_value(&first_leaf_path, &first_value)
6377 .has_value(&second_leaf_path, &second_value);
6378
6379 ctx.assert_upper_subtrie(&trie)
6381 .has_no_value(&first_leaf_path)
6382 .has_no_value(&second_leaf_path);
6383 }
6384
    #[test]
    fn test_update_subtrie_with_multiple_leaves() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Four leaves sharing prefix [0x1, 0x2], splitting pairwise at the
        // third nibble (0x3 vs 0x4).
        let leaves = ctx.create_test_leaves(&[
            &[0x1, 0x2, 0x3, 0x4],
            &[0x1, 0x2, 0x3, 0x5],
            &[0x1, 0x2, 0x4, 0x6],
            &[0x1, 0x2, 0x4, 0x7],
        ]);

        ctx.update_leaves(&mut trie, leaves.clone());

        // The shared prefix becomes a root extension in the upper subtrie.
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // The [0x1, 0x2] lower subtrie contains the nested branch structure
        // and all four values.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &[0x6, 0x7])
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // Overwriting an existing leaf keeps the structure, swaps the value.
        let updated_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
        let (_, updated_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 100);

        trie.update_leaf(updated_path, updated_value.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_value(&updated_path, &updated_value)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // A brand-new leaf widens the [0x1, 0x2, 0x3] branch to three children.
        let (new_leaf_path, new_leaf_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 200);

        trie.update_leaf(new_leaf_path, new_leaf_value.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6])
            .has_value(&new_leaf_path, &new_leaf_value);
    }
6452
6453 #[test]
6454 fn test_update_subtrie_extension_node_subtrie() {
6455 let ctx = ParallelSparseTrieTestContext;
6456 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6457
6458 let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x3, 0x5]]);
6467
6468 ctx.update_leaves(&mut trie, leaves.clone());
6470
6471 ctx.assert_upper_subtrie(&trie)
6473 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6474
6475 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6477 .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
6478 .has_value(&leaves[0].0, &leaves[0].1)
6479 .has_value(&leaves[1].0, &leaves[1].1);
6480 }
6481
6482 #[test]
6483 fn update_subtrie_extension_node_cross_level() {
6484 let ctx = ParallelSparseTrieTestContext;
6485 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6486
6487 let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x4, 0x5]]);
6497
6498 ctx.update_leaves(&mut trie, leaves.clone());
6500
6501 ctx.assert_upper_subtrie(&trie)
6503 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));
6504
6505 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6507 .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6508 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6509 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
6510 .has_value(&leaves[0].0, &leaves[0].1)
6511 .has_value(&leaves[1].0, &leaves[1].1);
6512 }
6513
6514 #[test]
6515 fn test_update_single_nibble_paths() {
6516 let ctx = ParallelSparseTrieTestContext;
6517 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6518
6519 let (leaf1_path, value1) = ctx.create_test_leaf([0x0], 1);
6531 let (leaf2_path, value2) = ctx.create_test_leaf([0x1], 2);
6532 let (leaf3_path, value3) = ctx.create_test_leaf([0x2], 3);
6533 let (leaf4_path, value4) = ctx.create_test_leaf([0x3], 4);
6534
6535 ctx.update_leaves(
6536 &mut trie,
6537 [
6538 (leaf1_path, value1.clone()),
6539 (leaf2_path, value2.clone()),
6540 (leaf3_path, value3.clone()),
6541 (leaf4_path, value4.clone()),
6542 ],
6543 );
6544
6545 ctx.assert_upper_subtrie(&trie)
6547 .has_branch(&Nibbles::default(), &[0x0, 0x1, 0x2, 0x3])
6548 .has_leaf(&Nibbles::from_nibbles([0x0]), &leaf_key([], 63))
6549 .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([], 63))
6550 .has_leaf(&Nibbles::from_nibbles([0x2]), &leaf_key([], 63))
6551 .has_leaf(&Nibbles::from_nibbles([0x3]), &leaf_key([], 63))
6552 .has_value(&leaf1_path, &value1)
6553 .has_value(&leaf2_path, &value2)
6554 .has_value(&leaf3_path, &value3)
6555 .has_value(&leaf4_path, &value4);
6556 }
6557
    #[test]
    fn test_update_deep_extension_chain() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Two leaves sharing a six-nibble run of 0x1 and differing only in
        // the seventh nibble; the shared run spans the upper/lower boundary.
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0], 1);
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1], 2);

        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);

        // The entire shared run collapses into one root extension.
        ctx.assert_upper_subtrie(&trie).has_extension(
            &Nibbles::default(),
            &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
        );

        // The divergence branch and both leaves live in the [0x1, 0x1]
        // lower subtrie.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x1]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]), &[0x0, 0x1])
            .has_leaf(
                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0]),
                &leaf_key([], 57),
            )
            .has_leaf(
                &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
                &leaf_key([], 57),
            )
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);
    }
6601
    #[test]
    fn test_update_branch_with_all_nibbles() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Sixteen leaves under prefix [0xA, 0x0] — one per possible nibble —
        // so the divergence branch ends up fully populated.
        let mut leaves = Vec::new();
        for nibble in 0x0..=0xF {
            let (path, value) = ctx.create_test_leaf([0xA, 0x0, nibble], nibble as u64 + 1);
            leaves.push((path, value));
        }

        ctx.update_leaves(&mut trie, leaves.iter().cloned());

        // The shared prefix [0xA, 0x0] is a root extension in the upper subtrie.
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0x0]));

        // The lower subtrie holds a branch with all 16 children set.
        let mut subtrie_assert =
            ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0x0])).has_branch(
                &Nibbles::from_nibbles([0xA, 0x0]),
                &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF],
            );

        // Every child is a leaf with the expected key and stored value.
        for (i, (path, value)) in leaves.iter().enumerate() {
            subtrie_assert = subtrie_assert
                .has_leaf(&Nibbles::from_nibbles([0xA, 0x0, i as u8]), &leaf_key([], 61))
                .has_value(path, value);
        }
    }
6650
6651 #[test]
6652 fn test_update_creates_multiple_subtries() {
6653 let ctx = ParallelSparseTrieTestContext;
6654 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6655
6656 let leaves = [
6672 ctx.create_test_leaf([0x0, 0x0, 0x1, 0x2], 1),
6673 ctx.create_test_leaf([0x0, 0x1, 0x3, 0x4], 2),
6674 ctx.create_test_leaf([0x0, 0x2, 0x5, 0x6], 3),
6675 ctx.create_test_leaf([0x0, 0x3, 0x7, 0x8], 4),
6676 ];
6677
6678 ctx.update_leaves(&mut trie, leaves.iter().cloned());
6680
6681 ctx.assert_upper_subtrie(&trie)
6683 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x0]))
6684 .has_branch(&Nibbles::from_nibbles([0x0]), &[0x0, 0x1, 0x2, 0x3]);
6685
6686 for (i, (leaf_path, leaf_value)) in leaves.iter().enumerate() {
6688 let subtrie_path = Nibbles::from_nibbles([0x0, i as u8]);
6689 let full_path: [u8; 4] = match i {
6690 0 => [0x0, 0x0, 0x1, 0x2],
6691 1 => [0x0, 0x1, 0x3, 0x4],
6692 2 => [0x0, 0x2, 0x5, 0x6],
6693 3 => [0x0, 0x3, 0x7, 0x8],
6694 _ => unreachable!(),
6695 };
6696 ctx.assert_subtrie(&trie, subtrie_path)
6697 .has_leaf(&subtrie_path, &leaf_key(&full_path[2..], 62))
6698 .has_value(leaf_path, leaf_value);
6699 }
6700 }
6701
    #[test]
    fn test_update_extension_to_branch_transformation() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Two leaves sharing [0xF, 0xF, 0x0] plus a third that will split the
        // resulting extension after just one nibble.
        let (leaf1_path, value1) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x1], 1);
        let (leaf2_path, value2) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x2], 2);
        let (leaf3_path, value3) = ctx.create_test_leaf([0xF, 0x0, 0x0, 0x3], 3);

        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);

        // Before the split: one long root extension.
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF, 0xF, 0x0]));

        ctx.update_leaves(&mut trie, [(leaf3_path, value3.clone())]);

        // After the split: a shortened extension followed by a branch at [0xF].
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF]))
            .has_branch(&Nibbles::from_nibbles([0xF]), &[0x0, 0xF]);

        // The original pair remains intact under the [0xF, 0xF] subtrie.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
            .has_branch(&Nibbles::from_nibbles([0xF, 0xF, 0x0]), &[0x1, 0x2])
            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x1]), &leaf_key([], 60))
            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x2]), &leaf_key([], 60))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);

        // The new leaf sits alone in the [0xF, 0x0] subtrie.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0x0]))
            .has_leaf(&Nibbles::from_nibbles([0xF, 0x0]), &leaf_key([0x0, 0x3], 62))
            .has_value(&leaf3_path, &value3);
    }
6752
6753 #[test]
6754 fn test_update_long_shared_prefix_at_boundary() {
6755 let ctx = ParallelSparseTrieTestContext;
6756 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6757
6758 let (leaf1_path, value1) = ctx.create_test_leaf([0xA, 0xB, 0xC, 0xD, 0xE, 0xF], 1);
6772 let (leaf2_path, value2) = ctx.create_test_leaf([0xA, 0xB, 0xD, 0xE, 0xF, 0x0], 2);
6773
6774 trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
6775 trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6776
6777 ctx.assert_upper_subtrie(&trie)
6779 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0xB]));
6780
6781 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0xB]))
6783 .has_branch(&Nibbles::from_nibbles([0xA, 0xB]), &[0xC, 0xD])
6784 .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xC]), &leaf_key([0xD, 0xE, 0xF], 61))
6785 .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xD]), &leaf_key([0xE, 0xF, 0x0], 61))
6786 .has_value(&leaf1_path, &value1)
6787 .has_value(&leaf2_path, &value2);
6788 }
6789
6790 #[test]
6791 fn test_update_branch_to_extension_collapse() {
6792 let ctx = ParallelSparseTrieTestContext;
6793 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6794
6795 let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6821 let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6822 let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6823
6824 trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
6825 trie.update_leaf(leaf2_path, value2, DefaultTrieNodeProvider).unwrap();
6826 trie.update_leaf(leaf3_path, value3, DefaultTrieNodeProvider).unwrap();
6827
6828 ctx.assert_upper_subtrie(&trie).has_branch(&Nibbles::default(), &[0x1, 0x2]);
6830
6831 let (new_leaf1_path, new_value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 10);
6834 let (new_leaf2_path, new_value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 11);
6835 let (new_leaf3_path, new_value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 12);
6836
6837 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6839 trie.update_leaf(new_leaf1_path, new_value1.clone(), DefaultTrieNodeProvider).unwrap();
6840 trie.update_leaf(new_leaf2_path, new_value2.clone(), DefaultTrieNodeProvider).unwrap();
6841 trie.update_leaf(new_leaf3_path, new_value3.clone(), DefaultTrieNodeProvider).unwrap();
6842
6843 ctx.assert_upper_subtrie(&trie)
6845 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6846
6847 ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);
6849
6850 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6852 .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6]) .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &leaf_key([], 60))
6854 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
6855 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x6]), &leaf_key([], 60))
6856 .has_value(&new_leaf1_path, &new_value1)
6857 .has_value(&new_leaf2_path, &new_value2)
6858 .has_value(&new_leaf3_path, &new_value3);
6859 }
6860
    #[test]
    fn test_update_shared_prefix_patterns() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // One leaf under 0x1 plus two leaves sharing the prefix [0x2, 0x3].
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
        let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
        let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);

        trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        // The root branch splits 0x1 / 0x2; the lone 0x1 entry stays a leaf
        // in the upper subtrie while 0x2 continues through an extension.
        ctx.assert_upper_subtrie(&trie)
            .has_branch(&Nibbles::default(), &[0x1, 0x2])
            .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([0x2, 0x3, 0x4], 63))
            .has_extension(&Nibbles::from_nibbles([0x2]), &Nibbles::from_nibbles([0x3]));

        // The [0x2, 0x3] subtrie holds the divergence branch and both
        // shared-prefix leaves with their values.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x2, 0x3]))
            .has_branch(&Nibbles::from_nibbles([0x2, 0x3]), &[0x4, 0x5])
            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x4]), &leaf_key([0x5], 61))
            .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x5]), &leaf_key([0x6], 61))
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);
    }
6903
    #[test]
    fn test_progressive_branch_creation() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Step 1: a single leaf occupies the upper subtrie root.
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x5], 1);
        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_leaf(
                &Nibbles::default(),
                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            )
            .has_value(&leaf1_path, &value1);

        // Step 2: divergence at the fifth nibble creates an extension over
        // [0x1, 0x2, 0x3, 0x4] and a branch inside the lower subtrie.
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x6], 2);
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));

        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3, 0x4]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]), &leaf_key([], 59))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x6]), &leaf_key([], 59))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);

        // Step 3: divergence at the fourth nibble shortens the root extension
        // and stacks a second branch above the first.
        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 3);
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));

        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // Step 4: divergence at the third nibble moves the subtrie root up to
        // exactly the [0x1, 0x2] boundary, yielding a three-deep branch chain.
        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x2, 0x4], 4);
        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([], 61))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3)
            .has_value(&leaf4_path, &value4);
    }
7014
7015 #[test]
7016 fn test_update_max_depth_paths() {
7017 let ctx = ParallelSparseTrieTestContext;
7018 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
7019
7020 let mut path1_nibbles = vec![0xF; 63];
7032 path1_nibbles.push(0x0);
7033 let mut path2_nibbles = vec![0xF; 63];
7034 path2_nibbles.push(0x1);
7035
7036 let (leaf1_path, value1) = ctx.create_test_leaf(&path1_nibbles, 1);
7037 let (leaf2_path, value2) = ctx.create_test_leaf(&path2_nibbles, 2);
7038
7039 trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
7040 trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
7041
7042 let extension_key = vec![0xF; 63];
7044 ctx.assert_upper_subtrie(&trie)
7045 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles(&extension_key));
7046
7047 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
7049 .has_branch(&Nibbles::from_nibbles(&path1_nibbles[..63]), &[0x0, 0x1])
7050 .has_value(&leaf1_path, &value1)
7051 .has_value(&leaf2_path, &value2);
7052 }
7053
    #[test]
    fn test_hoodie_block_1_data() {
        // Regression test built from captured block data: reveal a root
        // branch, one child branch and one leaf, update the leaf, and verify
        // the recomputed state root against a known-good hash.

        // The 16 child hashes of the fully populated root branch.
        let root_branch_stack = vec![
            hex!("a0550b6aba4dd4582a2434d2cbdad8d3007d09f622d7a6e6eaa7a49385823c2fa2"),
            hex!("a04788a4975a9e1efd29b834fd80fdfe8a57cc1b1c5ace6d30ce5a36a15e0092b3"),
            hex!("a093aeccf87da304e6f7d09edc5d7bd3a552808866d2149dd0940507a8f9bfa910"),
            hex!("a08b5b423ba68d0dec2eca1f408076f9170678505eb4a5db2abbbd83bb37666949"),
            hex!("a08592f62216af4218098a78acad7cf472a727fb55e6c27d3cfdf2774d4518eb83"),
            hex!("a0ef02aeee845cb64c11f85edc1a3094227c26445952554b8a9248915d80c746c3"),
            hex!("a0df2529ee3a1ce4df5a758cf17e6a86d0fb5ea22ab7071cf60af6412e9b0a428a"),
            hex!("a0acaa1092db69cd5a63676685827b3484c4b80dc1d3361f6073bbb9240101e144"),
            hex!("a09c3f2bb2a729d71f246a833353ade65667716bb330e0127a3299a42d11200f93"),
            hex!("a0ce978470f4c0b1f8069570563a14d2b79d709add2db4bf22dd9b6aed3271c566"),
            hex!("a095f783cd1d464a60e3c8adcadc28c6eb9fec7306664df39553be41dccc909606"),
            hex!("a0a9083f5fb914b255e1feb5d951a4dfddacf3c8003ef1d1ec6a13bb6ba5b2ac62"),
            hex!("a0fec113d537d8577cd361e0cabf5e95ef58f1cc34318292fdecce9fae57c3e094"),
            hex!("a08b7465f5fe8b3e3c0d087cb7521310d4065ef2a0ee43bf73f68dee8a5742b3dd"),
            hex!("a0c589aa1ae3d5fd87d8640957f7d5184a4ac06f393b453a8e8ed7e8fba0d385c8"),
            hex!("a0b516d6f3352f87beab4ed6e7322f191fc7a147686500ef4de7dd290ad784ef51"),
        ];

        let root_branch_rlp_stack: Vec<RlpNode> = root_branch_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        // All 16 children present (state mask 0xFFFF).
        let root_branch_node = BranchNodeV2::new(
            Default::default(),
            root_branch_rlp_stack,
            TrieMask::new(0b1111111111111111), None,
        );

        let root_branch_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b1111111111111111),
            tree_mask: TrieMask::new(0b1111111111111111),
        });

        let mut trie = ParallelSparseTrie::from_root(
            TrieNodeV2::Branch(root_branch_node),
            root_branch_masks,
            true,
        )
        .unwrap();

        // Child branch revealed at path [0x3], again fully populated.
        let branch_0x3_stack = vec![
            hex!("a09da7d9755fe0c558b3c3de9fdcdf9f28ae641f38c9787b05b73ab22ae53af3e2"),
            hex!("a0d9990bf0b810d1145ecb2b011fd68c63cc85564e6724166fd4a9520180706e5f"),
            hex!("a0f60eb4b12132a40df05d9bbdb88bbde0185a3f097f3c76bf4200c23eda26cf86"),
            hex!("a0ca976997ddaf06f18992f6207e4f6a05979d07acead96568058789017cc6d06b"),
            hex!("a04d78166b48044fdc28ed22d2fd39c8df6f8aaa04cb71d3a17286856f6893ff83"),
            hex!("a021d4f90c34d3f1706e78463b6482bca77a3aa1cd059a3f326c42a1cfd30b9b60"),
            hex!("a0fc3b71c33e2e6b77c5e494c1db7fdbb447473f003daf378c7a63ba9bf3f0049d"),
            hex!("a0e33ed2be194a3d93d343e85642447c93a9d0cfc47a016c2c23d14c083be32a7c"),
            hex!("a07b8e7a21c1178d28074f157b50fca85ee25c12568ff8e9706dcbcdacb77bf854"),
            hex!("a0973274526811393ea0bf4811ca9077531db00d06b86237a2ecd683f55ba4bcb0"),
            hex!("a03a93d726d7487874e51b52d8d534c63aa2a689df18e3b307c0d6cb0a388b00f3"),
            hex!("a06aa67101d011d1c22fe739ef83b04b5214a3e2f8e1a2625d8bfdb116b447e86f"),
            hex!("a02dd545b33c62d33a183e127a08a4767fba891d9f3b94fc20a2ca02600d6d1fff"),
            hex!("a0fe6db87d00f06d53bff8169fa497571ff5af1addfb715b649b4d79dd3e394b04"),
            hex!("a0d9240a9d2d5851d05a97ff3305334dfdb0101e1e321fc279d2bb3cad6afa8fc8"),
            hex!("a01b69c6ab5173de8a8ec53a6ebba965713a4cc7feb86cb3e230def37c230ca2b2"),
        ];

        let branch_0x3_rlp_stack: Vec<RlpNode> = branch_0x3_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        let branch_0x3_node = BranchNodeV2::new(
            Default::default(),
            branch_0x3_rlp_stack,
            TrieMask::new(0b1111111111111111), None,
        );

        // Captured hash/tree masks for the branch at [0x3].
        let branch_0x3_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b0100010000010101),
            tree_mask: TrieMask::new(0b0100000000000000),
        });

        // The leaf sits at trie path [0x3, 0x7]; its key is the remainder of
        // the hashed account path, and its value is an RLP-encoded account.
        let leaf_path = Nibbles::from_nibbles([0x3, 0x7]);
        let leaf_key = Nibbles::unpack(
            &hex!("d65eaa92c6bc4c13a5ec45527f0c18ea8932588728769ec7aecfe6d9f32e42")[..],
        );
        let leaf_value = hex!("f8440180a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0f57acd40259872606d76197ef052f3d35588dadf919ee1f0e3cb9b62d3f4b02c").to_vec();

        let leaf_node = LeafNode::new(leaf_key, leaf_value);
        let leaf_masks = None;

        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles([0x3]),
                node: TrieNodeV2::Branch(branch_0x3_node),
                masks: branch_0x3_masks,
            },
            ProofTrieNodeV2 {
                path: leaf_path,
                node: TrieNodeV2::Leaf(leaf_node),
                masks: leaf_masks,
            },
        ])
        .unwrap();

        // Full 64-nibble path of the leaf = trie path + leaf key.
        let mut leaf_full_path = leaf_path;
        leaf_full_path.extend(&leaf_key);

        // New RLP-encoded account value to overwrite the revealed leaf with.
        let leaf_new_value = vec![
            248, 68, 1, 128, 160, 224, 163, 152, 169, 122, 160, 155, 102, 53, 41, 0, 47, 28, 205,
            190, 199, 5, 215, 108, 202, 22, 138, 70, 196, 178, 193, 208, 18, 96, 95, 63, 238, 160,
            245, 122, 205, 64, 37, 152, 114, 96, 109, 118, 25, 126, 240, 82, 243, 211, 85, 136,
            218, 223, 145, 158, 225, 240, 227, 203, 155, 98, 211, 244, 176, 44,
        ];

        trie.update_leaf(leaf_full_path, leaf_new_value.clone(), DefaultTrieNodeProvider).unwrap();

        // The updated value must be stored in the lower subtrie owning the
        // leaf path, and never in the upper subtrie.
        assert_eq!(
            Some(&leaf_new_value),
            trie.lower_subtrie_for_path(&leaf_path).unwrap().inner.values.get(&leaf_full_path)
        );
        assert!(trie.upper_subtrie.inner.values.is_empty());

        // Root recomputation must reproduce the known-good state root.
        let expected_root =
            b256!("0x29b07de8376e9ce7b3a69e9b102199869514d3f42590b5abc6f7d48ec9b8665c");
        assert_eq!(trie.root(), expected_root);
    }
7186
7187 #[test]
7188 fn find_leaf_existing_leaf() {
7189 let provider = DefaultTrieNodeProvider;
7191 let mut sparse = ParallelSparseTrie::default();
7192 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7193 let value = b"test_value".to_vec();
7194
7195 sparse.update_leaf(path, value.clone(), &provider).unwrap();
7196
7197 let result = sparse.find_leaf(&path, None);
7199 assert_matches!(result, Ok(LeafLookup::Exists));
7200
7201 let result = sparse.find_leaf(&path, Some(&value));
7203 assert_matches!(result, Ok(LeafLookup::Exists));
7204 }
7205
7206 #[test]
7207 fn find_leaf_value_mismatch() {
7208 let provider = DefaultTrieNodeProvider;
7210 let mut sparse = ParallelSparseTrie::default();
7211 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7212 let value = b"test_value".to_vec();
7213 let wrong_value = b"wrong_value".to_vec();
7214
7215 sparse.update_leaf(path, value, &provider).unwrap();
7216
7217 let result = sparse.find_leaf(&path, Some(&wrong_value));
7219 assert_matches!(
7220 result,
7221 Err(LeafLookupError::ValueMismatch { path: p, expected: Some(e), actual: _a }) if p == path && e == wrong_value
7222 );
7223 }
7224
7225 #[test]
7226 fn find_leaf_not_found_empty_trie() {
7227 let sparse = ParallelSparseTrie::default();
7229 let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
7230
7231 let result = sparse.find_leaf(&path, None);
7233 assert_matches!(result, Ok(LeafLookup::NonExistent));
7234 }
7235
7236 #[test]
7237 fn find_leaf_empty_trie() {
7238 let sparse = ParallelSparseTrie::default();
7239 let path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7240
7241 let result = sparse.find_leaf(&path, None);
7242 assert_matches!(result, Ok(LeafLookup::NonExistent));
7243 }
7244
7245 #[test]
7246 fn find_leaf_exists_no_value_check() {
7247 let provider = DefaultTrieNodeProvider;
7248 let mut sparse = ParallelSparseTrie::default();
7249 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7250 sparse.update_leaf(path, encode_account_value(0), &provider).unwrap();
7251
7252 let result = sparse.find_leaf(&path, None);
7253 assert_matches!(result, Ok(LeafLookup::Exists));
7254 }
7255
7256 #[test]
7257 fn find_leaf_exists_with_value_check_ok() {
7258 let provider = DefaultTrieNodeProvider;
7259 let mut sparse = ParallelSparseTrie::default();
7260 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7261 let value = encode_account_value(0);
7262 sparse.update_leaf(path, value.clone(), &provider).unwrap();
7263
7264 let result = sparse.find_leaf(&path, Some(&value));
7265 assert_matches!(result, Ok(LeafLookup::Exists));
7266 }
7267
7268 #[test]
7269 fn find_leaf_exclusion_branch_divergence() {
7270 let provider = DefaultTrieNodeProvider;
7271 let mut sparse = ParallelSparseTrie::default();
7272 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6])); let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8])); sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7277 sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7278
7279 let result = sparse.find_leaf(&search_path, None);
7280 assert_matches!(result, Ok(LeafLookup::NonExistent))
7281 }
7282
7283 #[test]
7284 fn find_leaf_exclusion_extension_divergence() {
7285 let provider = DefaultTrieNodeProvider;
7286 let mut sparse = ParallelSparseTrie::default();
7287 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7289 let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8]));
7291
7292 sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7293
7294 let result = sparse.find_leaf(&search_path, None);
7295 assert_matches!(result, Ok(LeafLookup::NonExistent))
7296 }
7297
7298 #[test]
7299 fn find_leaf_exclusion_leaf_divergence() {
7300 let provider = DefaultTrieNodeProvider;
7301 let mut sparse = ParallelSparseTrie::default();
7302 let existing_leaf_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7303 let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7304
7305 sparse.update_leaf(existing_leaf_path, encode_account_value(0), &provider).unwrap();
7306
7307 let result = sparse.find_leaf(&search_path, None);
7308 assert_matches!(result, Ok(LeafLookup::NonExistent))
7309 }
7310
7311 #[test]
7312 fn find_leaf_exclusion_path_ends_at_branch() {
7313 let provider = DefaultTrieNodeProvider;
7314 let mut sparse = ParallelSparseTrie::default();
7315 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6]));
7317 let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2])); sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7320 sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7321
7322 let result = sparse.find_leaf(&search_path, None);
7323 assert_matches!(result, Ok(LeafLookup::NonExistent));
7324 }
7325
    #[test]
    fn find_leaf_error_blinded_node_at_leaf_path() {
        // The searched-for leaf sits behind a branch child that is known only
        // by hash (blinded), so the lookup must fail with `BlindedNode` at the
        // exact leaf path.
        let blinded_hash = B256::repeat_byte(0xBB);
        let leaf_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);

        let sparse = new_test_trie(
            [
                (
                    // Root: extension with key [0x1, 0x2] ...
                    Nibbles::default(),
                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x1, 0x2])),
                ),
                (
                    // ... followed by another extension with key [0x3] ...
                    Nibbles::from_nibbles_unchecked([0x1, 0x2]),
                    SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x3])),
                ),
                (
                    // ... ending at a branch whose only child (nibble 0x4,
                    // state mask 0b10000) is blinded.
                    Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b10000), &[(0x4, blinded_hash)]),
                ),
            ]
            .into_iter(),
        );

        let result = sparse.find_leaf(&leaf_path, None);

        // The error must carry both the blinded path and its hash.
        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
            if path == leaf_path && hash == blinded_hash
        );
    }
7360
    #[test]
    fn find_leaf_error_blinded_node() {
        // The search path immediately descends into a blinded child of the
        // root branch, so the lookup must fail at that child's short path
        // ([0x1]), not at the full search path.
        let blinded_hash = B256::repeat_byte(0xAA);
        let path_to_blind = Nibbles::from_nibbles_unchecked([0x1]);
        let search_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);

        let sparse = new_test_trie(
            [
                (
                    // Root branch with children at nibbles 0x1 and 0x5
                    // (state mask 0b100010); the 0x1 child is blinded.
                    Nibbles::default(),
                    SparseNode::new_branch(TrieMask::new(0b100010), &[(0x1, blinded_hash)]),
                ),
                (
                    // Revealed sibling leaf at 0x5 so the branch is well-formed.
                    Nibbles::from_nibbles_unchecked([0x5]),
                    SparseNode::new_leaf(Nibbles::from_nibbles_unchecked([0x6, 0x7, 0x8])),
                ),
            ]
            .into_iter(),
        );

        let result = sparse.find_leaf(&search_path, None);

        // The reported path is the blinded child's path, with its hash.
        assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
            if path == path_to_blind && hash == blinded_hash
        );
    }
7390
    #[test]
    fn test_mainnet_block_24185431_storage_0x6ba784ee() {
        // Regression test reconstructed from a mainnet (block 24185431)
        // storage trie for account 0x6ba784ee: reveal proof nodes, insert and
        // then remove a leaf (the removal first hits a blinded node), and
        // verify the resulting `BranchNodeCompact` update for the branch at
        // path 0x3.
        reth_tracing::init_test_tracing();

        // Child hashes of the fully-populated (16-child) branch at path 0x3.
        let mut branch_0x3_hashes = vec![
            B256::from(hex!("fc11ba8de4b220b8f19a09f0676c69b8e18bae1350788392640069e59b41733d")),
            B256::from(hex!("8afe085cc6685680bd8ba4bac6e65937a4babf737dc5e7413d21cdda958e8f74")),
            B256::from(hex!("c7b6f7c0fc601a27aece6ec178fd9be17cdee77c4884ecfbe1ee459731eb57da")),
            B256::from(hex!("71c1aec60db78a2deb4e10399b979a2ed5be42b4ee0c0a17c614f9ddc9f9072e")),
            B256::from(hex!("e9261302e7c0b77930eaf1851b585210906cd01e015ab6be0f7f3c0cc947c32a")),
            B256::from(hex!("38ce8f369c56bd77fabdf679b27265b1f8d0a54b09ef612c8ee8ddfc6b3fab95")),
            B256::from(hex!("7b507a8936a28c5776b647d1c4bda0bbbb3d0d227f16c5f5ebba58d02e31918d")),
            B256::from(hex!("0f456b9457a824a81e0eb555aa861461acb38674dcf36959b3b26deb24ed0af9")),
            B256::from(hex!("2145420289652722ad199ba932622e3003c779d694fa5a2acfb2f77b0782b38a")),
            B256::from(hex!("2c1a04dce1a9e2f1cfbf8806edce50a356dfa58e7e7c542c848541502613b796")),
            B256::from(hex!("dad7ca55186ac8f40d4450dc874166df8267b44abc07e684d9507260f5712df3")),
            B256::from(hex!("3a8c2a1d7d2423e92965ec29014634e7f0307ded60b1a63d28c86c3222b24236")),
            B256::from(hex!("4e9929e6728b3a7bf0db6a0750ab376045566b556c9c605e606ecb8ec25200d7")),
            B256::from(hex!("1797c36f98922f52292c161590057a1b5582d5503e3370bcfbf6fd939f3ec98b")),
            B256::from(hex!("9e514589a9c9210b783c19fa3f0b384bbfaefe98f10ea189a2bfc58c6bf000a1")),
            B256::from(hex!("85bdaabbcfa583cbd049650e41d3d19356bd833b3ed585cf225a3548557c7fa3")),
        ];
        let branch_0x3_node = create_branch_node(
            Nibbles::from_nibbles([0x3]),
            &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf],
            branch_0x3_hashes.iter().map(RlpNode::word_rlp),
        );

        // Branch at path 0x31 with a single child at nibble 0xc (masks 4096 ==
        // bit 12), i.e. that child is only known by hash.
        let branch_0x31_hashes = vec![B256::from(hex!(
            "3ca994ba59ce70b83fee1f01731c8dac4fdd0f70ade79bf9b0695c4c53531aab"
        ))];
        let branch_0x31_node = create_branch_node_with_children(
            &[0xc],
            branch_0x31_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // Hashed storage slot whose leaf is inserted below and removed again;
        // its nibble path starts with [0x3, 0x1, 0xb].
        let leaf_path = hex!("31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81");
        let leaf_nibbles = Nibbles::unpack(leaf_path.as_slice());
        let leaf_value = hex!("0009ae8ce8245bff").to_vec();

        // Branch at path 0x31c with children at nibbles 0x3, 0x7 and 0xc;
        // revealed only after the first removal attempt fails.
        let branch_0x31c_hashes = vec![
            B256::from(hex!("1a68fdb36b77e9332b49a977faf800c22d0199e6cecf44032bb083c78943e540")),
            B256::from(hex!("cd4622c6df6fd7172c7fed1b284ef241e0f501b4c77b675ef10c612bd0948a7a")),
            B256::from(hex!("abf3603d2f991787e21f1709ee4c7375d85dfc506995c0435839fccf3fe2add4")),
        ];
        let branch_0x31c_node = create_branch_node_with_children(
            &[0x3, 0x7, 0xc],
            branch_0x31c_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // Initially only the branch at 0x31 is revealed below the root.
        let mut proof_nodes = vec![ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1]),
            node: branch_0x31_node,
            masks: Some(BranchNodeMasks {
                tree_mask: TrieMask::new(4096),
                hash_mask: TrieMask::new(4096),
            }),
        }];

        // Root masks taken from the original mainnet proof; `true` retains
        // trie updates so they can be inspected below.
        let mut trie = ParallelSparseTrie::default()
            .with_root(
                branch_0x3_node,
                Some(BranchNodeMasks {
                    tree_mask: TrieMask::new(26099),
                    hash_mask: TrieMask::new(65535),
                }),
                true,
            )
            .expect("root revealed");

        trie.reveal_nodes(&mut proof_nodes).unwrap();

        trie.update_leaf(leaf_nibbles, leaf_value, NoRevealProvider).unwrap();

        // Removing the leaf must fail first: it would require the still-blinded
        // node at 0x31c.
        let Err(err) = trie.remove_leaf(&leaf_nibbles, NoRevealProvider) else {
            panic!("expected blinded node error");
        };
        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if path == &Nibbles::from_nibbles([0x3, 0x1, 0xc]));

        // Reveal the missing branch and retry the removal, which now succeeds.
        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1, 0xc]),
            node: branch_0x31c_node,
            masks: Some(BranchNodeMasks { tree_mask: 0.into(), hash_mask: 4096.into() }),
        }])
        .unwrap();

        trie.remove_leaf(&leaf_nibbles, NoRevealProvider).unwrap();

        // Recompute hashes so the retained update set is populated.
        let _ = trie.root();

        let updates = trie.updates_ref();

        let branch_0x3_update = updates
            .updated_nodes
            .get(&Nibbles::from_nibbles([0x3]))
            .expect("Branch at 0x3 should be in updates");

        // Child 0x1's subtree changed, so its stale hash (index 1) drops out
        // of the expected hash list; the hash mask bit for 0x1 is cleared in
        // the expected branch below.
        branch_0x3_hashes.remove(1);

        let expected_branch = BranchNodeCompact::new(
            0b1111111111111111,
            0b0110010111110011,
            0b1111111111111101,
            branch_0x3_hashes,
            None,
        );

        assert_eq!(branch_0x3_update, &expected_branch);
    }
7514
    #[test]
    fn test_get_leaf_value_lower_subtrie() {
        // Build root -> branch(0x1) -> leaf(0x12, key 0x34) so the revealed
        // leaf lands in a lower subtrie, then check that the value is stored
        // there (not in the upper subtrie) and that `get_leaf_value` finds it.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let branch_at_1 =
            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x1]),
            node: branch_at_1,
            masks: None,
        }])
        .unwrap();

        // Leaf at path [0x1, 0x2] with remaining key [0x3, 0x4]; its full path
        // is the concatenation [0x1, 0x2, 0x3, 0x4].
        let leaf_path = Nibbles::from_nibbles([0x1, 0x2]);
        let leaf_key = Nibbles::from_nibbles([0x3, 0x4]);
        let leaf_node = create_leaf_node(leaf_key.to_vec(), 42);

        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path: leaf_path, node: leaf_node, masks: None }])
            .unwrap();

        let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]);

        // The value must live in the lower subtrie indexed by the leaf path,
        // and must NOT be duplicated in the upper subtrie.
        let idx = path_subtrie_index_unchecked(&leaf_path);
        let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
        assert!(
            lower_subtrie.inner.values.contains_key(&full_path),
            "value should be in lower subtrie"
        );
        assert!(
            !trie.upper_subtrie.inner.values.contains_key(&full_path),
            "value should NOT be in upper subtrie"
        );

        // The public accessor must search lower subtries as well.
        assert!(
            trie.get_leaf_value(&full_path).is_some(),
            "get_leaf_value should find the value in lower subtrie"
        );
    }
7564
7565 #[test]
7572 fn test_get_leaf_value_upper_subtrie_via_update_leaf() {
7573 let provider = NoRevealProvider;
7574
7575 let mut trie = ParallelSparseTrie::default()
7577 .with_root(TrieNodeV2::EmptyRoot, None, false)
7578 .expect("root revealed");
7579
7580 let full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA, 0xB, 0xC]));
7582 let value = encode_account_value(42);
7583
7584 trie.update_leaf(full_path, value.clone(), provider).unwrap();
7587
7588 assert!(
7590 trie.upper_subtrie.inner.values.contains_key(&full_path),
7591 "value should be in upper subtrie after update_leaf"
7592 );
7593
7594 let retrieved = trie.get_leaf_value(&full_path);
7598 assert_eq!(retrieved, Some(&value));
7599 }
7600
7601 #[test]
7603 fn test_get_leaf_value_upper_and_lower_subtries() {
7604 let provider = NoRevealProvider;
7605
7606 let mut trie = ParallelSparseTrie::default()
7608 .with_root(TrieNodeV2::EmptyRoot, None, false)
7609 .expect("root revealed");
7610
7611 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA]));
7613 let value1 = encode_account_value(1);
7614 trie.update_leaf(path1, value1.clone(), provider).unwrap();
7615
7616 let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0xB]));
7618 let value2 = encode_account_value(2);
7619 trie.update_leaf(path2, value2.clone(), provider).unwrap();
7620
7621 assert_eq!(trie.get_leaf_value(&path1), Some(&value1));
7623 assert_eq!(trie.get_leaf_value(&path2), Some(&value2));
7624 }
7625
7626 #[test]
7628 fn test_get_leaf_value_sparse_storage_trie() {
7629 let provider = NoRevealProvider;
7630
7631 let mut trie = ParallelSparseTrie::default()
7633 .with_root(TrieNodeV2::EmptyRoot, None, false)
7634 .expect("root revealed");
7635
7636 let slot_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x9]));
7638 let slot_value = alloy_rlp::encode(U256::from(12345));
7639 trie.update_leaf(slot_path, slot_value.clone(), provider).unwrap();
7640
7641 assert_eq!(trie.get_leaf_value(&slot_path), Some(&slot_value));
7643 }
7644
7645 #[test]
7646 fn test_prune_empty_suffix_key_regression() {
7647 use crate::provider::DefaultTrieNodeProvider;
7652
7653 let provider = DefaultTrieNodeProvider;
7654 let mut parallel = ParallelSparseTrie::default();
7655
7656 let value = {
7658 let account = Account {
7659 nonce: 0x123456789abcdef,
7660 balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
7661 ..Default::default()
7662 };
7663 let mut buf = Vec::new();
7664 account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
7665 buf
7666 };
7667
7668 for i in 0..16u8 {
7670 parallel
7671 .update_leaf(
7672 pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5])),
7673 value.clone(),
7674 &provider,
7675 )
7676 .unwrap();
7677 }
7678
7679 let root_before = parallel.root();
7681
7682 parallel.prune(&[]);
7684
7685 let root_after = parallel.root();
7686 assert_eq!(root_before, root_after, "root hash must be preserved");
7687
7688 for i in 0..16u8 {
7691 let path = pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5]));
7692 assert!(
7693 parallel.get_leaf_value(&path).is_none(),
7694 "value at {:?} should be removed after prune",
7695 path
7696 );
7697 }
7698 }
7699
7700 #[test]
7701 fn test_prune_empty_trie() {
7702 let mut trie = ParallelSparseTrie::default();
7703 trie.prune(&[]);
7704 let root = trie.root();
7705 assert_eq!(root, EMPTY_ROOT_HASH, "empty trie should have empty root hash");
7706 }
7707
7708 #[test]
7709 fn test_prune_preserves_root_hash() {
7710 let provider = DefaultTrieNodeProvider;
7711 let mut trie = ParallelSparseTrie::default();
7712
7713 let value = large_account_value();
7714
7715 for i in 0..8u8 {
7716 for j in 0..4u8 {
7717 trie.update_leaf(
7718 pad_nibbles_right(Nibbles::from_nibbles([i, j, 0x3, 0x4, 0x5, 0x6])),
7719 value.clone(),
7720 &provider,
7721 )
7722 .unwrap();
7723 }
7724 }
7725
7726 let root_before = trie.root();
7727 trie.prune(&[]);
7728 let root_after = trie.root();
7729 assert_eq!(root_before, root_after, "root hash must be preserved after prune");
7730 }
7731
7732 #[test]
7733 fn test_prune_single_leaf_trie() {
7734 let provider = DefaultTrieNodeProvider;
7735 let mut trie = ParallelSparseTrie::default();
7736
7737 let value = large_account_value();
7738 trie.update_leaf(
7739 pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])),
7740 value,
7741 &provider,
7742 )
7743 .unwrap();
7744
7745 let root_before = trie.root();
7746 let nodes_before = trie.size_hint();
7747
7748 trie.prune(&[]);
7749
7750 let root_after = trie.root();
7751 assert_eq!(root_before, root_after, "root hash should be preserved");
7752 assert_eq!(trie.size_hint(), nodes_before, "single leaf trie should not change");
7753 }
7754
7755 #[test]
7756 fn test_prune_root_hash_preserved() {
7757 let provider = DefaultTrieNodeProvider;
7758 let mut trie = ParallelSparseTrie::default();
7759
7760 let key1 = Nibbles::unpack(B256::repeat_byte(0x00));
7762 let key2 = Nibbles::unpack(B256::repeat_byte(0x11));
7763
7764 let large_value = large_account_value();
7765 trie.update_leaf(key1, large_value.clone(), &provider).unwrap();
7766 trie.update_leaf(key2, large_value, &provider).unwrap();
7767
7768 let root_before = trie.root();
7769
7770 trie.prune(&[]);
7771
7772 assert_eq!(root_before, trie.root(), "root hash must be preserved after pruning");
7773 }
7774
7775 #[test]
7776 fn test_prune_mixed_embedded_and_hashed() {
7777 let provider = DefaultTrieNodeProvider;
7778 let mut trie = ParallelSparseTrie::default();
7779
7780 let large_value = large_account_value();
7781 let small_value = vec![0x80];
7782
7783 for i in 0..8u8 {
7784 let value = if i < 4 { large_value.clone() } else { small_value.clone() };
7785 trie.update_leaf(
7786 pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3])),
7787 value,
7788 &provider,
7789 )
7790 .unwrap();
7791 }
7792
7793 let root_before = trie.root();
7794 trie.prune(&[]);
7795 assert_eq!(root_before, trie.root(), "root hash must be preserved");
7796 }
7797
7798 #[test]
7799 fn test_prune_all_lower_subtries() {
7800 let provider = DefaultTrieNodeProvider;
7801
7802 let large_value = large_account_value();
7803
7804 let mut keys = Vec::new();
7805 for first in 0..16u8 {
7806 for second in 0..16u8 {
7807 keys.push(pad_nibbles_right(Nibbles::from_nibbles([
7808 first, second, 0x1, 0x2, 0x3, 0x4,
7809 ])));
7810 }
7811 }
7812
7813 let mut trie = ParallelSparseTrie::default();
7814
7815 for key in &keys {
7816 trie.update_leaf(*key, large_value.clone(), &provider).unwrap();
7817 }
7818
7819 let root_before = trie.root();
7820
7821 let total_pruned = trie.prune(&[]);
7822
7823 assert!(total_pruned > 0, "should have pruned some nodes");
7824 assert_eq!(root_before, trie.root(), "root hash should be preserved");
7825
7826 for key in &keys {
7827 assert!(trie.get_leaf_value(key).is_none(), "value should be pruned");
7828 }
7829 }
7830
7831 #[test]
7832 fn test_prune_keeps_only_hot_paths() {
7833 let provider = DefaultTrieNodeProvider;
7834 let mut trie = ParallelSparseTrie::default();
7835
7836 let key_keep = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7837 let key_drop_1 = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x3, 0x4]));
7838 let key_drop_2 = pad_nibbles_right(Nibbles::from_nibbles([0x9, 0x2, 0x3, 0x4]));
7839
7840 let value = large_account_value();
7841 trie.update_leaf(key_keep, value.clone(), &provider).unwrap();
7842 trie.update_leaf(key_drop_1, value.clone(), &provider).unwrap();
7843 trie.update_leaf(key_drop_2, value, &provider).unwrap();
7844
7845 let root_before = trie.root();
7846
7847 let pruned = trie.prune(&[key_keep]);
7848 assert!(pruned > 0, "expected some nodes to be pruned");
7849 assert_eq!(root_before, trie.root(), "root hash should be preserved after LFU prune");
7850
7851 assert!(trie.get_leaf_value(&key_keep).is_some(), "retained key must remain revealed");
7852 assert!(trie.get_leaf_value(&key_drop_1).is_none(), "non-retained key should be pruned");
7853 assert!(trie.get_leaf_value(&key_drop_2).is_none(), "non-retained key should be pruned");
7854 }
7855
7856 #[test]
7857 fn test_prune_update_after() {
7858 let provider = DefaultTrieNodeProvider;
7860 let mut trie = ParallelSparseTrie::default();
7861
7862 let value = large_account_value();
7863
7864 for first in 0..4u8 {
7866 for second in 0..4u8 {
7867 trie.update_leaf(
7868 pad_nibbles_right(Nibbles::from_nibbles([
7869 first, second, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6,
7870 ])),
7871 value.clone(),
7872 &provider,
7873 )
7874 .unwrap();
7875 }
7876 }
7877
7878 let root_before = trie.root();
7879
7880 trie.prune(&[]);
7881
7882 let root_after = trie.root();
7883 assert_eq!(root_before, root_after, "root hash should be preserved");
7884
7885 let new_path =
7888 pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x5, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7889 trie.update_leaf(new_path, value, &provider).unwrap();
7890
7891 let _ = trie.root();
7893 }
7894
7895 #[test]
7898 fn test_update_leaves_successful_update() {
7899 use crate::LeafUpdate;
7900 use alloy_primitives::map::B256Map;
7901 use std::cell::RefCell;
7902
7903 let provider = DefaultTrieNodeProvider;
7904 let mut trie = ParallelSparseTrie::default();
7905
7906 let b256_key = B256::with_last_byte(42);
7908 let key = Nibbles::unpack(b256_key);
7909 let value = encode_account_value(1);
7910 trie.update_leaf(key, value, &provider).unwrap();
7911
7912 let new_value = encode_account_value(2);
7914
7915 let mut updates: B256Map<LeafUpdate> = B256Map::default();
7916 updates.insert(b256_key, LeafUpdate::Changed(new_value));
7917
7918 let proof_targets = RefCell::new(Vec::new());
7919 trie.update_leaves(&mut updates, |path, min_len| {
7920 proof_targets.borrow_mut().push((path, min_len));
7921 })
7922 .unwrap();
7923
7924 assert!(updates.is_empty(), "Update map should be empty after successful update");
7926 assert!(
7927 proof_targets.borrow().is_empty(),
7928 "Callback should not be invoked for revealed paths"
7929 );
7930 }
7931
7932 #[test]
7933 fn test_update_leaves_insert_new_leaf() {
7934 use crate::LeafUpdate;
7935 use alloy_primitives::map::B256Map;
7936 use std::cell::RefCell;
7937
7938 let mut trie = ParallelSparseTrie::default();
7939
7940 let b256_key = B256::with_last_byte(99);
7942 let new_value = encode_account_value(42);
7943
7944 let mut updates: B256Map<LeafUpdate> = B256Map::default();
7945 updates.insert(b256_key, LeafUpdate::Changed(new_value.clone()));
7946
7947 let proof_targets = RefCell::new(Vec::new());
7948 trie.update_leaves(&mut updates, |path, min_len| {
7949 proof_targets.borrow_mut().push((path, min_len));
7950 })
7951 .unwrap();
7952
7953 assert!(updates.is_empty(), "Update map should be empty after successful insert");
7955 assert!(
7956 proof_targets.borrow().is_empty(),
7957 "Callback should not be invoked for new leaf insert"
7958 );
7959
7960 let full_path = Nibbles::unpack(b256_key);
7962 assert_eq!(
7963 trie.get_leaf_value(&full_path),
7964 Some(&new_value),
7965 "New leaf value should be retrievable"
7966 );
7967 }
7968
    #[test]
    fn test_update_leaves_blinded_node() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch: child 0x0 is blinded (known only by hash), child 0x1 is
        // a small leaf embedded directly in the branch RLP.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(),
            small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0x0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // embedded leaf at 0x1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the root with tree_mask set instead of hash_mask, then
        // reveal the embedded leaf at nibble 0x1.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // B256::ZERO unpacks to a path starting with nibble 0x0 — the blinded
        // child — so this update cannot be applied without a proof.
        let b256_key = B256::ZERO;
        let new_value = encode_account_value(42);
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(new_value));

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The entry must stay queued and the trie state must be untouched.
        assert!(!updates.is_empty(), "Update should remain in map when hitting blinded node");

        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed update on blinded node"
        );

        // A proof must have been requested for the blinded path [0x0]
        // (length 1).
        let targets = proof_targets.borrow();
        assert!(!targets.is_empty(), "Callback should be invoked for blinded path");

        assert_eq!(targets[0].1, 1, "min_len should equal blinded node path length");
    }
8046
8047 #[test]
8048 fn test_update_leaves_removal() {
8049 use crate::LeafUpdate;
8050 use alloy_primitives::map::B256Map;
8051 use std::cell::RefCell;
8052
8053 let provider = DefaultTrieNodeProvider;
8054 let mut trie = ParallelSparseTrie::default();
8055
8056 let b256_key1 = B256::with_last_byte(1);
8059 let b256_key2 = B256::with_last_byte(2);
8060 let key1 = Nibbles::unpack(b256_key1);
8061 let key2 = Nibbles::unpack(b256_key2);
8062 let value = encode_account_value(1);
8063 trie.update_leaf(key1, value.clone(), &provider).unwrap();
8064 trie.update_leaf(key2, value, &provider).unwrap();
8065
8066 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8068 updates.insert(b256_key1, LeafUpdate::Changed(vec![])); let proof_targets = RefCell::new(Vec::new());
8071 trie.update_leaves(&mut updates, |path, min_len| {
8072 proof_targets.borrow_mut().push((path, min_len));
8073 })
8074 .unwrap();
8075
8076 assert!(updates.is_empty(), "Update map should be empty after successful removal");
8078 }
8079
    #[test]
    fn test_update_leaves_removal_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch: child 0x0 is blinded (known only by hash), child 0x1 is
        // a small leaf embedded directly in the branch RLP.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(),
            small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0x0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // embedded leaf at 0x1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the root with tree_mask set instead of hash_mask, then
        // reveal the embedded leaf at nibble 0x1.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // B256::ZERO unpacks to a path starting with nibble 0x0 — under the
        // blinded child.
        let b256_key = B256::ZERO;
        let full_path = Nibbles::unpack(b256_key);

        // Seed a value directly so we can verify it survives the failed removal.
        let old_value = encode_account_value(99);
        trie.upper_subtrie.inner.values.insert(full_path, old_value.clone());

        // Empty `Changed` payload == removal request.
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![]));

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The removal must fail atomically: a proof is requested, the entry
        // stays queued, and neither the value nor the prefix set changes.
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked when removal hits blinded node"
        );

        assert!(!updates.is_empty(), "Update should remain in map when removal hits blinded node");

        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&old_value),
            "Original value should be preserved after failed removal"
        );

        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed removal on blinded node"
        );
    }
8165
    #[test]
    fn test_update_leaves_removal_branch_collapse_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch with a blinded child at 0x0 and an embedded leaf at 0x1.
        // Per the assertions below, removing the target leaf would collapse
        // the branch onto a blinded sibling, which must fail atomically.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(Nibbles::default(), small_value);
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), // blinded child at 0x0
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), // embedded leaf at 0x1
            ],
            TrieMask::new(0b11),
            None,
        ));

        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the root with tree_mask set instead of hash_mask, then
        // reveal the embedded leaf at nibble 0x1.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // Seed a leaf value directly so the removal path has something to
        // delete (key unpacks to a path starting with nibble 0x0).
        let b256_key = B256::with_last_byte(0x10);
        let full_path = Nibbles::unpack(b256_key);
        let leaf_value = encode_account_value(42);
        trie.upper_subtrie.inner.values.insert(full_path, leaf_value.clone());

        // Snapshot prefix-set length and total node count across upper and all
        // revealed lower subtries, to verify nothing changes on failure.
        let prefix_set_len_before = trie.prefix_set.len();
        let node_count_before = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();

        // Empty `Changed` payload == removal request.
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![]));

        let proof_targets = RefCell::new(Vec::new());
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // Failure must be atomic: entry retained, proof requested, and all
        // observable trie state unchanged.
        assert!(
            !updates.is_empty(),
            "Update should remain in map when removal would collapse branch with blinded sibling"
        );

        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked for blinded sibling path"
        );

        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after atomic failure"
        );

        let node_count_after = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();
        assert_eq!(
            node_count_before, node_count_after,
            "Node count should be unchanged after atomic failure"
        );

        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&leaf_value),
            "Leaf value should still exist after failed removal"
        );
    }
8272
8273 #[test]
8274 fn test_update_leaves_touched() {
8275 use crate::LeafUpdate;
8276 use alloy_primitives::map::B256Map;
8277 use std::cell::RefCell;
8278
8279 let provider = DefaultTrieNodeProvider;
8280 let mut trie = ParallelSparseTrie::default();
8281
8282 let b256_key = B256::with_last_byte(42);
8284 let key = Nibbles::unpack(b256_key);
8285 let value = encode_account_value(1);
8286 trie.update_leaf(key, value, &provider).unwrap();
8287
8288 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8290 updates.insert(b256_key, LeafUpdate::Touched);
8291
8292 let proof_targets = RefCell::new(Vec::new());
8293 let prefix_set_len_before = trie.prefix_set.len();
8294
8295 trie.update_leaves(&mut updates, |path, min_len| {
8296 proof_targets.borrow_mut().push((path, min_len));
8297 })
8298 .unwrap();
8299
8300 assert!(updates.is_empty(), "Touched update should be removed for accessible path");
8302
8303 assert!(
8305 proof_targets.borrow().is_empty(),
8306 "Callback should not be invoked for accessible path"
8307 );
8308
8309 assert_eq!(
8311 trie.prefix_set.len(),
8312 prefix_set_len_before,
8313 "prefix_set should be unchanged for Touched update (read-only)"
8314 );
8315 }
8316
8317 #[test]
8318 fn test_update_leaves_touched_nonexistent() {
8319 use crate::LeafUpdate;
8320 use alloy_primitives::map::B256Map;
8321 use std::cell::RefCell;
8322
8323 let mut trie = ParallelSparseTrie::default();
8324
8325 let b256_key = B256::with_last_byte(99);
8327 let full_path = Nibbles::unpack(b256_key);
8328
8329 let prefix_set_len_before = trie.prefix_set.len();
8330
8331 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8332 updates.insert(b256_key, LeafUpdate::Touched);
8333
8334 let proof_targets = RefCell::new(Vec::new());
8335 trie.update_leaves(&mut updates, |path, min_len| {
8336 proof_targets.borrow_mut().push((path, min_len));
8337 })
8338 .unwrap();
8339
8340 assert!(updates.is_empty(), "Touched update should be removed for accessible (empty) path");
8342
8343 assert!(
8345 proof_targets.borrow().is_empty(),
8346 "Callback should not be invoked for accessible path"
8347 );
8348
8349 assert_eq!(
8351 trie.prefix_set.len(),
8352 prefix_set_len_before,
8353 "prefix_set should not be modified by Touched update"
8354 );
8355
8356 assert!(
8358 trie.get_leaf_value(&full_path).is_none(),
8359 "No value should exist for non-existent key after Touched update"
8360 );
8361 }
8362
8363 #[test]
8364 fn test_update_leaves_touched_blinded() {
8365 use crate::LeafUpdate;
8366 use alloy_primitives::map::B256Map;
8367 use std::cell::RefCell;
8368
8369 let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
8372 let leaf = LeafNode::new(
8373 Nibbles::default(), small_value,
8375 );
8376 let branch = TrieNodeV2::Branch(BranchNodeV2::new(
8377 Nibbles::default(),
8378 vec![
8379 RlpNode::word_rlp(&B256::repeat_byte(1)), RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), ],
8382 TrieMask::new(0b11),
8383 None,
8384 ));
8385
8386 let mut trie = ParallelSparseTrie::from_root(
8387 branch.clone(),
8388 Some(BranchNodeMasks {
8389 hash_mask: TrieMask::new(0b01),
8390 tree_mask: TrieMask::default(),
8391 }),
8392 false,
8393 )
8394 .unwrap();
8395
8396 trie.reveal_node(
8397 Nibbles::default(),
8398 branch,
8399 Some(BranchNodeMasks {
8400 hash_mask: TrieMask::default(),
8401 tree_mask: TrieMask::new(0b01),
8402 }),
8403 )
8404 .unwrap();
8405 trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();
8406
8407 let b256_key = B256::ZERO; let mut updates: B256Map<LeafUpdate> = B256Map::default();
8411 updates.insert(b256_key, LeafUpdate::Touched);
8412
8413 let proof_targets = RefCell::new(Vec::new());
8414 let prefix_set_len_before = trie.prefix_set.len();
8415 trie.update_leaves(&mut updates, |path, min_len| {
8416 proof_targets.borrow_mut().push((path, min_len));
8417 })
8418 .unwrap();
8419
8420 assert!(!proof_targets.borrow().is_empty(), "Callback should be invoked for blinded path");
8422
8423 assert!(!updates.is_empty(), "Touched update should remain in map for blinded path");
8425
8426 assert_eq!(
8428 trie.prefix_set.len(),
8429 prefix_set_len_before,
8430 "prefix_set should be unchanged for Touched update on blinded path"
8431 );
8432 }
8433
8434 #[test]
8435 fn test_update_leaves_deduplication() {
8436 use crate::LeafUpdate;
8437 use alloy_primitives::map::B256Map;
8438 use std::cell::RefCell;
8439
8440 let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
8443 let leaf = LeafNode::new(
8444 Nibbles::default(), small_value,
8446 );
8447 let branch = TrieNodeV2::Branch(BranchNodeV2::new(
8448 Nibbles::default(),
8449 vec![
8450 RlpNode::word_rlp(&B256::repeat_byte(1)), RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), ],
8453 TrieMask::new(0b11),
8454 None,
8455 ));
8456
8457 let mut trie = ParallelSparseTrie::from_root(
8458 branch.clone(),
8459 Some(BranchNodeMasks {
8460 hash_mask: TrieMask::new(0b01),
8461 tree_mask: TrieMask::default(),
8462 }),
8463 false,
8464 )
8465 .unwrap();
8466
8467 trie.reveal_node(
8468 Nibbles::default(),
8469 branch,
8470 Some(BranchNodeMasks {
8471 hash_mask: TrieMask::default(),
8472 tree_mask: TrieMask::new(0b01),
8473 }),
8474 )
8475 .unwrap();
8476 trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();
8477
8478 let b256_key1 = B256::ZERO;
8481 let b256_key2 = B256::with_last_byte(1); let b256_key3 = B256::with_last_byte(2); let mut updates: B256Map<LeafUpdate> = B256Map::default();
8485 let value = encode_account_value(42);
8486
8487 updates.insert(b256_key1, LeafUpdate::Changed(value.clone()));
8488 updates.insert(b256_key2, LeafUpdate::Changed(value.clone()));
8489 updates.insert(b256_key3, LeafUpdate::Changed(value));
8490
8491 let proof_targets = RefCell::new(Vec::new());
8492 trie.update_leaves(&mut updates, |path, min_len| {
8493 proof_targets.borrow_mut().push((path, min_len));
8494 })
8495 .unwrap();
8496
8497 let targets = proof_targets.borrow();
8500 assert_eq!(targets.len(), 3, "Callback should be invoked for each unique key");
8501
8502 for (_, min_len) in targets.iter() {
8504 assert_eq!(*min_len, 1, "All should have min_len 1 from blinded node at 0x0");
8505 }
8506 }
8507
8508 #[test]
8509 fn test_nibbles_to_padded_b256() {
8510 let empty = Nibbles::default();
8512 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&empty), B256::ZERO);
8513
8514 let full_key = b256!("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef");
8516 let full_nibbles = Nibbles::unpack(full_key);
8517 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&full_nibbles), full_key);
8518
8519 let partial = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
8522 let expected = b256!("1234000000000000000000000000000000000000000000000000000000000000");
8523 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&partial), expected);
8524
8525 let single = Nibbles::from_nibbles_unchecked([0xf]);
8527 let expected_single =
8528 b256!("f000000000000000000000000000000000000000000000000000000000000000");
8529 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&single), expected_single);
8530 }
8531
8532 #[test]
8533 fn test_memory_size() {
8534 let trie = ParallelSparseTrie::default();
8536 let empty_size = trie.memory_size();
8537
8538 assert!(empty_size >= core::mem::size_of::<ParallelSparseTrie>());
8540
8541 let root_branch = create_branch_node_with_children(
8545 &[0x1, 0x5],
8546 [
8547 RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
8548 RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
8549 ],
8550 );
8551 let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
8552
8553 let branch_at_1 =
8554 create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xCC))]);
8555 let branch_at_5 =
8556 create_branch_node_with_children(&[0x6], [RlpNode::word_rlp(&B256::repeat_byte(0xDD))]);
8557 trie.reveal_nodes(&mut [
8558 ProofTrieNodeV2 {
8559 path: Nibbles::from_nibbles_unchecked([0x1]),
8560 node: branch_at_1,
8561 masks: None,
8562 },
8563 ProofTrieNodeV2 {
8564 path: Nibbles::from_nibbles_unchecked([0x5]),
8565 node: branch_at_5,
8566 masks: None,
8567 },
8568 ])
8569 .unwrap();
8570
8571 let mut nodes = vec![
8572 ProofTrieNodeV2 {
8573 path: Nibbles::from_nibbles_unchecked([0x1, 0x2]),
8574 node: TrieNodeV2::Leaf(LeafNode {
8575 key: Nibbles::from_nibbles_unchecked([0x3, 0x4]),
8576 value: vec![1, 2, 3],
8577 }),
8578 masks: None,
8579 },
8580 ProofTrieNodeV2 {
8581 path: Nibbles::from_nibbles_unchecked([0x5, 0x6]),
8582 node: TrieNodeV2::Leaf(LeafNode {
8583 key: Nibbles::from_nibbles_unchecked([0x7, 0x8]),
8584 value: vec![4, 5, 6],
8585 }),
8586 masks: None,
8587 },
8588 ];
8589 trie.reveal_nodes(&mut nodes).unwrap();
8590
8591 let populated_size = trie.memory_size();
8592
8593 assert!(populated_size > empty_size);
8595 }
8596
    /// Reveals an extension-like root (a branch carrying a 63-nibble prefix),
    /// the bare branch it points at, and both of that branch's leaves, then
    /// computes the root hash over both leaf paths.
    #[test]
    fn test_reveal_extension_branch_leaves_then_root() {
        // 63 zero nibbles: the shared prefix of everything in this trie.
        let ext_key: [u8; 63] = [0; 63];

        // The bare branch sits at the end of the shared prefix.
        let branch_path = Nibbles::from_nibbles(ext_key);

        // Leaf 1 lives at prefix + nibble 0x1 (full 64-nibble path).
        let mut leaf1_path_bytes = [0u8; 64];
        leaf1_path_bytes[63] = 1;
        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);

        // Leaf 2 lives at prefix + nibble 0x2.
        let mut leaf2_path_bytes = [0u8; 64];
        leaf2_path_bytes[63] = 2;
        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);

        // Empty remaining keys: the full paths are consumed by the prefix
        // plus the branch's child nibble.
        let leaf1_node = LeafNode::new(Nibbles::default(), vec![0x1]);
        let leaf2_node = LeafNode::new(Nibbles::default(), vec![0x2]);

        let leaf1_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf1_node.clone())));
        let leaf2_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf2_node.clone())));

        // Children present at nibbles 0x1 and 0x2.
        let state_mask = TrieMask::new(0b0000_0110);
        let stack = vec![leaf1_rlp, leaf2_rlp];

        // RLP of the branch WITHOUT the prefix, used below as the root's
        // precomputed child reference.
        let bare_branch = BranchNodeV2::new(Nibbles::new(), stack.clone(), state_mask, None);
        let branch_rlp = RlpNode::from_rlp(&alloy_rlp::encode(&bare_branch));

        // Root: a branch carrying the 63-nibble prefix (extension semantics)
        // plus the precomputed RLP of the bare branch it wraps.
        let root_node = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::from_nibbles(ext_key),
            stack.clone(),
            state_mask,
            Some(branch_rlp),
        ));

        let mut trie = ParallelSparseTrie::from_root(root_node, None, false).unwrap();

        // Reveal the bare branch at the prefix boundary plus both leaves.
        let mut nodes = vec![
            ProofTrieNodeV2 {
                path: branch_path,
                node: TrieNodeV2::Branch(BranchNodeV2::new(
                    Nibbles::new(),
                    stack,
                    state_mask,
                    None,
                )),
                masks: None,
            },
            ProofTrieNodeV2 { path: leaf1_path, node: TrieNodeV2::Leaf(leaf1_node), masks: None },
            ProofTrieNodeV2 { path: leaf2_path, node: TrieNodeV2::Leaf(leaf2_node), masks: None },
        ];
        trie.reveal_nodes(&mut nodes).unwrap();

        // Mark both leaf paths dirty so root() recomputes hashes along them.
        trie.prefix_set.insert(leaf1_path);
        trie.prefix_set.insert(leaf2_path);

        // The test asserts successful hashing (no panic), not a specific root.
        let _root = trie.root();
    }
8676
8677 #[test]
8678 fn test_update_leaf_creates_embedded_nodes_then_root() {
8679 let mut leaf1_path_bytes = [0u8; 64];
8689 leaf1_path_bytes[63] = 1;
8690 let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);
8691
8692 let mut leaf2_path_bytes = [0u8; 64];
8693 leaf2_path_bytes[63] = 2;
8694 let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);
8695
8696 let mut trie = ParallelSparseTrie::default();
8698 trie.update_leaf(leaf1_path, vec![0x1], DefaultTrieNodeProvider).unwrap();
8699 trie.update_leaf(leaf2_path, vec![0x2], DefaultTrieNodeProvider).unwrap();
8700
8701 let _root = trie.root();
8703 }
8704}