1#[cfg(feature = "trie-debug")]
2use crate::debug_recorder::{LeafUpdateRecord, ProofTrieNodeRecord, RecordedOp, TrieDebugRecorder};
3use crate::{
4 lower::LowerSparseSubtrie, provider::TrieNodeProvider, LeafLookup, LeafLookupError,
5 RlpNodeStackItem, SparseNode, SparseNodeState, SparseNodeType, SparseTrie, SparseTrieUpdates,
6};
7use alloc::{borrow::Cow, boxed::Box, vec, vec::Vec};
8use alloy_primitives::{
9 map::{Entry, HashMap, HashSet},
10 B256, U256,
11};
12use alloy_rlp::Decodable;
13use alloy_trie::{BranchNodeCompact, TrieMask, EMPTY_ROOT_HASH};
14use core::cmp::{Ord, Ordering, PartialOrd};
15use reth_execution_errors::{SparseTrieError, SparseTrieErrorKind, SparseTrieResult};
16#[cfg(feature = "metrics")]
17use reth_primitives_traits::FastInstant as Instant;
18use reth_trie_common::{
19 prefix_set::{PrefixSet, PrefixSetMut},
20 BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles,
21 ProofTrieNodeV2, RlpNode, TrieNodeV2,
22};
23use smallvec::SmallVec;
24use tracing::{instrument, trace};
25
/// Maximum depth, in nibbles, of node paths stored in the upper subtrie.
pub const UPPER_TRIE_MAX_DEPTH: usize = 2;

/// Number of lower subtries: one per possible [`UPPER_TRIE_MAX_DEPTH`]-nibble path
/// prefix (16^2 = 256).
pub const NUM_LOWER_SUBTRIES: usize = 16usize.pow(UPPER_TRIE_MAX_DEPTH as u32);
/// Thresholds that gate when trie operations switch from sequential to parallel
/// execution.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct ParallelismThresholds {
    /// Minimum number of nodes being revealed before reveals are parallelized.
    // assumes this is consulted by `is_reveal_parallelism_enabled` — TODO confirm
    pub min_revealed_nodes: usize,
    /// Minimum number of updated nodes before hash updates are parallelized.
    // assumes this is consulted by `is_update_parallelism_enabled` — TODO confirm
    pub min_updated_nodes: usize,
}
44
/// Sparse trie split into an upper subtrie (paths shorter than
/// [`UPPER_TRIE_MAX_DEPTH`] nibbles) and [`NUM_LOWER_SUBTRIES`] lower subtries, so
/// that reveals and hash updates of the lower subtries can run in parallel.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct ParallelSparseTrie {
    /// Subtrie holding all nodes whose path is shorter than [`UPPER_TRIE_MAX_DEPTH`]
    /// nibbles; always present, starts with a single `SparseNode::Empty` root.
    upper_subtrie: Box<SparseSubtrie>,
    /// One subtrie per possible [`UPPER_TRIE_MAX_DEPTH`]-nibble prefix; each starts
    /// blind and is revealed on demand.
    lower_subtries: Box<[LowerSparseSubtrie; NUM_LOWER_SUBTRIES]>,
    /// Paths of leaves modified since the last root/hash computation.
    prefix_set: PrefixSetMut,
    /// Retained trie updates; `Some` only when update tracking was enabled via
    /// `set_updates`.
    updates: Option<SparseTrieUpdates>,
    /// Branch node masks keyed by branch node path.
    branch_node_masks: BranchNodeMasksMap,
    /// Reusable buffers for collecting `SparseTrieUpdatesAction`s produced by subtrie
    /// hash updates.
    update_actions_buffers: Vec<Vec<SparseTrieUpdatesAction>>,
    /// Thresholds controlling when reveal/update work is parallelized.
    parallelism_thresholds: ParallelismThresholds,
    /// Tracks which lower subtries were recently modified ("hot"); consulted by
    /// `prune` and decayed there.
    subtrie_heat: SubtrieModifications,
    /// Trie metrics (only with the `metrics` feature).
    #[cfg(feature = "metrics")]
    metrics: crate::metrics::ParallelSparseTrieMetrics,
    /// Records trie operations for debugging (only with the `trie-debug` feature).
    #[cfg(feature = "trie-debug")]
    debug_recorder: TrieDebugRecorder,
}
138
139impl Default for ParallelSparseTrie {
140 fn default() -> Self {
141 Self {
142 upper_subtrie: Box::new(SparseSubtrie {
143 nodes: HashMap::from_iter([(Nibbles::default(), SparseNode::Empty)]),
144 ..Default::default()
145 }),
146 lower_subtries: Box::new(
147 [const { LowerSparseSubtrie::Blind(None) }; NUM_LOWER_SUBTRIES],
148 ),
149 prefix_set: PrefixSetMut::default(),
150 updates: None,
151 branch_node_masks: BranchNodeMasksMap::default(),
152 update_actions_buffers: Vec::default(),
153 parallelism_thresholds: Default::default(),
154 subtrie_heat: SubtrieModifications::default(),
155 #[cfg(feature = "metrics")]
156 metrics: Default::default(),
157 #[cfg(feature = "trie-debug")]
158 debug_recorder: Default::default(),
159 }
160 }
161}
162
163impl SparseTrie for ParallelSparseTrie {
    /// Sets the trie root from a revealed root node, replacing the `SparseNode::Empty`
    /// placeholder that a fresh trie starts with.
    ///
    /// Records the operation under the `trie-debug` feature, stores the branch masks
    /// (keyed by the root branch's key when it is a branch node), configures update
    /// retention, and finally reveals the node in the upper subtrie.
    fn set_root(
        &mut self,
        root: TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        retain_updates: bool,
    ) -> SparseTrieResult<()> {
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::SetRoot {
            node: ProofTrieNodeRecord::from_proof_trie_node_v2(&ProofTrieNodeV2 {
                path: Nibbles::default(),
                node: root.clone(),
                masks,
            }),
        });

        let path = Nibbles::default();
        // The root slot must currently hold the `Empty` placeholder; anything else
        // indicates `set_root` was called on a non-fresh trie.
        let _removed_root = self.upper_subtrie.nodes.remove(&path).expect("root node should exist");
        debug_assert_eq!(_removed_root, SparseNode::Empty);

        self.set_updates(retain_updates);

        if let Some(masks) = masks {
            // When the root is a branch node, its masks are keyed by the branch's own
            // key (the root path itself is empty); otherwise by the empty path.
            let branch_path = if let TrieNodeV2::Branch(branch) = &root {
                branch.key
            } else {
                Nibbles::default()
            };

            self.branch_node_masks.insert(branch_path, masks);
        }

        self.reveal_upper_node(Nibbles::default(), &root, masks)
    }
199
200 fn set_updates(&mut self, retain_updates: bool) {
201 self.updates = retain_updates.then(Default::default);
202 }
203
    /// Reveals a batch of proof nodes, distributing them between the upper subtrie and
    /// the appropriate lower subtries.
    ///
    /// The slice is sorted by subtrie then by path, split at the upper/lower boundary,
    /// and upper nodes are revealed first so the reachability of lower subtries can be
    /// computed. Lower nodes are revealed either sequentially or in parallel (via
    /// rayon, `std` only), depending on `is_reveal_parallelism_enabled`.
    fn reveal_nodes(&mut self, nodes: &mut [ProofTrieNodeV2]) -> SparseTrieResult<()> {
        if nodes.is_empty() {
            return Ok(())
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::RevealNodes {
            nodes: nodes.iter().map(ProofTrieNodeRecord::from_proof_trie_node_v2).collect(),
        });

        // Sort by (subtrie, path) so nodes for the same subtrie are contiguous and
        // upper nodes precede lower ones.
        nodes.sort_unstable_by(
            |ProofTrieNodeV2 { path: path_a, .. }, ProofTrieNodeV2 { path: path_b, .. }| {
                let subtrie_type_a = SparseSubtrieType::from_path(path_a);
                let subtrie_type_b = SparseSubtrieType::from_path(path_b);
                subtrie_type_a.cmp(&subtrie_type_b).then_with(|| path_a.cmp(path_b))
            },
        );

        // Record branch masks up front. For a branch with a non-empty key the masks
        // belong to the extended path (node path + branch key).
        self.branch_node_masks.reserve(nodes.len());
        for ProofTrieNodeV2 { path, masks, node } in nodes.iter() {
            if let Some(branch_masks) = masks {
                let path = if let TrieNodeV2::Branch(branch) = node &&
                    !branch.key.is_empty()
                {
                    let mut path = *path;
                    path.extend(&branch.key);
                    path
                } else {
                    *path
                };
                self.branch_node_masks.insert(path, *branch_masks);
            }
        }

        // Sorting above guarantees all upper-subtrie nodes come first.
        let num_upper_nodes = nodes
            .iter()
            .position(|n| !SparseSubtrieType::path_len_is_upper(n.path.len()))
            .unwrap_or(nodes.len());
        let (upper_nodes, lower_nodes) = nodes.split_at(num_upper_nodes);

        self.upper_subtrie.nodes.reserve(upper_nodes.len());
        for node in upper_nodes {
            self.reveal_upper_node(node.path, &node.node, node.masks)?;
        }

        // Reachability is computed after upper reveals so newly revealed branches are
        // taken into account.
        let reachable_subtries = self.reachable_subtries();

        // For boundary nodes (path length == UPPER_TRIE_MAX_DEPTH) that were blinded
        // in their upper parent branch: un-blind them and remember the previously
        // stored hash, which is forwarded to the lower subtrie's reveal.
        let hashes_from_upper = nodes
            .iter()
            .filter_map(|node| {
                if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                    reachable_subtries.get(path_subtrie_index_unchecked(&node.path)) &&
                    let SparseNode::Branch { blinded_mask, blinded_hashes, .. } = self
                        .upper_subtrie
                        .nodes
                        .get_mut(&node.path.slice(0..UPPER_TRIE_MAX_DEPTH - 1))
                        .unwrap()
                {
                    let nibble = node.path.last().unwrap();
                    blinded_mask.is_bit_set(nibble).then(|| {
                        blinded_mask.unset_bit(nibble);
                        (node.path, blinded_hashes[nibble as usize])
                    })
                } else {
                    None
                }
            })
            .collect::<HashMap<_, _>>();

        // Sequential path: reveal each lower node in turn, skipping unreachable
        // subtries and unreachable boundary leaves.
        if !self.is_reveal_parallelism_enabled(lower_nodes.len()) {
            for node in lower_nodes {
                let idx = path_subtrie_index_unchecked(&node.path);
                if !reachable_subtries.get(idx) {
                    trace!(
                        target: "trie::parallel_sparse",
                        reveal_path = ?node.path,
                        "Node's lower subtrie is not reachable, skipping",
                    );
                    continue;
                }
                if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                    !Self::is_boundary_leaf_reachable(
                        &self.upper_subtrie.nodes,
                        &node.path,
                        &node.node,
                    )
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        path = ?node.path,
                        "Boundary leaf not reachable from upper subtrie, skipping",
                    );
                    continue;
                }
                self.lower_subtries[idx].reveal(&node.path);
                self.subtrie_heat.mark_modified(idx);
                self.lower_subtries[idx].as_revealed_mut().expect("just revealed").reveal_node(
                    node.path,
                    &node.node,
                    node.masks,
                    hashes_from_upper.get(&node.path).copied(),
                )?;
            }
            return Ok(())
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_reveal_parallelism_enabled");

        // Parallel path: group lower nodes per subtrie, take each subtrie out of the
        // array, reveal its nodes on the rayon pool, then put all subtries back.
        //
        // NOTE(review): unlike the sequential path above, this path does not call
        // `subtrie_heat.mark_modified` for revealed subtries — confirm this asymmetry
        // is intentional.
        #[cfg(feature = "std")]
        {
            use rayon::iter::{IntoParallelIterator, ParallelIterator};
            use tracing::Span;

            // Propagate the current tracing span into the rayon workers.
            let parent_span = Span::current();

            let upper_nodes = &self.upper_subtrie.nodes;

            let results = lower_nodes
                .chunk_by(|node_a, node_b| {
                    SparseSubtrieType::from_path(&node_a.path) ==
                        SparseSubtrieType::from_path(&node_b.path)
                })
                .filter_map(|nodes| {
                    let mut nodes = nodes
                        .iter()
                        .filter(|node| {
                            if node.path.len() == UPPER_TRIE_MAX_DEPTH &&
                                !Self::is_boundary_leaf_reachable(
                                    upper_nodes,
                                    &node.path,
                                    &node.node,
                                )
                            {
                                trace!(
                                    target: "trie::parallel_sparse",
                                    path = ?node.path,
                                    "Boundary leaf not reachable from upper subtrie, skipping",
                                );
                                false
                            } else {
                                true
                            }
                        })
                        .peekable();

                    // Empty group after filtering: nothing to reveal here.
                    let node = nodes.peek()?;
                    let idx =
                        SparseSubtrieType::from_path(&node.path).lower_index().unwrap_or_else(
                            || panic!("upper subtrie node {node:?} found amongst lower nodes"),
                        );

                    if !reachable_subtries.get(idx) {
                        trace!(
                            target: "trie::parallel_sparse",
                            nodes = ?nodes,
                            "Lower subtrie is not reachable, skipping reveal",
                        );
                        return None;
                    }

                    self.lower_subtries[idx].reveal(&node.path);
                    Some((
                        idx,
                        self.lower_subtries[idx].take_revealed().expect("just revealed"),
                        nodes,
                    ))
                })
                .collect::<Vec<_>>()
                .into_par_iter()
                .map(|(subtrie_idx, mut subtrie, nodes)| {
                    let _guard = parent_span.enter();

                    subtrie.nodes.reserve(nodes.size_hint().1.unwrap_or(0));

                    for node in nodes {
                        let res = subtrie.reveal_node(
                            node.path,
                            &node.node,
                            node.masks,
                            hashes_from_upper.get(&node.path).copied(),
                        );
                        // Stop at the first error but still return the subtrie so it
                        // can be put back into the array below.
                        if res.is_err() {
                            return (subtrie_idx, subtrie, res.map(|_| ()))
                        }
                    }
                    (subtrie_idx, subtrie, Ok(()))
                })
                .collect::<Vec<_>>();

            // Re-install every subtrie (even failed ones) before reporting an error;
            // when multiple subtries failed, the last error wins.
            let mut any_err = Ok(());
            for (subtrie_idx, subtrie, res) in results {
                self.lower_subtries[subtrie_idx] = LowerSparseSubtrie::Revealed(subtrie);
                if res.is_err() {
                    any_err = res;
                }
            }

            any_err
        }
    }
435
    /// Inserts or updates the leaf at `full_path` with `value`.
    ///
    /// Fast paths handle a value that already exists in the upper or lower subtrie.
    /// Otherwise the upper subtrie is walked (and restructured) from the root; any
    /// nodes the walk pushes past the upper/lower boundary are migrated to their lower
    /// subtrie, and if the walk stops at the boundary the update continues inside the
    /// target lower subtrie. The `_provider` parameter is unused here.
    fn update_leaf<P: TrieNodeProvider>(
        &mut self,
        full_path: Nibbles,
        value: Vec<u8>,
        _provider: P,
    ) -> SparseTrieResult<()> {
        debug_assert_eq!(
            full_path.len(),
            B256::len_bytes() * 2,
            "update_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
            full_path.len()
        );

        trace!(
            target: "trie::parallel_sparse",
            ?full_path,
            value_len = value.len(),
            "Updating leaf",
        );

        // Fast path: the leaf already has a value in the upper subtrie.
        if self.upper_subtrie.inner.values.contains_key(&full_path) {
            self.prefix_set.insert(full_path);
            self.upper_subtrie.inner.values.insert(full_path, value);
            return Ok(());
        }
        // Fast path: the leaf already has a value in its lower subtrie.
        if let Some(subtrie) = self.lower_subtrie_for_path(&full_path) &&
            subtrie.inner.values.contains_key(&full_path)
        {
            self.prefix_set.insert(full_path);
            self.lower_subtrie_for_path_mut(&full_path)
                .expect("subtrie exists")
                .inner
                .values
                .insert(full_path, value);
            return Ok(());
        }

        // Tentatively store the value in the upper subtrie; it is removed again if
        // the leaf ends up in a lower subtrie or the update fails.
        self.upper_subtrie.inner.values.insert(full_path, value.clone());

        // Paths of nodes inserted by the upper-subtrie walk; some may belong to lower
        // subtries and are migrated after the walk.
        let mut new_nodes = Vec::new();
        let mut next = Some(Nibbles::default());

        // Walk the upper subtrie while the next path still belongs to it.
        while let Some(current) =
            next.as_mut().filter(|next| SparseSubtrieType::path_len_is_upper(next.len()))
        {
            let step_result = self.upper_subtrie.update_next_node(current, &full_path);

            // Roll back the tentative value insert on error.
            if step_result.is_err() {
                self.upper_subtrie.inner.values.remove(&full_path);
                return step_result.map(|_| ());
            }

            match step_result? {
                LeafUpdateStep::Continue => {}
                LeafUpdateStep::Complete { inserted_nodes } => {
                    new_nodes.extend(inserted_nodes);
                    next = None;
                }
                LeafUpdateStep::NodeNotFound => {
                    next = None;
                }
            }
        }

        // Migrate any newly inserted nodes whose path crosses the upper/lower
        // boundary into their lower subtrie, moving associated leaf values with them.
        for node_path in &new_nodes {
            if SparseSubtrieType::path_len_is_upper(node_path.len()) {
                continue
            }

            let node =
                self.upper_subtrie.nodes.remove(node_path).expect("node belongs to upper subtrie");

            // Leaves carry their value in the subtrie's values map; move it along.
            let leaf_value = if let SparseNode::Leaf { key, .. } = &node {
                let mut leaf_full_path = *node_path;
                leaf_full_path.extend(key);
                Some((
                    leaf_full_path,
                    self.upper_subtrie
                        .inner
                        .values
                        .remove(&leaf_full_path)
                        .expect("leaf nodes have associated values entries"),
                ))
            } else {
                None
            };

            let subtrie = self.subtrie_for_path_mut(node_path);

            if let Some((leaf_full_path, value)) = leaf_value {
                subtrie.inner.values.insert(leaf_full_path, value);
            }

            subtrie.nodes.insert(*node_path, node);
        }

        // The walk stopped at the boundary: continue the update inside the target
        // lower subtrie.
        if let Some(next_path) = next.filter(|n| !SparseSubtrieType::path_len_is_upper(n.len())) {
            // The value now belongs to the lower subtrie, not the upper one.
            self.upper_subtrie.inner.values.remove(&full_path);

            let subtrie = self.subtrie_for_path_mut(&next_path);

            // A freshly created lower subtrie needs an `Empty` root placeholder.
            if subtrie.nodes.is_empty() {
                subtrie.nodes.insert(subtrie.path, SparseNode::Empty);
            }

            if let Err(e) = subtrie.update_leaf(full_path, value) {
                // Roll back the partial lower-subtrie insert on error.
                if let Some(lower) = self.lower_subtrie_for_path_mut(&full_path) {
                    lower.inner.values.remove(&full_path);
                }
                return Err(e);
            }
        }

        self.prefix_set.insert(full_path);

        Ok(())
    }
591
    /// Removes the leaf at `full_path`, collapsing now-redundant branch/extension
    /// nodes along the way.
    ///
    /// Walks from the root toward the leaf, remembering the nearest ancestor branch
    /// (`branch_parent_*`) and the extension directly above that branch
    /// (`ext_grandparent_*`) — the only nodes whose shape can change when a leaf is
    /// removed. Returns `Ok(())` without changes if the leaf does not exist; errors if
    /// removal would require descending into a blinded node. The `_provider` parameter
    /// is unused here.
    fn remove_leaf<P: TrieNodeProvider>(
        &mut self,
        full_path: &Nibbles,
        _provider: P,
    ) -> SparseTrieResult<()> {
        debug_assert_eq!(
            full_path.len(),
            B256::len_bytes() * 2,
            "remove_leaf full_path must be 64 nibbles (32 bytes), got {} nibbles",
            full_path.len()
        );

        trace!(
            target: "trie::parallel_sparse",
            ?full_path,
            "Removing leaf",
        );

        // Path and subtrie of the leaf itself; filled in when the walk finds it.
        let leaf_path;
        let leaf_subtrie_type;

        // Nearest branch ancestor of the leaf, and the extension node directly above
        // that branch (if any).
        let mut branch_parent_path: Option<Nibbles> = None;
        let mut branch_parent_node: Option<SparseNode> = None;

        let mut ext_grandparent_path: Option<Nibbles> = None;
        let mut ext_grandparent_node: Option<SparseNode> = None;

        let mut curr_path = Nibbles::new();
        let mut curr_subtrie_type = SparseSubtrieType::Upper;

        // Branch/extension nodes on the walk whose cached state must be invalidated.
        let mut paths_to_mark_dirty = Vec::new();

        loop {
            let curr_subtrie = match curr_subtrie_type {
                SparseSubtrieType::Upper => &mut self.upper_subtrie,
                SparseSubtrieType::Lower(idx) => {
                    self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
                }
            };
            let curr_node = curr_subtrie.nodes.get_mut(&curr_path).unwrap();

            match Self::find_next_to_leaf(&curr_path, curr_node, full_path) {
                // The leaf is not in the trie: nothing to do.
                FindNextToLeafOutcome::NotFound => return Ok(()),
                FindNextToLeafOutcome::BlindedNode(path) => {
                    return Err(SparseTrieErrorKind::BlindedNode(path).into())
                }
                FindNextToLeafOutcome::Found => {
                    leaf_path = curr_path;
                    leaf_subtrie_type = curr_subtrie_type;
                    break;
                }
                FindNextToLeafOutcome::ContinueFrom(next_path) => {
                    match curr_node {
                        SparseNode::Branch { .. } => {
                            paths_to_mark_dirty
                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));

                            // A tracked extension only matters if it sits directly
                            // above the branch we now track; drop it when the
                            // previous branch was deeper than the extension.
                            match (&branch_parent_path, &ext_grandparent_path) {
                                (Some(branch), Some(ext)) if branch.len() > ext.len() => {
                                    ext_grandparent_path = None;
                                    ext_grandparent_node = None;
                                }
                                _ => (),
                            };
                            branch_parent_path = Some(curr_path);
                            branch_parent_node = Some(curr_node.clone());
                        }
                        SparseNode::Extension { .. } => {
                            paths_to_mark_dirty
                                .push((SparseSubtrieType::from_path(&curr_path), curr_path));

                            ext_grandparent_path = Some(curr_path);
                            ext_grandparent_node = Some(curr_node.clone());
                        }
                        SparseNode::Empty | SparseNode::Leaf { .. } => {
                            unreachable!(
                                "find_next_to_leaf only continues to a branch or extension"
                            )
                        }
                    }

                    curr_path = next_path;

                    // Once the walk crosses into a lower subtrie it never returns to
                    // the upper one.
                    let next_subtrie_type = SparseSubtrieType::from_path(&curr_path);
                    if matches!(curr_subtrie_type, SparseSubtrieType::Upper) &&
                        matches!(next_subtrie_type, SparseSubtrieType::Lower(_))
                    {
                        curr_subtrie_type = next_subtrie_type;
                    }
                }
            };
        }

        // Before mutating anything: if removal would leave the parent branch with a
        // single remaining child that is blinded, the branch cannot be collapsed —
        // fail early, leaving the trie untouched.
        if let (Some(branch_path), Some(SparseNode::Branch { state_mask, blinded_mask, .. })) =
            (&branch_parent_path, &branch_parent_node)
        {
            let mut check_mask = *state_mask;
            let child_nibble = leaf_path.get_unchecked(branch_path.len());
            check_mask.unset_bit(child_nibble);

            if check_mask.count_bits() == 1 {
                let remaining_nibble =
                    check_mask.first_set_bit_index().expect("state mask is not empty");

                if blinded_mask.is_bit_set(remaining_nibble) {
                    let mut path = *branch_path;
                    path.push_unchecked(remaining_nibble);
                    return Err(SparseTrieErrorKind::BlindedNode(path).into());
                }
            }
        }

        self.prefix_set.insert(*full_path);
        let leaf_subtrie = match leaf_subtrie_type {
            SparseSubtrieType::Upper => &mut self.upper_subtrie,
            SparseSubtrieType::Lower(idx) => {
                self.lower_subtries[idx].as_revealed_mut().expect("lower subtrie is revealed")
            }
        };
        leaf_subtrie.inner.values.remove(full_path);
        // Invalidate cached hashes along the walked path.
        for (subtrie_type, path) in paths_to_mark_dirty {
            let node = match subtrie_type {
                SparseSubtrieType::Upper => self.upper_subtrie.nodes.get_mut(&path),
                SparseSubtrieType::Lower(idx) => self.lower_subtries[idx]
                    .as_revealed_mut()
                    .expect("lower subtrie is revealed")
                    .nodes
                    .get_mut(&path),
            }
            .expect("node exists");

            match node {
                SparseNode::Extension { state, .. } | SparseNode::Branch { state, .. } => {
                    *state = SparseNodeState::Dirty
                }
                SparseNode::Empty | SparseNode::Leaf { .. } => {
                    unreachable!(
                        "only branch and extension nodes can be marked dirty when removing a leaf"
                    )
                }
            }
        }
        self.remove_node(&leaf_path);

        // The leaf was the root: reset the trie to a single `Empty` root node.
        if leaf_path.is_empty() {
            self.upper_subtrie.nodes.insert(leaf_path, SparseNode::Empty);
            return Ok(())
        }

        // Rebuild the parent branch. When it drops to a single child, the branch
        // collapses into (or merges with) that child.
        if let (
            Some(branch_path),
            &Some(SparseNode::Branch { mut state_mask, blinded_mask, ref blinded_hashes, .. }),
        ) = (&branch_parent_path, &branch_parent_node)
        {
            let child_nibble = leaf_path.get_unchecked(branch_path.len());
            state_mask.unset_bit(child_nibble);

            let new_branch_node = if state_mask.count_bits() == 1 {
                let remaining_child_nibble =
                    state_mask.first_set_bit_index().expect("state mask is not empty");
                let mut remaining_child_path = *branch_path;
                remaining_child_path.push_unchecked(remaining_child_nibble);

                trace!(
                    target: "trie::parallel_sparse",
                    ?leaf_path,
                    ?branch_path,
                    ?remaining_child_path,
                    "Branch node has only one child",
                );

                // Re-checked here (also checked pre-mutation above) — collapsing into
                // a blinded child is impossible.
                if blinded_mask.is_bit_set(remaining_child_nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(remaining_child_path).into());
                }

                let remaining_child_node = self
                    .subtrie_for_path_mut(&remaining_child_path)
                    .nodes
                    .get(&remaining_child_path)
                    .unwrap();

                let (new_branch_node, remove_child) = Self::branch_changes_on_leaf_removal(
                    branch_path,
                    &remaining_child_path,
                    remaining_child_node,
                );

                if remove_child {
                    self.move_value_on_leaf_removal(
                        branch_path,
                        &new_branch_node,
                        &remaining_child_path,
                    );
                    self.remove_node(&remaining_child_path);
                }

                // The branch node no longer exists in its previous shape.
                if let Some(updates) = self.updates.as_mut() {
                    updates.updated_nodes.remove(branch_path);
                    updates.removed_nodes.insert(*branch_path);
                }

                new_branch_node
            } else {
                // More than one child remains: keep the branch, minus the removed
                // child, and mark it dirty.
                SparseNode::Branch {
                    state_mask,
                    blinded_mask,
                    blinded_hashes: blinded_hashes.clone(),
                    state: SparseNodeState::Dirty,
                }
            };

            let branch_subtrie = self.subtrie_for_path_mut(branch_path);
            branch_subtrie.nodes.insert(*branch_path, new_branch_node.clone());
            branch_parent_node = Some(new_branch_node);
        };

        // If an extension sat directly above the (possibly collapsed) branch, it may
        // need to absorb the branch's replacement.
        if let (Some(ext_path), Some(SparseNode::Extension { key: shortkey, .. })) =
            (ext_grandparent_path, &ext_grandparent_node)
        {
            let ext_subtrie = self.subtrie_for_path_mut(&ext_path);
            let branch_path = branch_parent_path.as_ref().unwrap();

            if let Some(new_ext_node) = Self::extension_changes_on_leaf_removal(
                &ext_path,
                shortkey,
                branch_path,
                branch_parent_node.as_ref().unwrap(),
            ) {
                ext_subtrie.nodes.insert(ext_path, new_ext_node.clone());
                self.move_value_on_leaf_removal(&ext_path, &new_ext_node, branch_path);
                self.remove_node(branch_path);
            }
        }

        Ok(())
    }
870
    /// Computes the trie root hash.
    ///
    /// Fast path: when no paths changed since the last computation and the root node's
    /// RLP is cached, the cached hash is returned directly. Otherwise lower subtrie
    /// hashes are updated first, then the upper subtrie is hashed up to the root.
    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
    fn root(&mut self) -> B256 {
        trace!(target: "trie::parallel_sparse", "Calculating trie root hash");

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::Root);

        if self.prefix_set.is_empty() &&
            let Some(rlp_node) = self
                .upper_subtrie
                .nodes
                .get(&Nibbles::default())
                .and_then(|node| node.cached_rlp_node())
        {
            return rlp_node
                .as_hash()
                .expect("RLP-encoding of the root node cannot be less than 32 bytes")
        }

        // Updates lower-subtrie hashes and leaves the unchanged (upper-trie) portion
        // of the prefix set back in `self.prefix_set`.
        self.update_subtrie_hashes();

        // Take the prefix set AFTER the subtrie update, which re-populates it.
        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
        let root_rlp = self.update_upper_subtrie_hashes(&mut prefix_set);

        root_rlp.as_hash().unwrap_or(EMPTY_ROOT_HASH)
    }
901
902 fn is_root_cached(&self) -> bool {
903 self.prefix_set.is_empty() &&
904 self.upper_subtrie
905 .nodes
906 .get(&Nibbles::default())
907 .is_some_and(|node| node.cached_rlp_node().is_some())
908 }
909
    /// Updates cached hashes of all changed lower subtries, sequentially or in
    /// parallel (rayon, `std` only) depending on `is_update_parallelism_enabled`.
    ///
    /// The unchanged (upper-trie) portion of the prefix set is put back into
    /// `self.prefix_set` for the subsequent upper-subtrie hash update.
    #[instrument(level = "trace", target = "trie::sparse::parallel", skip(self))]
    fn update_subtrie_hashes(&mut self) {
        trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateSubtrieHashes);

        let mut prefix_set = core::mem::take(&mut self.prefix_set).freeze();
        let num_changed_keys = prefix_set.len();
        // Takes ownership of every lower subtrie touched by the prefix set.
        let (mut changed_subtries, unchanged_prefix_set) =
            self.take_changed_lower_subtries(&mut prefix_set);

        #[cfg(feature = "metrics")]
        self.metrics.subtries_updated.record(changed_subtries.len() as f64);

        // Restore the untouched portion of the prefix set for the caller.
        self.prefix_set = unchanged_prefix_set;

        if !self.is_update_parallelism_enabled(num_changed_keys) {
            for changed_subtrie in &mut changed_subtries {
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
            }

            self.insert_changed_subtries(changed_subtries);
            return
        }

        #[cfg(not(feature = "std"))]
        unreachable!("nostd is checked by is_update_parallelism_enabled");

        #[cfg(feature = "std")]
        {
            use rayon::prelude::*;

            changed_subtries.par_iter_mut().for_each(|changed_subtrie| {
                #[cfg(feature = "metrics")]
                let start = Instant::now();
                changed_subtrie.subtrie.update_hashes(
                    &mut changed_subtrie.prefix_set,
                    &mut changed_subtrie.update_actions_buf,
                    &self.branch_node_masks,
                );
                #[cfg(feature = "metrics")]
                self.metrics.subtrie_hash_update_latency.record(start.elapsed());
            });

            self.insert_changed_subtries(changed_subtries);
        }
    }
967
968 fn get_leaf_value(&self, full_path: &Nibbles) -> Option<&Vec<u8>> {
969 if let Some(subtrie) = self.subtrie_for_path(full_path) &&
974 !subtrie.is_empty()
975 {
976 return subtrie.inner.values.get(full_path);
977 }
978
979 self.upper_subtrie.inner.values.get(full_path)
980 }
981
982 fn updates_ref(&self) -> Cow<'_, SparseTrieUpdates> {
983 self.updates.as_ref().map_or(Cow::Owned(SparseTrieUpdates::default()), Cow::Borrowed)
984 }
985
986 fn take_updates(&mut self) -> SparseTrieUpdates {
987 match self.updates.take() {
988 Some(updates) => {
989 self.updates = Some(SparseTrieUpdates::with_capacity(
991 updates.updated_nodes.len(),
992 updates.removed_nodes.len(),
993 ));
994 updates
995 }
996 None => SparseTrieUpdates::default(),
997 }
998 }
999
1000 fn wipe(&mut self) {
1001 self.upper_subtrie.wipe();
1002 for trie in &mut *self.lower_subtries {
1003 trie.wipe();
1004 }
1005 self.prefix_set = PrefixSetMut::all();
1006 self.updates = self.updates.is_some().then(SparseTrieUpdates::wiped);
1007 self.subtrie_heat.clear();
1008 }
1009
1010 fn clear(&mut self) {
1011 self.upper_subtrie.clear();
1012 self.upper_subtrie.nodes.insert(Nibbles::default(), SparseNode::Empty);
1013 for subtrie in &mut *self.lower_subtries {
1014 subtrie.clear();
1015 }
1016 self.prefix_set.clear();
1017 self.updates = None;
1018 self.branch_node_masks.clear();
1019 self.subtrie_heat.clear();
1020 #[cfg(feature = "trie-debug")]
1021 self.debug_recorder.reset();
1022 }
1025
    /// Checks whether a leaf exists at `full_path` without mutating the trie.
    ///
    /// Returns [`LeafLookup::Exists`] when the value is present (and matches
    /// `expected_value`, if given), [`LeafLookup::NonExistent`] when the trie proves
    /// the leaf's absence, and an error when the lookup hits a blinded node or the
    /// stored value does not match the expectation.
    fn find_leaf(
        &self,
        full_path: &Nibbles,
        expected_value: Option<&Vec<u8>>,
    ) -> Result<LeafLookup, LeafLookupError> {
        // Fast path: the upper subtrie's (or the leaf's lower subtrie's) values map
        // already contains the full path.
        if let Some(actual_value) = core::iter::once(self.upper_subtrie.as_ref())
            .chain(self.lower_subtrie_for_path(full_path))
            .filter_map(|subtrie| subtrie.inner.values.get(full_path))
            .next()
        {
            return expected_value
                .is_none_or(|v| v == actual_value)
                .then_some(LeafLookup::Exists)
                .ok_or_else(|| LeafLookupError::ValueMismatch {
                    path: *full_path,
                    expected: expected_value.cloned(),
                    actual: actual_value.clone(),
                })
        }

        // Otherwise walk from the root, proving either absence or hitting a blinded
        // node.
        let mut curr_path = Nibbles::new();
        let mut curr_subtrie = self.upper_subtrie.as_ref();
        let mut curr_subtrie_is_upper = true;

        loop {
            match curr_subtrie.nodes.get(&curr_path).unwrap() {
                SparseNode::Empty => return Ok(LeafLookup::NonExistent),
                SparseNode::Leaf { key, .. } => {
                    // A leaf on the search path that is not the target proves
                    // exclusion; finding the exact target here would contradict the
                    // values-map miss above.
                    let mut found_full_path = curr_path;
                    found_full_path.extend(key);
                    assert!(&found_full_path != full_path, "target leaf {full_path:?} found, even though value wasn't in values hashmap");
                    return Ok(LeafLookup::NonExistent)
                }
                SparseNode::Extension { key, .. } => {
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.extend(key);
                    // Diverging from the extension key proves exclusion.
                    if !full_path.starts_with(&curr_path) {
                        return Ok(LeafLookup::NonExistent)
                    }
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    if full_path.len() == curr_path.len() {
                        return Ok(LeafLookup::NonExistent)
                    }
                    let nibble = full_path.get_unchecked(curr_path.len());
                    // An unset state bit means no child on this nibble: exclusion.
                    if !state_mask.is_bit_set(nibble) {
                        return Ok(LeafLookup::NonExistent)
                    }
                    curr_path.push_unchecked(nibble);
                    // Cannot descend into a blinded child.
                    if blinded_mask.is_bit_set(nibble) {
                        return Err(LeafLookupError::BlindedNode {
                            path: curr_path,
                            hash: blinded_hashes[nibble as usize],
                        })
                    }
                }
            }

            // Cross from the upper subtrie into the matching lower subtrie once the
            // path is long enough.
            if curr_subtrie_is_upper &&
                let Some(lower_subtrie) = self.lower_subtrie_for_path(&curr_path)
            {
                curr_subtrie = lower_subtrie;
                curr_subtrie_is_upper = false;
            }
        }
    }
1108
1109 fn shrink_nodes_to(&mut self, size: usize) {
1110 let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1114 let size_per_subtrie = size / total_subtries;
1115
1116 self.upper_subtrie.shrink_nodes_to(size_per_subtrie);
1118
1119 for subtrie in &mut *self.lower_subtries {
1121 subtrie.shrink_nodes_to(size_per_subtrie);
1122 }
1123
1124 self.branch_node_masks.shrink_to(size);
1126 }
1127
1128 fn shrink_values_to(&mut self, size: usize) {
1129 let total_subtries = 1 + NUM_LOWER_SUBTRIES;
1133 let size_per_subtrie = size / total_subtries;
1134
1135 self.upper_subtrie.shrink_values_to(size_per_subtrie);
1137
1138 for subtrie in &mut *self.lower_subtries {
1140 subtrie.shrink_values_to(size_per_subtrie);
1141 }
1142 }
1143
1144 fn size_hint(&self) -> usize {
1146 let upper_count = self.upper_subtrie.nodes.len();
1147 let lower_count: usize = self
1148 .lower_subtries
1149 .iter()
1150 .filter_map(|s| s.as_revealed_ref())
1151 .map(|s| s.nodes.len())
1152 .sum();
1153 upper_count + lower_count
1154 }
1155
    /// Returns the trie's memory footprint.
    ///
    /// NOTE(review): this relies on delegating to an inherent `memory_size` method
    /// (inherent methods win over trait methods in Rust's method resolution). The
    /// inherent impl is not visible in this chunk — confirm it exists, otherwise this
    /// call would recurse infinitely.
    fn memory_size(&self) -> usize {
        self.memory_size()
    }
1159
    /// Prunes the trie to at most `max_depth` levels, converting deeper subtrees that
    /// have cached hashes into blinded entries on their parent branch. Returns the
    /// number of subtree roots converted.
    fn prune(&mut self, max_depth: usize) -> usize {
        #[cfg(feature = "trie-debug")]
        self.debug_recorder.reset();

        self.subtrie_heat.decay_and_reset();

        // Roots of subtrees that were actually pruned; asserted prefix-free below.
        let mut effective_pruned_roots = Vec::<Nibbles>::new();
        // DFS stack of (path, depth) pairs, starting at the root.
        let mut stack: SmallVec<[(Nibbles, usize); 32]> = SmallVec::new();
        stack.push((Nibbles::default(), 0));

        while let Some((path, depth)) = stack.pop() {
            // Skip hot (recently modified) lower subtries.
            //
            // NOTE(review): children are only pushed from nodes at `depth < max_depth`
            // (with `depth + 1`), so `depth > max_depth` appears unreachable — confirm
            // whether this guard was meant to use a different comparison.
            if depth > max_depth &&
                let SparseSubtrieType::Lower(idx) = SparseSubtrieType::from_path(&path) &&
                self.subtrie_heat.is_hot(idx)
            {
                continue;
            }

            let Some(subtrie) = self.subtrie_for_path_mut_untracked(&path) else { continue };
            let Some(node) = subtrie.nodes.get_mut(&path) else { continue };

            match node {
                // Nothing below an empty or leaf node to prune.
                SparseNode::Empty | SparseNode::Leaf { .. } => {}
                SparseNode::Extension { key, state, .. } => {
                    if depth == max_depth {
                        // Only nodes with a cached hash can be blinded away.
                        let Some(hash) = state.cached_hash() else { continue };
                        subtrie.nodes.remove(&path);

                        // Record the removed extension as a blinded child on its
                        // parent branch.
                        let parent_path = path.slice(0..path.len() - 1);
                        let SparseNode::Branch { blinded_mask, blinded_hashes, .. } =
                            subtrie.nodes.get_mut(&parent_path).unwrap()
                        else {
                            panic!("expected branch node at path {parent_path:?}");
                        };

                        let nibble = path.last().unwrap();
                        blinded_mask.set_bit(nibble);
                        blinded_hashes[nibble as usize] = hash;

                        effective_pruned_roots.push(path);
                    } else {
                        // Descend through the extension.
                        let mut child = path;
                        child.extend(key);
                        stack.push((child, depth + 1));
                    }
                }
                SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. } => {
                    if depth == max_depth {
                        // Blind every non-blinded child that has a cached hash, then
                        // write the accumulated masks back to the branch.
                        let mut blinded_mask = *blinded_mask;
                        let mut blinded_hashes = blinded_hashes.clone();
                        for nibble in state_mask.iter() {
                            if blinded_mask.is_bit_set(nibble) {
                                continue;
                            }
                            let mut child = path;
                            child.push_unchecked(nibble);

                            let Entry::Occupied(entry) = self
                                .subtrie_for_path_mut_untracked(&child)
                                .unwrap()
                                .nodes
                                .entry(child)
                            else {
                                panic!("expected node at path {child:?}");
                            };

                            // Children without a cached hash stay in place.
                            let Some(hash) = entry.get().cached_hash() else {
                                continue;
                            };
                            entry.remove();
                            blinded_mask.set_bit(nibble);
                            blinded_hashes[nibble as usize] = hash;
                            effective_pruned_roots.push(child);
                        }

                        // Re-borrow the branch (the child loop above needed exclusive
                        // access to other subtries) and store the updated masks.
                        let SparseNode::Branch {
                            blinded_mask: old_blinded_mask,
                            blinded_hashes: old_blinded_hashes,
                            ..
                        } = self
                            .subtrie_for_path_mut_untracked(&path)
                            .unwrap()
                            .nodes
                            .get_mut(&path)
                            .unwrap()
                        else {
                            unreachable!("expected branch node at path {path:?}");
                        };
                        *old_blinded_mask = blinded_mask;
                        *old_blinded_hashes = blinded_hashes;
                    } else {
                        // Descend into all non-blinded children.
                        for nibble in state_mask.iter() {
                            if blinded_mask.is_bit_set(nibble) {
                                continue;
                            }
                            let mut child = path;
                            child.push_unchecked(nibble);
                            stack.push((child, depth + 1));
                        }
                    }
                }
            }
        }

        if effective_pruned_roots.is_empty() {
            return 0;
        }

        let nodes_converted = effective_pruned_roots.len();

        // Sort by (subtrie, path) so upper roots come first and roots of the same
        // lower subtrie are contiguous.
        effective_pruned_roots.sort_unstable_by(|path_a, path_b| {
            let subtrie_type_a = SparseSubtrieType::from_path(path_a);
            let subtrie_type_b = SparseSubtrieType::from_path(path_b);
            subtrie_type_a.cmp(&subtrie_type_b).then(path_a.cmp(path_b))
        });

        let num_upper_roots = effective_pruned_roots
            .iter()
            .position(|p| !SparseSubtrieType::path_len_is_upper(p.len()))
            .unwrap_or(effective_pruned_roots.len());

        let roots_upper = &effective_pruned_roots[..num_upper_roots];
        let roots_lower = &effective_pruned_roots[num_upper_roots..];

        // The retain passes below rely on no pruned root being an ancestor of another.
        debug_assert!(
            {
                let mut all_roots: Vec<_> = effective_pruned_roots.clone();
                all_roots.sort_unstable();
                all_roots.windows(2).all(|w| !w[1].starts_with(&w[0]))
            },
            "prune roots must be prefix-free"
        );

        // Lower subtries living entirely under a pruned upper root are dropped whole.
        if !roots_upper.is_empty() {
            for subtrie in &mut *self.lower_subtries {
                let should_clear = subtrie.as_revealed_ref().is_some_and(|s| {
                    let search_idx = roots_upper.partition_point(|root| root <= &s.path);
                    search_idx > 0 && s.path.starts_with(&roots_upper[search_idx - 1])
                });
                if should_clear {
                    subtrie.clear();
                }
            }
        }

        // Drop descendants of pruned roots (the roots themselves were already turned
        // into blinded hashes above), and values under any pruned root.
        self.upper_subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_upper, p));
        self.upper_subtrie.inner.values.retain(|p, _| {
            !starts_with_pruned_in(roots_upper, p) && !starts_with_pruned_in(roots_lower, p)
        });

        // Process lower roots grouped per subtrie.
        for roots_group in roots_lower.chunk_by(|path_a, path_b| {
            SparseSubtrieType::from_path(path_a) == SparseSubtrieType::from_path(path_b)
        }) {
            let subtrie_idx = path_subtrie_index_unchecked(&roots_group[0]);

            let Some(subtrie) = self.lower_subtries[subtrie_idx].as_revealed_mut() else {
                continue;
            };

            subtrie.nodes.retain(|p, _| !is_strict_descendant_in(roots_group, p));
            subtrie.inner.values.retain(|p, _| !starts_with_pruned_in(roots_group, p));
        }

        // Branch masks for pruned subtrees are no longer needed.
        self.branch_node_masks.retain(|p, _| {
            if SparseSubtrieType::path_len_is_upper(p.len()) {
                !starts_with_pruned_in(roots_upper, p)
            } else {
                !starts_with_pruned_in(roots_lower, p) && !starts_with_pruned_in(roots_upper, p)
            }
        });

        nodes_converted
    }
1353
    /// Applies a batch of leaf updates to the trie, draining `updates` and re-inserting any
    /// entry that cannot be applied yet because it requires revealing a blinded node.
    ///
    /// For every such deferred entry, `proof_required_fn` is invoked with the proof target
    /// key and the minimum proof path length so the caller can fetch the missing proof and
    /// call this again.
    fn update_leaves(
        &mut self,
        updates: &mut alloy_primitives::map::B256Map<crate::LeafUpdate>,
        mut proof_required_fn: impl FnMut(B256, u8),
    ) -> SparseTrieResult<()> {
        use crate::{provider::NoRevealProvider, LeafUpdate};

        #[cfg(feature = "trie-debug")]
        let recorded_updates: Vec<_> =
            updates.iter().map(|(k, v)| (*k, LeafUpdateRecord::from(v))).collect();
        #[cfg(feature = "trie-debug")]
        let mut recorded_proof_targets: Vec<(B256, u8)> = Vec::new();

        // Drain into a Vec so deferred entries can be re-inserted into `updates` while
        // iterating.
        let drained: Vec<_> = updates.drain().collect();

        for (key, update) in drained {
            let full_path = Nibbles::unpack(key);

            match update {
                LeafUpdate::Changed(value) => {
                    if value.is_empty() {
                        // An empty value encodes a leaf removal.
                        match self.remove_leaf(&full_path, NoRevealProvider) {
                            Ok(()) => {}
                            Err(e) => {
                                if let Some(path) = Self::get_retriable_path(&e) {
                                    // Blinded node hit: request a proof and retry later.
                                    let (target_key, min_len) =
                                        Self::proof_target_for_path(key, &full_path, &path);
                                    proof_required_fn(target_key, min_len);
                                    #[cfg(feature = "trie-debug")]
                                    recorded_proof_targets.push((target_key, min_len));
                                    updates.insert(key, LeafUpdate::Changed(value));
                                } else {
                                    return Err(e);
                                }
                            }
                        }
                    } else {
                        if let Err(e) = self.update_leaf(full_path, value.clone(), NoRevealProvider)
                        {
                            if let Some(path) = Self::get_retriable_path(&e) {
                                // Blinded node hit: request a proof and retry later.
                                let (target_key, min_len) =
                                    Self::proof_target_for_path(key, &full_path, &path);
                                proof_required_fn(target_key, min_len);
                                #[cfg(feature = "trie-debug")]
                                recorded_proof_targets.push((target_key, min_len));
                                updates.insert(key, LeafUpdate::Changed(value));
                            } else {
                                return Err(e);
                            }
                        }
                    }
                }
                LeafUpdate::Touched => {
                    // Touched leaves must be present in the revealed trie; request a proof if
                    // the lookup hits a blinded node.
                    match self.find_leaf(&full_path, None) {
                        Err(LeafLookupError::BlindedNode { path, .. }) => {
                            let (target_key, min_len) =
                                Self::proof_target_for_path(key, &full_path, &path);
                            proof_required_fn(target_key, min_len);
                            #[cfg(feature = "trie-debug")]
                            recorded_proof_targets.push((target_key, min_len));
                            updates.insert(key, LeafUpdate::Touched);
                        }
                        Ok(_) | Err(LeafLookupError::ValueMismatch { .. }) => {}
                    }
                }
            }
        }

        #[cfg(feature = "trie-debug")]
        self.debug_recorder.record(RecordedOp::UpdateLeaves {
            updates: recorded_updates,
            remaining_keys: updates.keys().copied().collect(),
            proof_targets: recorded_proof_targets,
        });

        Ok(())
    }
1438
    /// Takes the debug recorder out of the trie, leaving a fresh default recorder in place.
    #[cfg(feature = "trie-debug")]
    fn take_debug_recorder(&mut self) -> TrieDebugRecorder {
        core::mem::take(&mut self.debug_recorder)
    }
1443
1444 fn commit_updates(
1445 &mut self,
1446 updated: &HashMap<Nibbles, BranchNodeCompact>,
1447 removed: &HashSet<Nibbles>,
1448 ) {
1449 self.branch_node_masks.reserve(updated.len());
1453 for (path, node) in updated {
1454 self.branch_node_masks.insert(
1455 *path,
1456 BranchNodeMasks { tree_mask: node.tree_mask, hash_mask: node.hash_mask },
1457 );
1458 }
1459 for path in removed {
1460 self.branch_node_masks.remove(path);
1461 }
1462 }
1463}
1464
1465impl ParallelSparseTrie {
    /// Builder-style setter: returns `self` with the given parallelism thresholds applied.
    pub const fn with_parallelism_thresholds(mut self, thresholds: ParallelismThresholds) -> Self {
        self.parallelism_thresholds = thresholds;
        self
    }
1471
    /// Returns `true` if retention of trie updates is enabled (i.e. `self.updates` is
    /// populated).
    const fn updates_enabled(&self) -> bool {
        self.updates.is_some()
    }
1476
    /// Returns `true` if revealing `num_nodes` nodes should use the parallel path.
    ///
    /// Parallelism is only available with the `std` feature; without it this is always
    /// `false`.
    const fn is_reveal_parallelism_enabled(&self, num_nodes: usize) -> bool {
        #[cfg(not(feature = "std"))]
        {
            let _ = num_nodes;
            return false;
        }

        #[cfg(feature = "std")]
        {
            num_nodes >= self.parallelism_thresholds.min_revealed_nodes
        }
    }
1491
    /// Returns `true` if updating `num_changed_keys` keys should use the parallel path.
    ///
    /// Parallelism is only available with the `std` feature; without it this is always
    /// `false`.
    const fn is_update_parallelism_enabled(&self, num_changed_keys: usize) -> bool {
        #[cfg(not(feature = "std"))]
        {
            let _ = num_changed_keys;
            return false;
        }

        #[cfg(feature = "std")]
        {
            num_changed_keys >= self.parallelism_thresholds.min_updated_nodes
        }
    }
1506
    /// If the error indicates a blinded or provider-missing node that could be revealed and
    /// retried, returns the path of that node; otherwise returns `None`.
    const fn get_retriable_path(e: &SparseTrieError) -> Option<Nibbles> {
        match e.kind() {
            SparseTrieErrorKind::BlindedNode(path) |
            SparseTrieErrorKind::NodeNotFoundInProvider { path } => Some(*path),
            _ => None,
        }
    }
1520
1521 fn nibbles_to_padded_b256(path: &Nibbles) -> B256 {
1523 let mut bytes = [0u8; 32];
1524 path.pack_to(&mut bytes);
1525 B256::from(bytes)
1526 }
1527
1528 fn proof_target_for_path(full_key: B256, full_path: &Nibbles, path: &Nibbles) -> (B256, u8) {
1534 let min_len = (path.len() as u8).min(64);
1535 let target_key =
1536 if full_path.starts_with(path) { full_key } else { Self::nibbles_to_padded_b256(path) };
1537 (target_key, min_len)
1538 }
1539
    /// Creates a new [`ParallelSparseTrie`] from the given root node.
    ///
    /// `masks` are the tree/hash masks for the root, if known, and `retain_updates` enables
    /// collection of [`SparseTrieUpdates`] while mutating the trie.
    pub fn from_root(
        root: TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        retain_updates: bool,
    ) -> SparseTrieResult<Self> {
        Self::default().with_root(root, masks, retain_updates)
    }
1561
    /// Returns a reference to the revealed lower subtrie covering `path`, or `None` if the
    /// path belongs to the upper trie or the subtrie has not been revealed.
    fn lower_subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
        match SparseSubtrieType::from_path(path) {
            SparseSubtrieType::Upper => None,
            SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_ref(),
        }
    }
1571
    /// Returns a mutable reference to the lower subtrie covering `path`, revealing it first
    /// if necessary, or `None` if the path belongs to the upper trie.
    ///
    /// Also marks the subtrie as modified for heat tracking.
    fn lower_subtrie_for_path_mut(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
        match SparseSubtrieType::from_path(path) {
            SparseSubtrieType::Upper => None,
            SparseSubtrieType::Lower(idx) => {
                self.lower_subtries[idx].reveal(path);
                self.subtrie_heat.mark_modified(idx);
                Some(self.lower_subtries[idx].as_revealed_mut().expect("just revealed"))
            }
        }
    }
1588
1589 fn subtrie_for_path(&self, path: &Nibbles) -> Option<&SparseSubtrie> {
1594 if SparseSubtrieType::path_len_is_upper(path.len()) {
1595 Some(&self.upper_subtrie)
1596 } else {
1597 self.lower_subtrie_for_path(path)
1598 }
1599 }
1600
    /// Returns a mutable reference to the subtrie owning `path`, revealing a lower subtrie
    /// (and marking it modified) if necessary.
    ///
    /// The `unwrap` is safe because [`Self::lower_subtrie_for_path_mut`] only returns `None`
    /// for upper-trie paths, which are handled by the first branch.
    fn subtrie_for_path_mut(&mut self, path: &Nibbles) -> &mut SparseSubtrie {
        if SparseSubtrieType::path_len_is_upper(path.len()) {
            &mut self.upper_subtrie
        } else {
            self.lower_subtrie_for_path_mut(path).unwrap()
        }
    }
1616
1617 fn subtrie_for_path_mut_untracked(&mut self, path: &Nibbles) -> Option<&mut SparseSubtrie> {
1620 if SparseSubtrieType::path_len_is_upper(path.len()) {
1621 Some(&mut self.upper_subtrie)
1622 } else {
1623 match SparseSubtrieType::from_path(path) {
1624 SparseSubtrieType::Upper => None,
1625 SparseSubtrieType::Lower(idx) => self.lower_subtries[idx].as_revealed_mut(),
1626 }
1627 }
1628 }
1629
    /// Determines the next step when walking from `from_node` (located at `from_path`)
    /// towards the leaf with full path `leaf_full_path`.
    ///
    /// The caller guarantees that `leaf_full_path` is at least as long as `from_path` and
    /// has it as a prefix (asserted below).
    fn find_next_to_leaf(
        from_path: &Nibbles,
        from_node: &SparseNode,
        leaf_full_path: &Nibbles,
    ) -> FindNextToLeafOutcome {
        debug_assert!(leaf_full_path.len() >= from_path.len());
        debug_assert!(leaf_full_path.starts_with(from_path));

        match from_node {
            SparseNode::Empty => FindNextToLeafOutcome::NotFound,
            SparseNode::Leaf { key, .. } => {
                // A leaf matches only if its full path (node path + key) equals the target.
                let mut found_full_path = *from_path;
                found_full_path.extend(key);

                if &found_full_path == leaf_full_path {
                    return FindNextToLeafOutcome::Found
                }
                FindNextToLeafOutcome::NotFound
            }
            SparseNode::Extension { key, .. } => {
                // The target ends exactly at this node's position, so it cannot lie below it.
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.extend(key);

                // The extension key must be a prefix of the remaining target path.
                if !leaf_full_path.starts_with(&child_path) {
                    return FindNextToLeafOutcome::NotFound
                }
                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                // The target ends exactly at this node's position, so it cannot lie below it.
                if leaf_full_path.len() == from_path.len() {
                    return FindNextToLeafOutcome::NotFound
                }

                // The branch must have a child on the target's next nibble.
                let nibble = leaf_full_path.get_unchecked(from_path.len());
                if !state_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::NotFound
                }

                let mut child_path = *from_path;
                child_path.push_unchecked(nibble);

                // The child exists but is only known by hash: the caller must reveal it.
                if blinded_mask.is_bit_set(nibble) {
                    return FindNextToLeafOutcome::BlindedNode(child_path);
                }

                FindNextToLeafOutcome::ContinueFrom(child_path)
            }
        }
    }
1692
    /// After a leaf removal collapses a parent into a leaf node, moves that leaf's value from
    /// the lower subtrie (where the previous child stored it) into the upper subtrie, when
    /// the collapsed parent lives in the upper subtrie.
    ///
    /// If the parent is itself in a lower subtrie the value is already stored in the right
    /// place and nothing moves.
    fn move_value_on_leaf_removal(
        &mut self,
        parent_path: &Nibbles,
        new_parent_node: &SparseNode,
        prev_child_path: &Nibbles,
    ) {
        // Parent is in a lower subtrie: no cross-subtrie move is needed.
        if SparseSubtrieType::from_path(parent_path).lower_index().is_some() {
            return;
        }

        if let SparseNode::Leaf { key, .. } = new_parent_node {
            let Some(prev_child_subtrie) = self.lower_subtrie_for_path_mut(prev_child_path) else {
                return;
            };

            // Reconstruct the leaf's full path to locate its value.
            let mut leaf_full_path = *parent_path;
            leaf_full_path.extend(key);

            let val = prev_child_subtrie.inner.values.remove(&leaf_full_path).expect("ParallelSparseTrie is in an inconsistent state, expected value on subtrie which wasn't found");
            self.upper_subtrie.inner.values.insert(leaf_full_path, val);
        }
    }
1721
    /// Removes the node at `path` from its subtrie, maintaining lower-subtrie bookkeeping.
    ///
    /// Only leaf and extension nodes are expected to be removed this way; removing anything
    /// else indicates a bug and panics.
    fn remove_node(&mut self, path: &Nibbles) {
        let subtrie = self.subtrie_for_path_mut(path);
        let node = subtrie.nodes.remove(path);

        // Upper-subtrie removals need no extra bookkeeping.
        let Some(idx) = SparseSubtrieType::from_path(path).lower_index() else {
            return;
        };

        match node {
            Some(SparseNode::Leaf { .. }) => {
                // Removing the last node of a lower subtrie deallocates the subtrie itself.
                if subtrie.nodes.is_empty() {
                    self.lower_subtries[idx].clear();
                }
            }
            Some(SparseNode::Extension { key, .. }) => {
                // If the removed extension was the subtrie's root, the subtrie's root path
                // moves down to the extension's target.
                if &subtrie.path == path {
                    subtrie.path.extend(&key);
                }
            }
            _ => panic!("Expected to remove a leaf or extension, but removed {node:?}"),
        }
    }
1762
1763 fn branch_changes_on_leaf_removal(
1772 parent_path: &Nibbles,
1773 remaining_child_path: &Nibbles,
1774 remaining_child_node: &SparseNode,
1775 ) -> (SparseNode, bool) {
1776 debug_assert!(remaining_child_path.len() > parent_path.len());
1777 debug_assert!(remaining_child_path.starts_with(parent_path));
1778
1779 let remaining_child_nibble = remaining_child_path.get_unchecked(parent_path.len());
1780
1781 match remaining_child_node {
1784 SparseNode::Empty => {
1785 panic!("remaining child must have been revealed already")
1786 }
1787 SparseNode::Leaf { key, .. } => {
1791 let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1792 new_key.extend(key);
1793 (SparseNode::new_leaf(new_key), true)
1794 }
1795 SparseNode::Extension { key, .. } => {
1799 let mut new_key = Nibbles::from_nibbles_unchecked([remaining_child_nibble]);
1800 new_key.extend(key);
1801 (SparseNode::new_ext(new_key), true)
1802 }
1803 SparseNode::Branch { .. } => (
1806 SparseNode::new_ext(Nibbles::from_nibbles_unchecked([remaining_child_nibble])),
1807 false,
1808 ),
1809 }
1810 }
1811
1812 fn extension_changes_on_leaf_removal(
1821 parent_path: &Nibbles,
1822 parent_key: &Nibbles,
1823 child_path: &Nibbles,
1824 child: &SparseNode,
1825 ) -> Option<SparseNode> {
1826 debug_assert!(child_path.len() > parent_path.len());
1827 debug_assert!(child_path.starts_with(parent_path));
1828
1829 match child {
1832 SparseNode::Empty => {
1833 panic!("child must be revealed")
1834 }
1835 SparseNode::Leaf { key, .. } => {
1841 let mut new_key = *parent_key;
1842 new_key.extend(key);
1843 Some(SparseNode::new_leaf(new_key))
1844 }
1845 SparseNode::Extension { key, .. } => {
1848 let mut new_key = *parent_key;
1849 new_key.extend(key);
1850 Some(SparseNode::new_ext(new_key))
1851 }
1852 SparseNode::Branch { .. } => None,
1854 }
1855 }
1856
    /// Applies a stream of [`SparseTrieUpdatesAction`]s produced by subtrie hashing to the
    /// retained [`SparseTrieUpdates`], if update retention is enabled (no-op otherwise).
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
    fn apply_subtrie_update_actions(
        &mut self,
        update_actions: impl Iterator<Item = SparseTrieUpdatesAction>,
    ) {
        if let Some(updates) = self.updates.as_mut() {
            // Reserve using the iterator's lower size bound to avoid repeated map growth.
            let additional = update_actions.size_hint().0;
            updates.updated_nodes.reserve(additional);
            updates.removed_nodes.reserve(additional);
            for action in update_actions {
                match action {
                    SparseTrieUpdatesAction::InsertRemoved(path) => {
                        // A removed node can no longer be counted as updated.
                        updates.updated_nodes.remove(&path);
                        updates.removed_nodes.insert(path);
                    }
                    SparseTrieUpdatesAction::RemoveUpdated(path) => {
                        updates.updated_nodes.remove(&path);
                    }
                    SparseTrieUpdatesAction::InsertUpdated(path, branch_node) => {
                        // An updated node can no longer be counted as removed.
                        updates.updated_nodes.insert(path, branch_node);
                        updates.removed_nodes.remove(&path);
                    }
                }
            }
        };
    }
1885
    /// Recomputes RLP nodes for the upper subtrie and returns the trie root's RLP node.
    ///
    /// Lower subtrie roots encountered at the boundary depth must already have cached RLP
    /// nodes, i.e. the lower subtries must have been hashed first. Any update actions
    /// collected during the walk are applied to the retained updates when enabled.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, ret)]
    fn update_upper_subtrie_hashes(&mut self, prefix_set: &mut PrefixSet) -> RlpNode {
        trace!(target: "trie::parallel_sparse", "Updating upper subtrie hashes");

        // Start the depth-first hashing walk from the trie root.
        debug_assert!(self.upper_subtrie.inner.buffers.path_stack.is_empty());
        self.upper_subtrie.inner.buffers.path_stack.push(RlpNodePathStackItem {
            path: Nibbles::default(),
            is_in_prefix_set: None,
        });

        #[cfg(feature = "metrics")]
        let start = Instant::now();

        // Borrow a pooled action buffer when update retention is enabled.
        let mut update_actions_buf =
            self.updates_enabled().then(|| self.update_actions_buffers.pop().unwrap_or_default());

        while let Some(stack_item) = self.upper_subtrie.inner.buffers.path_stack.pop() {
            let path = stack_item.path;
            let node = if path.len() < UPPER_TRIE_MAX_DEPTH {
                self.upper_subtrie.nodes.get_mut(&path).expect("upper subtrie node must exist")
            } else {
                // At the boundary depth the node is a lower subtrie root; it must have been
                // hashed already, so its RLP node is cached.
                let index = path_subtrie_index_unchecked(&path);
                let node = self.lower_subtries[index]
                    .as_revealed_mut()
                    .expect("lower subtrie must exist")
                    .nodes
                    .get_mut(&path)
                    .expect("lower subtrie node must exist");
                debug_assert!(
                    node.cached_rlp_node().is_some(),
                    "Lower subtrie root node {node:?} at path {path:?} has no cached RLP node"
                );
                node
            };

            self.upper_subtrie.inner.rlp_node(
                prefix_set,
                &mut update_actions_buf,
                stack_item,
                node,
                &self.branch_node_masks,
            );
        }

        if let Some(mut update_actions_buf) = update_actions_buf {
            self.apply_subtrie_update_actions(
                #[allow(clippy::iter_with_drain)]
                update_actions_buf.drain(..),
            );
            // Return the drained buffer to the pool for reuse.
            self.update_actions_buffers.push(update_actions_buf);
        }

        #[cfg(feature = "metrics")]
        self.metrics.subtrie_upper_hash_latency.record(start.elapsed());

        // The walk must leave exactly the root's RLP node on the stack.
        debug_assert_eq!(self.upper_subtrie.inner.buffers.rlp_node_stack.len(), 1);
        self.upper_subtrie.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
    }
1950
    /// Takes ownership of every lower subtrie affected by `prefix_set` (or whose root RLP
    /// node is stale), splitting the prefix set into per-subtrie sets.
    ///
    /// Returns the changed subtries together with the prefix-set entries that did not belong
    /// to any taken subtrie — those are retained for the upper-subtrie hashing pass.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(prefix_set_len = prefix_set.len()))]
    fn take_changed_lower_subtries(
        &mut self,
        prefix_set: &mut PrefixSet,
    ) -> (Vec<ChangedSubtrie>, PrefixSetMut) {
        if prefix_set.is_empty() {
            return Default::default();
        }

        // Iterate the sorted prefix-set keys in lockstep with the index-ordered subtries.
        let prefix_set_clone = prefix_set.clone();
        let mut prefix_set_iter = prefix_set_clone.into_iter().copied().peekable();
        let mut changed_subtries = Vec::new();
        let mut unchanged_prefix_set = PrefixSetMut::default();
        let updates_enabled = self.updates_enabled();

        for (index, subtrie) in self.lower_subtries.iter_mut().enumerate() {
            // Take the subtrie if the prefix set touches its root, or its root node has no
            // cached RLP node (meaning it still needs hashing).
            if let Some(subtrie) = subtrie.take_revealed_if(|subtrie| {
                prefix_set.contains(&subtrie.path) ||
                    subtrie
                        .nodes
                        .get(&subtrie.path)
                        .is_some_and(|n| n.cached_rlp_node().is_none())
            }) {
                let prefix_set = if prefix_set.all() {
                    // An "all" prefix set cannot be split; everything is considered changed.
                    unchanged_prefix_set = PrefixSetMut::all();
                    PrefixSetMut::all()
                } else {
                    // Split off the keys inside this subtrie; keys sorting before the
                    // subtrie's root belong to no lower subtrie and are kept as unchanged.
                    let mut new_prefix_set = Vec::new();
                    while let Some(key) = prefix_set_iter.peek() {
                        if key.starts_with(&subtrie.path) {
                            new_prefix_set.push(prefix_set_iter.next().unwrap());
                        } else if new_prefix_set.is_empty() && key < &subtrie.path {
                            unchanged_prefix_set.insert(prefix_set_iter.next().unwrap());
                        } else {
                            break
                        }
                    }
                    PrefixSetMut::from(new_prefix_set)
                }
                .freeze();

                // The subtrie's root will be rehashed, so mark the position of its root node
                // as changed for the upper-subtrie pass.
                match subtrie.nodes.get(&subtrie.path) {
                    Some(SparseNode::Extension { key, .. } | SparseNode::Leaf { key, .. }) => {
                        unchanged_prefix_set.insert(subtrie.path.join(key));
                    }
                    Some(SparseNode::Branch { .. }) => {
                        unchanged_prefix_set.insert(subtrie.path);
                    }
                    _ => {}
                }

                // Borrow a pooled action buffer when update retention is enabled.
                let update_actions_buf =
                    updates_enabled.then(|| self.update_actions_buffers.pop().unwrap_or_default());

                changed_subtries.push(ChangedSubtrie {
                    index,
                    subtrie,
                    prefix_set,
                    update_actions_buf,
                });
            }
        }

        // Remaining keys sort after the last taken subtrie and are unchanged.
        unchanged_prefix_set.extend_keys(prefix_set_iter);

        (changed_subtries, unchanged_prefix_set)
    }
2048
2049 #[cfg(test)]
2051 fn all_nodes(&self) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
2052 let mut nodes = vec![];
2053 for subtrie in self.lower_subtries.iter().filter_map(LowerSparseSubtrie::as_revealed_ref) {
2054 nodes.extend(subtrie.nodes.iter())
2055 }
2056 nodes.extend(self.upper_subtrie.nodes.iter());
2057 nodes
2058 }
2059
    /// Reveals a node in the upper subtrie, forwarding any parts of it that cross the
    /// upper/lower boundary into the appropriate lower subtries.
    ///
    /// Nodes whose position is not reachable from the currently revealed upper trie are
    /// silently ignored.
    fn reveal_upper_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
    ) -> SparseTrieResult<()> {
        // Ignore nodes that cannot be reached from the revealed upper trie.
        if !self.is_path_reachable_from_upper(&path) {
            return Ok(())
        }

        if !self.upper_subtrie.reveal_node(path, node, masks, None)? {
            // The upper subtrie did not accept the node; only a branch whose expanded
            // position crosses into a lower subtrie still needs forwarding below.
            if let TrieNodeV2::Branch(branch) = node {
                if branch.key.is_empty() {
                    return Ok(());
                }

                if SparseSubtrieType::path_len_is_upper(path.len() + branch.key.len()) {
                    return Ok(())
                }
            } else {
                return Ok(());
            }
        }

        // Forward boundary-crossing parts of the node into their lower subtries.
        match node {
            TrieNodeV2::Branch(branch) => {
                let mut branch_path = path;
                branch_path.extend(&branch.key);

                if !SparseSubtrieType::path_len_is_upper(branch_path.len()) {
                    // The branch's expanded position lands in a lower subtrie: reveal it
                    // there.
                    self.lower_subtrie_for_path_mut(&branch_path)
                        .expect("branch_path must have a lower subtrie")
                        .reveal_branch(
                            branch_path,
                            branch.state_mask,
                            &branch.stack,
                            masks,
                            branch.branch_rlp_node.clone(),
                        )?
                } else if !SparseSubtrieType::path_len_is_upper(branch_path.len() + 1) {
                    // The branch sits exactly on the boundary: each embedded (non-hash)
                    // child lands in a lower subtrie and is revealed there individually.
                    for (stack_ptr, idx) in branch.state_mask.iter().enumerate() {
                        let mut child_path = branch_path;
                        child_path.push_unchecked(idx);
                        let child = &branch.stack[stack_ptr];

                        if !child.is_hash() {
                            self.lower_subtrie_for_path_mut(&child_path)
                                .expect("child_path must have a lower subtrie")
                                .reveal_node(
                                    child_path,
                                    &TrieNodeV2::decode(&mut branch.stack[stack_ptr].as_ref())?,
                                    None,
                                    None,
                                )?;
                        }
                    }
                }
            }
            TrieNodeV2::Extension(ext) => {
                // An extension's child may cross into a lower subtrie.
                let mut child_path = path;
                child_path.extend(&ext.key);
                if let Some(subtrie) = self.lower_subtrie_for_path_mut(&child_path) {
                    subtrie.reveal_node(
                        child_path,
                        &TrieNodeV2::decode(&mut ext.child.as_ref())?,
                        None,
                        None,
                    )?;
                }
            }
            TrieNodeV2::EmptyRoot | TrieNodeV2::Leaf(_) => (),
        }

        Ok(())
    }
2168
    /// Returns changed lower subtries (previously taken via
    /// [`Self::take_changed_lower_subtries`]) to the trie, applying any update actions they
    /// collected during hashing.
    #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all)]
    fn insert_changed_subtries(
        &mut self,
        changed_subtries: impl IntoIterator<Item = ChangedSubtrie>,
    ) {
        for ChangedSubtrie { index, subtrie, update_actions_buf, .. } in changed_subtries {
            if let Some(mut update_actions_buf) = update_actions_buf {
                self.apply_subtrie_update_actions(
                    #[allow(clippy::iter_with_drain)]
                    update_actions_buf.drain(..),
                );
                // Return the drained buffer to the pool for reuse.
                self.update_actions_buffers.push(update_actions_buf);
            }

            self.lower_subtries[index] = LowerSparseSubtrie::Revealed(subtrie);
            self.subtrie_heat.mark_modified(index);
        }
    }
2189
2190 pub fn memory_size(&self) -> usize {
2202 let mut size = core::mem::size_of::<Self>();
2203
2204 size += self.upper_subtrie.memory_size();
2206
2207 for subtrie in self.lower_subtries.iter() {
2209 size += subtrie.memory_size();
2210 }
2211
2212 size += self.prefix_set.len() * core::mem::size_of::<Nibbles>();
2214
2215 size += self.branch_node_masks.len() *
2217 (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeMasks>());
2218
2219 if let Some(updates) = &self.updates {
2221 size += updates.updated_nodes.len() *
2222 (core::mem::size_of::<Nibbles>() + core::mem::size_of::<BranchNodeCompact>());
2223 size += updates.removed_nodes.len() * core::mem::size_of::<Nibbles>();
2224 }
2225
2226 for buf in &self.update_actions_buffers {
2228 size += buf.capacity() * core::mem::size_of::<SparseTrieUpdatesAction>();
2229 }
2230
2231 size
2232 }
2233
2234 fn is_path_reachable_from_upper(&self, path: &Nibbles) -> bool {
2236 let mut current = Nibbles::default();
2237 while current.len() < path.len() {
2238 let Some(node) = self.upper_subtrie.nodes.get(¤t) else { return false };
2239 match node {
2240 SparseNode::Branch { state_mask, .. } => {
2241 if !state_mask.is_bit_set(path.get_unchecked(current.len())) {
2242 return false
2243 }
2244
2245 current.push_unchecked(path.get_unchecked(current.len()));
2246 }
2247 SparseNode::Extension { key, .. } => {
2248 if *key != path.slice(current.len()..current.len() + key.len()) {
2249 return false
2250 }
2251 current.extend(key);
2252 }
2253 SparseNode::Empty | SparseNode::Leaf { .. } => return false,
2254 }
2255 }
2256 true
2257 }
2258
2259 fn is_boundary_leaf_reachable(
2265 upper_nodes: &HashMap<Nibbles, SparseNode>,
2266 path: &Nibbles,
2267 node: &TrieNodeV2,
2268 ) -> bool {
2269 debug_assert_eq!(path.len(), UPPER_TRIE_MAX_DEPTH);
2270
2271 if !matches!(node, TrieNodeV2::Leaf(_)) {
2272 return true
2273 }
2274
2275 let parent_path = path.slice(..path.len() - 1);
2276 let leaf_nibble = path.get_unchecked(path.len() - 1);
2277
2278 match upper_nodes.get(&parent_path) {
2279 Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2280 _ => false,
2281 }
2282 }
2283
2284 fn reachable_subtries(&self) -> SubtriesBitmap {
2287 let mut reachable = SubtriesBitmap::default();
2288
2289 let mut stack = Vec::new();
2290 stack.push(Nibbles::default());
2291
2292 while let Some(current) = stack.pop() {
2293 let Some(node) = self.upper_subtrie.nodes.get(¤t) else { continue };
2294 match node {
2295 SparseNode::Branch { state_mask, .. } => {
2296 for idx in state_mask.iter() {
2297 let mut next = current;
2298 next.push_unchecked(idx);
2299 if next.len() >= UPPER_TRIE_MAX_DEPTH {
2300 reachable.set(path_subtrie_index_unchecked(&next));
2301 } else {
2302 stack.push(next);
2303 }
2304 }
2305 }
2306 SparseNode::Extension { key, .. } => {
2307 let mut next = current;
2308 next.extend(key);
2309 if next.len() >= UPPER_TRIE_MAX_DEPTH {
2310 reachable.set(path_subtrie_index_unchecked(&next));
2311 } else {
2312 stack.push(next);
2313 }
2314 }
2315 SparseNode::Empty | SparseNode::Leaf { .. } => {}
2316 };
2317 }
2318
2319 reachable
2320 }
2321}
2322
/// A bitmap with one bit per lower subtrie, backed by a [`U256`].
#[derive(Clone, Default, PartialEq, Eq, Debug)]
struct SubtriesBitmap(U256);
2326
impl SubtriesBitmap {
    /// Sets the bit for the subtrie at `idx`.
    #[inline]
    fn set(&mut self, idx: usize) {
        debug_assert!(idx < NUM_LOWER_SUBTRIES);
        self.0.set_bit(idx, true);
    }

    /// Returns `true` if the bit for the subtrie at `idx` is set.
    #[inline]
    fn get(&self, idx: usize) -> bool {
        debug_assert!(idx < NUM_LOWER_SUBTRIES);
        self.0.bit(idx)
    }

    /// Clears all bits.
    #[inline]
    const fn clear(&mut self) {
        self.0 = U256::ZERO;
    }
}
2348
/// Per-subtrie modification tracking used to decide which lower subtries are "hot".
#[derive(Clone, PartialEq, Eq, Debug)]
struct SubtrieModifications {
    /// Saturating heat counter per lower subtrie: bumped on each modification, decayed when
    /// a cycle passes without one.
    heat: [u8; NUM_LOWER_SUBTRIES],
    /// Subtries modified since the last decay/reset cycle.
    modified: SubtriesBitmap,
}
2364
impl Default for SubtrieModifications {
    fn default() -> Self {
        // All heat counters start at zero, with no subtrie marked as modified.
        Self { heat: [0; NUM_LOWER_SUBTRIES], modified: SubtriesBitmap::default() }
    }
}
2370
2371impl SubtrieModifications {
2372 #[inline]
2374 fn mark_modified(&mut self, idx: usize) {
2375 debug_assert!(idx < NUM_LOWER_SUBTRIES);
2376 self.modified.set(idx);
2377 self.heat[idx] = self.heat[idx].saturating_add(1);
2378 }
2379
2380 #[inline]
2382 fn is_hot(&self, idx: usize) -> bool {
2383 debug_assert!(idx < NUM_LOWER_SUBTRIES);
2384 self.heat[idx] > 0
2385 }
2386
2387 fn decay_and_reset(&mut self) {
2390 for (idx, heat) in self.heat.iter_mut().enumerate() {
2391 if !self.modified.get(idx) {
2392 *heat = heat.saturating_sub(1);
2393 }
2394 }
2395 self.modified.clear();
2396 }
2397
2398 const fn clear(&mut self) {
2400 self.heat = [0; NUM_LOWER_SUBTRIES];
2401 self.modified.clear();
2402 }
2403}
2404
/// A single subtrie of the [`ParallelSparseTrie`]: either the upper subtrie or one of the
/// lower subtries rooted at a fixed-depth path prefix.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrie {
    /// The path at which this subtrie is rooted (empty for the upper subtrie).
    pub(crate) path: Nibbles,
    /// Revealed nodes of this subtrie, keyed by their full path from the trie root.
    nodes: HashMap<Nibbles, SparseNode>,
    /// Leaf values and reusable hashing buffers belonging to this subtrie.
    inner: SparseSubtrieInner,
}
2422
/// Outcome of a single step while walking from a node towards a target leaf.
enum FindNextToLeafOutcome {
    /// The current node is the target leaf.
    Found,
    /// The walk should continue from the given child path.
    ContinueFrom(Nibbles),
    /// The target leaf is not present under the current node.
    NotFound,
    /// The walk hit a blinded (unrevealed) child at the given path.
    BlindedNode(Nibbles),
}
2437
2438impl SparseSubtrie {
    /// Creates an empty subtrie rooted at `path`.
    pub(crate) fn new(path: Nibbles) -> Self {
        Self { path, ..Default::default() }
    }
2443
    /// Returns `true` if this subtrie contains no revealed nodes.
    pub(crate) fn is_empty(&self) -> bool {
        self.nodes.is_empty()
    }
2448
2449 fn is_child_same_level(current_path: &Nibbles, child_path: &Nibbles) -> bool {
2451 let current_level = core::mem::discriminant(&SparseSubtrieType::from_path(current_path));
2452 let child_level = core::mem::discriminant(&SparseSubtrieType::from_path(child_path));
2453 current_level == child_level
2454 }
2455
2456 fn is_leaf_reachable_from_parent(&self, path: &Nibbles) -> bool {
2465 if path.is_empty() {
2466 return true
2467 }
2468
2469 let parent_path = path.slice(..path.len() - 1);
2470 let leaf_nibble = path.get_unchecked(path.len() - 1);
2471
2472 match self.nodes.get(&parent_path) {
2473 Some(SparseNode::Branch { state_mask, .. }) => state_mask.is_bit_set(leaf_nibble),
2474 _ => false,
2475 }
2476 }
2477
    /// Inserts or updates the leaf `full_path -> value` within this subtrie, creating or
    /// splitting intermediate nodes as needed.
    ///
    /// `full_path` must be prefixed by this subtrie's root path.
    ///
    /// Returns an error if the walk hits a blinded node that must be revealed first.
    pub fn update_leaf(&mut self, full_path: Nibbles, value: Vec<u8>) -> SparseTrieResult<()> {
        debug_assert!(full_path.starts_with(&self.path));

        // Fast path: the leaf already exists, only its value changes.
        if let Entry::Occupied(mut e) = self.inner.values.entry(full_path) {
            e.insert(value);
            return Ok(())
        }

        // Walk from the subtrie root, restructuring nodes step by step until the leaf's
        // slot exists (or the walk runs off the revealed structure).
        let mut current = Some(self.path);

        while let Some(current_path) = current.as_mut() {
            match self.update_next_node(current_path, &full_path)? {
                LeafUpdateStep::Continue => {}
                LeafUpdateStep::NodeNotFound | LeafUpdateStep::Complete { .. } => break,
            }
        }

        self.inner.values.insert(full_path, value);

        Ok(())
    }
2517
    /// Performs one step of a leaf-insertion walk: inspects the node at `current` and either
    /// descends (advancing `current` in place) or restructures the trie to create the leaf's
    /// slot.
    ///
    /// Returns which action was taken, including the paths of any nodes inserted, or an
    /// error if the walk hits a blinded branch child.
    fn update_next_node(
        &mut self,
        current: &mut Nibbles,
        path: &Nibbles,
    ) -> SparseTrieResult<LeafUpdateStep> {
        debug_assert!(path.starts_with(&self.path));
        debug_assert!(current.starts_with(&self.path));
        debug_assert!(path.starts_with(current));
        let Some(node) = self.nodes.get_mut(current) else {
            return Ok(LeafUpdateStep::NodeNotFound);
        };

        match node {
            SparseNode::Empty => {
                // Empty subtrie: the new leaf becomes its root node.
                let path = path.slice(self.path.len()..);
                *node = SparseNode::new_leaf(path);
                Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
            }
            SparseNode::Leaf { key: current_key, .. } => {
                current.extend(current_key);

                debug_assert!(current != path, "we already checked leaf presence in the beginning");

                // Diverging leaf: split into extension (over the shared prefix) -> branch ->
                // two leaves.
                let common = current.common_prefix_length(path);

                // The existing leaf node is rewritten in place as the extension that covers
                // the shared prefix below its original position.
                let new_ext_key = current.slice(current.len() - current_key.len()..common);
                *node = SparseNode::new_ext(new_ext_key);

                self.nodes.reserve(3);
                let branch_path = current.slice(..common);
                let new_leaf_path = path.slice(..=common);
                let existing_leaf_path = current.slice(..=common);

                self.nodes.insert(
                    branch_path,
                    SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    ),
                );
                self.nodes.insert(new_leaf_path, SparseNode::new_leaf(path.slice(common + 1..)));
                self.nodes
                    .insert(existing_leaf_path, SparseNode::new_leaf(current.slice(common + 1..)));

                Ok(LeafUpdateStep::complete_with_insertions(vec![
                    branch_path,
                    new_leaf_path,
                    existing_leaf_path,
                ]))
            }
            SparseNode::Extension { key, .. } => {
                current.extend(key);

                if !path.starts_with(current) {
                    // The new leaf diverges inside this extension: shorten the extension to
                    // the shared prefix and insert a branch with the new leaf under it.
                    let common = current.common_prefix_length(path);
                    *key = current.slice(current.len() - key.len()..common);

                    self.nodes.reserve(3);
                    let branch_path = current.slice(..common);
                    let new_leaf_path = path.slice(..=common);
                    let branch = SparseNode::new_split_branch(
                        current.get_unchecked(common),
                        path.get_unchecked(common),
                    );

                    self.nodes.insert(branch_path, branch);

                    let new_leaf = SparseNode::new_leaf(path.slice(common + 1..));
                    self.nodes.insert(new_leaf_path, new_leaf);

                    let mut inserted_nodes = vec![branch_path, new_leaf_path];

                    // The remainder of the original extension key (if any) becomes a new,
                    // shorter extension under the branch.
                    let key = current.slice(common + 1..);
                    if !key.is_empty() {
                        let ext_path = current.slice(..=common);
                        self.nodes.insert(ext_path, SparseNode::new_ext(key));
                        inserted_nodes.push(ext_path);
                    }

                    return Ok(LeafUpdateStep::complete_with_insertions(inserted_nodes))
                }

                Ok(LeafUpdateStep::Continue)
            }
            SparseNode::Branch { state_mask, blinded_mask, .. } => {
                let nibble = path.get_unchecked(current.len());
                current.push_unchecked(nibble);

                if !state_mask.is_bit_set(nibble) {
                    // No child on this nibble yet: attach the new leaf directly.
                    state_mask.set_bit(nibble);
                    let new_leaf = SparseNode::new_leaf(path.slice(current.len()..));
                    self.nodes.insert(*current, new_leaf);
                    return Ok(LeafUpdateStep::complete_with_insertions(vec![*current]))
                }

                // The child exists but is only known by hash: it must be revealed before
                // the walk can descend into it.
                if blinded_mask.is_bit_set(nibble) {
                    return Err(SparseTrieErrorKind::BlindedNode(*current).into());
                }

                Ok(LeafUpdateStep::Continue)
            }
        }
    }
2639
2640 fn reveal_branch(
2642 &mut self,
2643 path: Nibbles,
2644 state_mask: TrieMask,
2645 children: &[RlpNode],
2646 masks: Option<BranchNodeMasks>,
2647 rlp_node: Option<RlpNode>,
2648 ) -> SparseTrieResult<()> {
2649 match self.nodes.entry(path) {
2650 Entry::Occupied(_) => {
2651 return Ok(());
2653 }
2654 Entry::Vacant(entry) => {
2655 let state =
2656 match rlp_node.as_ref() {
2657 Some(rlp_node) => SparseNodeState::Cached {
2658 rlp_node: rlp_node.clone(),
2659 store_in_db_trie: Some(masks.is_some_and(|m| {
2660 !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
2661 })),
2662 },
2663 None => SparseNodeState::Dirty,
2664 };
2665
2666 let mut blinded_mask = TrieMask::default();
2667 let mut blinded_hashes = Box::new([B256::ZERO; 16]);
2668
2669 for (stack_ptr, idx) in state_mask.iter().enumerate() {
2670 let mut child_path = path;
2671 child_path.push_unchecked(idx);
2672 let child = &children[stack_ptr];
2673
2674 if let Some(hash) = child.as_hash() {
2675 blinded_mask.set_bit(idx);
2676 blinded_hashes[idx as usize] = hash;
2677 }
2678 }
2679
2680 entry.insert(SparseNode::Branch {
2681 state_mask,
2682 state,
2683 blinded_mask,
2684 blinded_hashes,
2685 });
2686 }
2687 }
2688
2689 for (stack_ptr, idx) in state_mask.iter().enumerate() {
2692 let mut child_path = path;
2693 child_path.push_unchecked(idx);
2694 let child = &children[stack_ptr];
2695 if !child.is_hash() && Self::is_child_same_level(&path, &child_path) {
2696 self.reveal_node(
2699 child_path,
2700 &TrieNodeV2::decode(&mut child.as_ref())?,
2701 None,
2702 None,
2703 )?;
2704 }
2705 }
2706
2707 Ok(())
2708 }
2709
    /// Reveals a single decoded proof node at `path` within this subtrie.
    ///
    /// Returns `Ok(true)` if the node was inserted, `Ok(false)` if it was skipped (already
    /// revealed, unreachable from its parent, or its leaf value already present).
    ///
    /// `masks` are the branch node hash/tree masks from the database, used to decide whether a
    /// cached node must be stored in the DB trie. `hash_from_upper` is a hash handed down when
    /// the node was referenced from the upper subtrie.
    fn reveal_node(
        &mut self,
        path: Nibbles,
        node: &TrieNodeV2,
        masks: Option<BranchNodeMasks>,
        hash_from_upper: Option<B256>,
    ) -> SparseTrieResult<bool> {
        debug_assert!(path.starts_with(&self.path));

        // Already revealed — nothing to do.
        if self.nodes.contains_key(&path) {
            return Ok(false);
        }

        // Resolve the node's known hash: prefer one handed down by the caller; otherwise, for
        // nodes that have a parent within this subtrie (i.e. not the trie root and not at the
        // upper/lower boundary depth), take the hash from the parent branch's blinded-child
        // table, clearing the blinded bit since the child is being revealed now.
        let hash = if let Some(hash) = hash_from_upper {
            Some(hash)
        } else if path.len() != UPPER_TRIE_MAX_DEPTH && !path.is_empty() {
            let Some(SparseNode::Branch { state_mask, blinded_mask, blinded_hashes, .. }) =
                self.nodes.get_mut(&path.slice(0..path.len() - 1))
            else {
                // No revealed branch parent — the node is unreachable, skip it.
                return Ok(false);
            };
            let nibble = path.last().unwrap();
            if !state_mask.is_bit_set(nibble) {
                // Parent branch has no child at this nibble — skip.
                return Ok(false);
            }

            blinded_mask.is_bit_set(nibble).then(|| {
                blinded_mask.unset_bit(nibble);
                blinded_hashes[nibble as usize]
            })
        } else {
            None
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            ?masks,
            "Revealing node",
        );

        match node {
            TrieNodeV2::EmptyRoot => {
                // An empty root can only appear at the root of an empty trie.
                debug_assert!(path.is_empty());
                debug_assert!(self.path.is_empty());
                self.nodes.insert(path, SparseNode::Empty);
            }
            TrieNodeV2::Branch(branch) => {
                if branch.key.is_empty() {
                    // Plain branch node (no extension prefix).
                    self.reveal_branch(
                        path,
                        branch.state_mask,
                        &branch.stack,
                        masks,
                        hash.as_ref().map(RlpNode::word_rlp),
                    )?;
                    return Ok(true);
                }

                // Non-empty key: the node encodes an extension followed by a branch. Insert the
                // extension first; its cached state is derived from the known hash, if any.
                self.nodes.insert(
                    path,
                    SparseNode::Extension {
                        key: branch.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                // Stored in the DB trie iff the database masks are non-empty.
                                store_in_db_trie: Some(masks.is_some_and(|m| {
                                    !m.hash_mask.is_empty() || !m.tree_mask.is_empty()
                                })),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );

                let mut branch_path = path;
                branch_path.extend(&branch.key);

                // If the branch falls into a different (lower) subtrie, stop here; it will be
                // revealed by that subtrie.
                if !Self::is_child_same_level(&path, &branch_path) {
                    return Ok(true);
                }

                self.reveal_branch(
                    branch_path,
                    branch.state_mask,
                    &branch.stack,
                    masks,
                    branch.branch_rlp_node.clone(),
                )?;
            }
            // NOTE(review): standalone extensions appear to always be fused into
            // `TrieNodeV2::Branch` with a non-empty key — confirm against the decoder.
            TrieNodeV2::Extension(_) => unreachable!(),
            TrieNodeV2::Leaf(leaf) => {
                // A leaf below the boundary must be reachable from its revealed parent.
                if path.len() != UPPER_TRIE_MAX_DEPTH && !self.is_leaf_reachable_from_parent(&path)
                {
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        "Leaf not reachable from parent branch, skipping",
                    );
                    return Ok(false)
                }

                // The leaf value is keyed by the full key: node path + leaf key remainder.
                let mut full_key = path;
                full_key.extend(&leaf.key);

                match self.inner.values.entry(full_key) {
                    Entry::Occupied(_) => {
                        trace!(
                            target: "trie::parallel_sparse",
                            ?path,
                            ?full_key,
                            "Leaf full key value already present, skipping",
                        );
                        return Ok(false)
                    }
                    Entry::Vacant(entry) => {
                        entry.insert(leaf.value.clone());
                    }
                }

                self.nodes.insert(
                    path,
                    SparseNode::Leaf {
                        key: leaf.key,
                        state: hash
                            .as_ref()
                            .map(|hash| SparseNodeState::Cached {
                                rlp_node: RlpNode::word_rlp(hash),
                                // Leaves are never stored in the DB trie.
                                store_in_db_trie: Some(false),
                            })
                            .unwrap_or(SparseNodeState::Dirty),
                    },
                );
            }
        }

        Ok(true)
    }
2866
2867 #[instrument(level = "trace", target = "trie::parallel_sparse", skip_all, fields(root = ?self.path), ret)]
2889 fn update_hashes(
2890 &mut self,
2891 prefix_set: &mut PrefixSet,
2892 update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
2893 branch_node_masks: &BranchNodeMasksMap,
2894 ) -> RlpNode {
2895 trace!(target: "trie::parallel_sparse", "Updating subtrie hashes");
2896
2897 debug_assert!(prefix_set.iter().all(|path| path.starts_with(&self.path)));
2898
2899 debug_assert!(self.inner.buffers.path_stack.is_empty());
2900 self.inner
2901 .buffers
2902 .path_stack
2903 .push(RlpNodePathStackItem { path: self.path, is_in_prefix_set: None });
2904
2905 while let Some(stack_item) = self.inner.buffers.path_stack.pop() {
2906 let path = stack_item.path;
2907 let node = self
2908 .nodes
2909 .get_mut(&path)
2910 .unwrap_or_else(|| panic!("node at path {path:?} does not exist"));
2911
2912 self.inner.rlp_node(prefix_set, update_actions, stack_item, node, branch_node_masks);
2913 }
2914
2915 debug_assert_eq!(self.inner.buffers.rlp_node_stack.len(), 1);
2916 self.inner.buffers.rlp_node_stack.pop().unwrap().rlp_node
2917 }
2918
2919 fn wipe(&mut self) {
2922 self.nodes.clear();
2923 self.nodes.insert(Nibbles::default(), SparseNode::Empty);
2924 self.inner.clear();
2925 }
2926
2927 pub(crate) fn clear(&mut self) {
2929 self.nodes.clear();
2930 self.inner.clear();
2931 }
2932
    /// Shrinks the capacity of the node map towards `size`, keeping at least enough capacity
    /// for the entries currently stored.
    pub(crate) fn shrink_nodes_to(&mut self, size: usize) {
        self.nodes.shrink_to(size);
    }
2937
    /// Shrinks the capacity of the leaf value map towards `size`, keeping at least enough
    /// capacity for the entries currently stored.
    pub(crate) fn shrink_values_to(&mut self, size: usize) {
        self.inner.values.shrink_to(size);
    }
2942
2943 pub(crate) fn memory_size(&self) -> usize {
2945 let mut size = core::mem::size_of::<Self>();
2946
2947 for (path, node) in &self.nodes {
2949 size += core::mem::size_of::<Nibbles>();
2950 size += path.len(); size += node.memory_size();
2952 }
2953
2954 for (path, value) in &self.inner.values {
2956 size += core::mem::size_of::<Nibbles>();
2957 size += path.len(); size += core::mem::size_of::<Vec<u8>>() + value.capacity();
2959 }
2960
2961 size += self.inner.buffers.memory_size();
2963
2964 size
2965 }
2966}
2967
/// State of a subtrie that is kept separate from the node map so the two can be mutably
/// borrowed independently (see the hash-update driver loop).
#[derive(Clone, PartialEq, Eq, Debug, Default)]
struct SparseSubtrieInner {
    /// Leaf values keyed by their full key path (leaf node path + leaf key remainder).
    values: HashMap<Nibbles, Vec<u8>>,
    /// Reusable scratch buffers for the iterative RLP computation.
    buffers: SparseSubtrieBuffers,
}
2978
impl SparseSubtrieInner {
    /// Computes the RLP node for `node` at `stack_item.path`, reusing cached results for paths
    /// not in `prefix_set`.
    ///
    /// If a required child RLP node is not yet on `buffers.rlp_node_stack`, the current path is
    /// re-pushed onto `buffers.path_stack` together with the missing children and the function
    /// returns early; the caller's driver loop processes the children first and then retries
    /// this node (already-computed children are cached, so retries are cheap). On success the
    /// result is pushed onto `buffers.rlp_node_stack` and the node's state is set to `Cached`.
    ///
    /// When `update_actions` is `Some`, database-trie updates for changed branch nodes are
    /// appended to it.
    fn rlp_node(
        &mut self,
        prefix_set: &mut PrefixSet,
        update_actions: &mut Option<Vec<SparseTrieUpdatesAction>>,
        mut stack_item: RlpNodePathStackItem,
        node: &mut SparseNode,
        branch_node_masks: &BranchNodeMasksMap,
    ) {
        let path = stack_item.path;
        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node,
            "Calculating node RLP"
        );

        // Memoized prefix-set membership for the current path; computed at most once per item.
        let mut prefix_set_contains = |path: &Nibbles| {
            *stack_item.is_in_prefix_set.get_or_insert_with(|| prefix_set.contains(path))
        };

        let (rlp_node, node_type) = match node {
            SparseNode::Empty => (RlpNode::word_rlp(&EMPTY_ROOT_HASH), SparseNodeType::Empty),
            SparseNode::Leaf { key, state } => {
                // Leaf values are stored under the full key: node path + leaf key remainder.
                let mut path = path;
                path.extend(key);
                let value = self.values.get(&path);

                // Use the cached RLP when the leaf is unchanged, or when no value is present
                // (the leaf exists only through its cached hash).
                let cached_rlp_node = state.cached_rlp_node();
                let use_cached =
                    cached_rlp_node.is_some() && (!prefix_set_contains(&path) || value.is_none());

                if let Some(rlp_node) = use_cached.then(|| cached_rlp_node.unwrap()) {
                    (rlp_node.clone(), SparseNodeType::Leaf)
                } else {
                    let value = value.expect("leaf value must exist in subtrie");
                    self.buffers.rlp_buf.clear();
                    let rlp_node = LeafNodeRef { key, value }.rlp(&mut self.buffers.rlp_buf);
                    // Leaves are never stored in the DB trie.
                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: Some(false),
                    };
                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?key,
                        value = %alloy_primitives::hex::encode(value),
                        ?rlp_node,
                        "Calculated leaf RLP node",
                    );
                    (rlp_node, SparseNodeType::Leaf)
                }
            }
            SparseNode::Extension { key, state } => {
                let mut child_path = path;
                child_path.extend(key);
                // Reuse the cached RLP if the extension is not affected by any change.
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    (
                        rlp_node.clone(),
                        SparseNodeType::Extension { store_in_db_trie: Some(store_in_db_trie) },
                    )
                } else if self.buffers.rlp_node_stack.last().is_some_and(|e| e.path == child_path) {
                    // The child's RLP node is available — consume it and encode the extension.
                    let RlpNodeStackItem { path: _, rlp_node: child, node_type: child_node_type } =
                        self.buffers.rlp_node_stack.pop().unwrap();
                    self.buffers.rlp_buf.clear();
                    let rlp_node =
                        ExtensionNodeRef::new(key, &child).rlp(&mut self.buffers.rlp_buf);

                    // An extension's DB-trie flag mirrors that of its child.
                    let store_in_db_trie_value = child_node_type.store_in_db_trie();

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?child_path,
                        ?child_node_type,
                        "Extension node"
                    );

                    *state = SparseNodeState::Cached {
                        rlp_node: rlp_node.clone(),
                        store_in_db_trie: store_in_db_trie_value,
                    };

                    (
                        rlp_node,
                        SparseNodeType::Extension {
                            store_in_db_trie: store_in_db_trie_value,
                        },
                    )
                } else {
                    // Child not computed yet: re-queue this node (with its prefix-set result
                    // memoized) after the child, then bail out.
                    self.buffers.path_stack.extend([
                        RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        },
                        RlpNodePathStackItem { path: child_path, is_in_prefix_set: None },
                    ]);
                    return
                }
            }
            SparseNode::Branch { state_mask, state, blinded_mask, blinded_hashes } => {
                // Reuse the cached RLP if the branch is not affected by any change.
                if let Some((rlp_node, store_in_db_trie)) = state
                    .cached_rlp_node()
                    .zip(state.store_in_db_trie())
                    .filter(|_| !prefix_set_contains(&path))
                {
                    let node_type =
                        SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie) };

                    trace!(
                        target: "trie::parallel_sparse",
                        ?path,
                        ?node_type,
                        ?rlp_node,
                        "Adding node to RLP node stack (cached branch)"
                    );

                    self.buffers.rlp_node_stack.push(RlpNodeStackItem {
                        path,
                        rlp_node: rlp_node.clone(),
                        node_type,
                    });
                    return
                }

                // Only record DB-trie updates for changed branches when requested.
                let retain_updates = update_actions.is_some() && prefix_set_contains(&path);

                // Collect the paths of non-blinded (revealed) children; blinded children keep
                // their stored hashes and never need recomputation.
                self.buffers.branch_child_buf.clear();
                for bit in state_mask.iter().rev() {
                    let mut child = path;
                    child.push_unchecked(bit);

                    if !blinded_mask.is_bit_set(bit) {
                        self.buffers.branch_child_buf.push(child);
                    }
                }

                self.buffers.branch_value_stack_buf.resize(state_mask.len(), Default::default());

                let mut tree_mask = TrieMask::default();
                let mut hash_mask = TrieMask::default();
                let mut hashes = Vec::new();

                // Lazily fetched database masks for this path, looked up at most once.
                let mut path_masks_storage = None;
                let mut path_masks =
                    || *path_masks_storage.get_or_insert_with(|| branch_node_masks.get(&path));

                for (i, child_nibble) in state_mask.iter().enumerate().rev() {
                    let mut child_path = path;
                    child_path.push_unchecked(child_nibble);

                    let (child, child_node_type) = if blinded_mask.is_bit_set(child_nibble) {
                        // Blinded child: use its stored hash directly.
                        (
                            RlpNode::word_rlp(&blinded_hashes[child_nibble as usize]),
                            SparseNodeType::Hash,
                        )
                    } else if self
                        .buffers
                        .rlp_node_stack
                        .last()
                        .is_some_and(|e| e.path == child_path)
                    {
                        let RlpNodeStackItem { path: _, rlp_node, node_type } =
                            self.buffers.rlp_node_stack.pop().unwrap();

                        (rlp_node, node_type)
                    } else {
                        // A child is missing: re-queue this branch and all of its revealed
                        // children, then bail out; already-computed children are cached.
                        self.buffers.path_stack.push(RlpNodePathStackItem {
                            path,
                            is_in_prefix_set: Some(prefix_set_contains(&path)),
                        });
                        self.buffers.path_stack.extend(
                            self.buffers
                                .branch_child_buf
                                .drain(..)
                                .map(|path| RlpNodePathStackItem { path, is_in_prefix_set: None }),
                        );
                        return
                    };

                    if retain_updates {
                        // Tree-mask bit: set when the child is itself stored in the DB trie,
                        // or (for blinded children) when the database masks say so.
                        let should_set_tree_mask_bit =
                            if let Some(store_in_db_trie) = child_node_type.store_in_db_trie() {
                                store_in_db_trie
                            } else {
                                child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.tree_mask.is_bit_set(child_nibble)
                                    })
                            };
                        if should_set_tree_mask_bit {
                            tree_mask.set_bit(child_nibble);
                        }
                        // Hash-mask bit: only branch children (or blinded children whose DB
                        // hash mask is set) contribute a stored hash.
                        let hash = child.as_hash().filter(|_| {
                            child_node_type.is_branch() ||
                                (child_node_type.is_hash() &&
                                    path_masks().is_some_and(|masks| {
                                        masks.hash_mask.is_bit_set(child_nibble)
                                    }))
                        });
                        if let Some(hash) = hash {
                            hash_mask.set_bit(child_nibble);
                            hashes.push(hash);
                        }
                    }

                    // Children are visited in reverse order, but stored at their forward index.
                    self.buffers.branch_value_stack_buf[i] = child;
                }

                trace!(
                    target: "trie::parallel_sparse",
                    ?path,
                    ?tree_mask,
                    ?hash_mask,
                    "Branch node masks"
                );

                self.buffers.rlp_buf.clear();
                let branch_node_ref =
                    BranchNodeRef::new(&self.buffers.branch_value_stack_buf, *state_mask);
                let rlp_node = branch_node_ref.rlp(&mut self.buffers.rlp_buf);

                // Emit a DB-trie update action for changed non-root branches.
                let store_in_db_trie_value = if let Some(update_actions) =
                    update_actions.as_mut().filter(|_| retain_updates && !path.is_empty())
                {
                    let store_in_db_trie = !tree_mask.is_empty() || !hash_mask.is_empty();
                    if store_in_db_trie {
                        // Hashes were pushed in reverse child order; restore forward order.
                        hashes.reverse();
                        let branch_node =
                            BranchNodeCompact::new(*state_mask, tree_mask, hash_mask, hashes, None);
                        update_actions
                            .push(SparseTrieUpdatesAction::InsertUpdated(path, branch_node));
                    } else {
                        // Branch no longer qualifies for DB storage: record a removal if it was
                        // previously stored, otherwise drop any pending update.
                        let prev_had_masks = path_masks()
                            .is_some_and(|m| !m.tree_mask.is_empty() || !m.hash_mask.is_empty());
                        if prev_had_masks {
                            update_actions.push(SparseTrieUpdatesAction::InsertRemoved(path));
                        } else {
                            update_actions.push(SparseTrieUpdatesAction::RemoveUpdated(path));
                        }
                    }

                    store_in_db_trie
                } else {
                    false
                };

                *state = SparseNodeState::Cached {
                    rlp_node: rlp_node.clone(),
                    store_in_db_trie: Some(store_in_db_trie_value),
                };

                (
                    rlp_node,
                    SparseNodeType::Branch { store_in_db_trie: Some(store_in_db_trie_value) },
                )
            }
        };

        trace!(
            target: "trie::parallel_sparse",
            ?path,
            ?node_type,
            ?rlp_node,
            "Adding node to RLP node stack"
        );
        self.buffers.rlp_node_stack.push(RlpNodeStackItem { path, rlp_node, node_type });
    }

    /// Clears values and scratch buffers, keeping allocations for reuse.
    fn clear(&mut self) {
        self.values.clear();
        self.buffers.clear();
    }
}
3331
/// Outcome of a single step of a leaf-update traversal.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub enum LeafUpdateStep {
    /// The traversal should continue to the next node.
    Continue,
    /// The update finished at this step.
    Complete {
        /// Paths of the nodes inserted while completing the update.
        inserted_nodes: Vec<Nibbles>,
    },
    /// No node was found at the current path.
    #[default]
    NodeNotFound,
}
3346
impl LeafUpdateStep {
    /// Creates a [`Self::Complete`] step carrying the paths of the inserted nodes.
    pub const fn complete_with_insertions(inserted_nodes: Vec<Nibbles>) -> Self {
        Self::Complete { inserted_nodes }
    }
}
3353
/// Identifies which subtrie of a [`ParallelSparseTrie`] a path belongs to.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum SparseSubtrieType {
    /// The single upper subtrie, holding paths shorter than [`UPPER_TRIE_MAX_DEPTH`].
    Upper,
    /// One of the [`NUM_LOWER_SUBTRIES`] lower subtries, identified by its index.
    Lower(usize),
}
3370
3371impl SparseSubtrieType {
3372 pub const fn path_len_is_upper(len: usize) -> bool {
3377 len < UPPER_TRIE_MAX_DEPTH
3378 }
3379
3380 pub fn from_path(path: &Nibbles) -> Self {
3382 if Self::path_len_is_upper(path.len()) {
3383 Self::Upper
3384 } else {
3385 Self::Lower(path_subtrie_index_unchecked(path))
3386 }
3387 }
3388
3389 pub const fn lower_index(&self) -> Option<usize> {
3391 match self {
3392 Self::Upper => None,
3393 Self::Lower(index) => Some(*index),
3394 }
3395 }
3396}
3397
3398impl Ord for SparseSubtrieType {
3399 fn cmp(&self, other: &Self) -> Ordering {
3402 match (self, other) {
3403 (Self::Upper, Self::Upper) => Ordering::Equal,
3404 (Self::Upper, Self::Lower(_)) => Ordering::Less,
3405 (Self::Lower(_), Self::Upper) => Ordering::Greater,
3406 (Self::Lower(idx_a), Self::Lower(idx_b)) if idx_a == idx_b => Ordering::Equal,
3407 (Self::Lower(idx_a), Self::Lower(idx_b)) => idx_a.cmp(idx_b),
3408 }
3409 }
3410}
3411
impl PartialOrd for SparseSubtrieType {
    /// Delegates to the total order defined by [`Ord`].
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
3417
/// Reusable scratch buffers for the iterative RLP computation, kept across runs to avoid
/// repeated allocation.
#[derive(Clone, PartialEq, Eq, Debug, Default)]
pub struct SparseSubtrieBuffers {
    /// Stack of paths still awaiting RLP computation.
    path_stack: Vec<RlpNodePathStackItem>,
    /// Stack of computed RLP nodes waiting to be consumed by their parent.
    rlp_node_stack: Vec<RlpNodeStackItem>,
    /// Scratch list of a branch node's revealed child paths.
    branch_child_buf: Vec<Nibbles>,
    /// Scratch list of a branch node's child RLP nodes, indexed by child position.
    branch_value_stack_buf: Vec<RlpNode>,
    /// Scratch buffer for RLP encoding.
    rlp_buf: Vec<u8>,
}
3434
3435impl SparseSubtrieBuffers {
3436 fn clear(&mut self) {
3438 self.path_stack.clear();
3439 self.rlp_node_stack.clear();
3440 self.branch_child_buf.clear();
3441 self.branch_value_stack_buf.clear();
3442 self.rlp_buf.clear();
3443 }
3444
3445 const fn memory_size(&self) -> usize {
3447 let mut size = core::mem::size_of::<Self>();
3448
3449 size += self.path_stack.capacity() * core::mem::size_of::<RlpNodePathStackItem>();
3450 size += self.rlp_node_stack.capacity() * core::mem::size_of::<RlpNodeStackItem>();
3451 size += self.branch_child_buf.capacity() * core::mem::size_of::<Nibbles>();
3452 size += self.branch_value_stack_buf.capacity() * core::mem::size_of::<RlpNode>();
3453 size += self.rlp_buf.capacity();
3454
3455 size
3456 }
3457}
3458
/// Item of the path stack used by the iterative RLP computation.
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct RlpNodePathStackItem {
    /// Path of the node to process.
    pub path: Nibbles,
    /// Memoized result of the prefix-set lookup for `path`; `None` until first queried.
    pub is_in_prefix_set: Option<bool>,
}
3467
/// A lower subtrie taken out of the trie because it has pending changes, together with the
/// state needed to recompute its hashes independently.
#[derive(Debug)]
struct ChangedSubtrie {
    /// Index of the subtrie in the lower-subtries table.
    index: usize,
    /// The subtrie itself, removed from the trie for processing.
    subtrie: Box<SparseSubtrie>,
    /// Prefix set of changed paths that belong to this subtrie.
    prefix_set: PrefixSet,
    /// Buffer for accumulating update actions; `None` when updates are not retained.
    update_actions_buf: Option<Vec<SparseTrieUpdatesAction>>,
}
3481
/// Returns the lower-subtrie index for `path`, which must contain at least
/// [`UPPER_TRIE_MAX_DEPTH`] nibbles.
///
/// The index is the path's first byte (its first two nibbles); this is only valid while
/// [`UPPER_TRIE_MAX_DEPTH`] is 2, which the debug assertion enforces.
fn path_subtrie_index_unchecked(path: &Nibbles) -> usize {
    debug_assert_eq!(UPPER_TRIE_MAX_DEPTH, 2);
    let idx = path.get_byte_unchecked(0) as usize;
    // SAFETY: a byte value is < 256 == NUM_LOWER_SUBTRIES (16^2), so `idx` is always in
    // bounds; the hint lets the compiler elide bounds checks at use sites.
    unsafe { core::hint::assert_unchecked(idx < NUM_LOWER_SUBTRIES) };
    idx
}
3495
3496fn is_strict_descendant_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3501 if roots.is_empty() {
3502 return false;
3503 }
3504 debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3505 let idx = roots.partition_point(|root| root <= path);
3506 if idx > 0 {
3507 let candidate = &roots[idx - 1];
3508 if path.starts_with(candidate) && path.len() > candidate.len() {
3509 return true;
3510 }
3511 }
3512 false
3513}
3514
3515fn starts_with_pruned_in(roots: &[Nibbles], path: &Nibbles) -> bool {
3520 if roots.is_empty() {
3521 return false;
3522 }
3523 debug_assert!(roots.windows(2).all(|w| w[0] <= w[1]), "roots must be sorted by path");
3524 let idx = roots.partition_point(|root| root <= path);
3525 if idx > 0 {
3526 let candidate = &roots[idx - 1];
3527 if path.starts_with(candidate) {
3528 return true;
3529 }
3530 }
3531 false
3532}
3533
/// Action to apply to the shared [`SparseTrieUpdates`] after per-subtrie hash computation,
/// allowing subtries to be processed independently and the results merged afterwards.
#[derive(Clone, Debug, Eq, PartialEq)]
enum SparseTrieUpdatesAction {
    /// Record the branch node at this path as removed.
    InsertRemoved(Nibbles),
    /// Drop any pending update for the branch node at this path.
    RemoveUpdated(Nibbles),
    /// Record an updated branch node at this path.
    InsertUpdated(Nibbles, BranchNodeCompact),
}
3545
3546#[cfg(test)]
3547mod tests {
3548 use super::{
3549 path_subtrie_index_unchecked, LowerSparseSubtrie, ParallelSparseTrie, SparseSubtrie,
3550 SparseSubtrieType,
3551 };
3552 use crate::{
3553 parallel::ChangedSubtrie,
3554 provider::{DefaultTrieNodeProvider, NoRevealProvider},
3555 trie::SparseNodeState,
3556 LeafLookup, LeafLookupError, SparseNode, SparseTrie, SparseTrieUpdates,
3557 };
3558 use alloy_primitives::{
3559 b256, hex,
3560 map::{B256Set, HashMap},
3561 B256, U256,
3562 };
3563 use alloy_rlp::{Decodable, Encodable};
3564 use alloy_trie::{proof::AddedRemovedKeys, BranchNodeCompact, Nibbles};
3565 use assert_matches::assert_matches;
3566 use itertools::Itertools;
3567 use proptest::{prelude::*, sample::SizeRange};
3568 use proptest_arbitrary_interop::arb;
3569 use reth_execution_errors::SparseTrieErrorKind;
3570 use reth_primitives_traits::Account;
3571 use reth_provider::{
3572 test_utils::create_test_provider_factory, StorageSettingsCache, TrieWriter,
3573 };
3574 use reth_trie::{
3575 hashed_cursor::{noop::NoopHashedCursor, HashedPostStateCursor},
3576 node_iter::{TrieElement, TrieNodeIter},
3577 trie_cursor::{noop::NoopAccountTrieCursor, TrieCursor, TrieCursorFactory},
3578 walker::TrieWalker,
3579 HashedPostState,
3580 };
3581 use reth_trie_common::{
3582 prefix_set::PrefixSetMut,
3583 proof::{ProofNodes, ProofRetainer},
3584 updates::TrieUpdates,
3585 BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, BranchNodeV2, ExtensionNode,
3586 HashBuilder, LeafNode, ProofTrieNodeV2, RlpNode, TrieMask, TrieNode, TrieNodeV2,
3587 EMPTY_ROOT_HASH,
3588 };
3589 use reth_trie_db::DatabaseTrieCursorFactory;
3590 use std::collections::{BTreeMap, BTreeSet};
3591
3592 fn pad_nibbles_right(mut nibbles: Nibbles) -> Nibbles {
3594 nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![
3595 0;
3596 B256::len_bytes() * 2 - nibbles.len()
3597 ]));
3598 nibbles
3599 }
3600
3601 fn leaf_key(suffix: impl AsRef<[u8]>, total_len: usize) -> Nibbles {
3604 let suffix = suffix.as_ref();
3605 let mut nibbles = Nibbles::from_nibbles(suffix);
3606 nibbles.extend(&Nibbles::from_nibbles_unchecked(vec![0; total_len - suffix.len()]));
3607 nibbles
3608 }
3609
    /// Creates an account with the given nonce and default values otherwise.
    fn create_account(nonce: u64) -> Account {
        Account { nonce, ..Default::default() }
    }
3613
3614 fn large_account_value() -> Vec<u8> {
3615 let account = Account {
3616 nonce: 0x123456789abcdef,
3617 balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
3618 ..Default::default()
3619 };
3620 let mut buf = Vec::new();
3621 account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
3622 buf
3623 }
3624
3625 fn encode_account_value(nonce: u64) -> Vec<u8> {
3626 let account = Account { nonce, ..Default::default() };
3627 let trie_account = account.into_trie_account(EMPTY_ROOT_HASH);
3628 let mut buf = Vec::new();
3629 trie_account.encode(&mut buf);
3630 buf
3631 }
3632
    /// Helper bundling common fixtures and assertions for [`ParallelSparseTrie`] tests.
    #[derive(Default)]
    struct ParallelSparseTrieTestContext;
3636
    impl ParallelSparseTrieTestContext {
        /// Asserts that the lower subtrie covering `path` has been revealed.
        fn assert_subtrie_exists(&self, trie: &ParallelSparseTrie, path: &Nibbles) {
            let idx = path_subtrie_index_unchecked(path);
            assert!(
                trie.lower_subtries[idx].as_revealed_ref().is_some(),
                "Expected lower subtrie at path {path:?} to exist",
            );
        }

        /// Returns the revealed lower subtrie covering `path`, panicking if it is not revealed.
        fn get_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: &Nibbles,
        ) -> &'a SparseSubtrie {
            let idx = path_subtrie_index_unchecked(path);
            trie.lower_subtries[idx]
                .as_revealed_ref()
                .unwrap_or_else(|| panic!("Lower subtrie at path {path:?} should exist"))
        }

        /// Asserts that the lower subtrie selected by `subtrie_prefix` has root path
        /// `expected_path`.
        fn assert_subtrie_path(
            &self,
            trie: &ParallelSparseTrie,
            subtrie_prefix: impl AsRef<[u8]>,
            expected_path: impl AsRef<[u8]>,
        ) {
            let subtrie_prefix = Nibbles::from_nibbles(subtrie_prefix);
            let expected_path = Nibbles::from_nibbles(expected_path);
            let idx = path_subtrie_index_unchecked(&subtrie_prefix);

            let subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap_or_else(|| {
                panic!("Lower subtrie at prefix {subtrie_prefix:?} should exist")
            });

            assert_eq!(
                subtrie.path, expected_path,
                "Subtrie at prefix {subtrie_prefix:?} should have path {expected_path:?}, but has {:?}",
                subtrie.path
            );
        }

        /// Builds `(full key, value)` leaf pairs from nibble prefixes, assigning nonces 1..=n.
        fn create_test_leaves(&self, paths: &[&[u8]]) -> Vec<(Nibbles, Vec<u8>)> {
            paths
                .iter()
                .enumerate()
                .map(|(i, path)| {
                    (
                        pad_nibbles_right(Nibbles::from_nibbles(path)),
                        encode_account_value(i as u64 + 1),
                    )
                })
                .collect()
        }

        /// Builds a single `(full key, value)` leaf pair with the given nonce.
        fn create_test_leaf(&self, path: impl AsRef<[u8]>, value_nonce: u64) -> (Nibbles, Vec<u8>) {
            (pad_nibbles_right(Nibbles::from_nibbles(path)), encode_account_value(value_nonce))
        }

        /// Inserts all given leaves into the trie, panicking on failure.
        fn update_leaves(
            &self,
            trie: &mut ParallelSparseTrie,
            leaves: impl IntoIterator<Item = (Nibbles, Vec<u8>)>,
        ) {
            for (path, value) in leaves {
                trie.update_leaf(path, value, DefaultTrieNodeProvider).unwrap();
            }
        }

        /// Asserts the lower subtrie at `path` exists and returns a fluent assertion helper for
        /// it.
        fn assert_subtrie<'a>(
            &self,
            trie: &'a ParallelSparseTrie,
            path: Nibbles,
        ) -> SubtrieAssertion<'a> {
            self.assert_subtrie_exists(trie, &path);
            let subtrie = self.get_subtrie(trie, &path);
            SubtrieAssertion::new(subtrie)
        }

        /// Returns a fluent assertion helper for the upper subtrie.
        fn assert_upper_subtrie<'a>(&self, trie: &'a ParallelSparseTrie) -> SubtrieAssertion<'a> {
            SubtrieAssertion::new(&trie.upper_subtrie)
        }

        /// Asserts that the trie's root, retained updates, and node set match the reference
        /// results produced by the hash builder.
        fn assert_with_hash_builder(
            &self,
            trie: &mut ParallelSparseTrie,
            hash_builder_root: B256,
            hash_builder_updates: TrieUpdates,
            hash_builder_proof_nodes: ProofNodes,
        ) {
            assert_eq!(trie.root(), hash_builder_root);
            pretty_assertions::assert_eq!(
                BTreeMap::from_iter(trie.updates_ref().updated_nodes.clone()),
                BTreeMap::from_iter(hash_builder_updates.account_nodes)
            );
            assert_eq_parallel_sparse_trie_proof_nodes(trie, hash_builder_proof_nodes);
        }
    }
3743
    /// Fluent assertion helper over a single [`SparseSubtrie`].
    struct SubtrieAssertion<'a> {
        /// The subtrie under inspection.
        subtrie: &'a SparseSubtrie,
    }
3748
    impl<'a> SubtrieAssertion<'a> {
        /// Wraps a subtrie for fluent assertions.
        fn new(subtrie: &'a SparseSubtrie) -> Self {
            Self { subtrie }
        }

        /// Asserts a branch node exists at `path` with (at least) the given state-mask bits set.
        fn has_branch(self, path: &Nibbles, expected_mask_bits: &[u8]) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Branch { state_mask, .. }) => {
                    for bit in expected_mask_bits {
                        assert!(
                            state_mask.is_bit_set(*bit),
                            "Expected branch at {path:?} to have bit {bit} set, instead mask is: {state_mask:?}",
                        );
                    }
                }
                node => panic!("Expected branch node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts a leaf node exists at `path` with the given key remainder.
        fn has_leaf(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Leaf { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected leaf at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected leaf node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts an extension node exists at `path` with the given key.
        fn has_extension(self, path: &Nibbles, expected_key: &Nibbles) -> Self {
            match self.subtrie.nodes.get(path) {
                Some(SparseNode::Extension { key, .. }) => {
                    assert_eq!(
                        *key, *expected_key,
                        "Expected extension at {path:?} to have key {expected_key:?}, found {key:?}",
                    );
                }
                node => panic!("Expected extension node at {path:?}, found {node:?}"),
            }
            self
        }

        /// Asserts a leaf value is stored at the full key `path` with the given bytes.
        fn has_value(self, path: &Nibbles, expected_value: &[u8]) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert_eq!(
                actual.map(|v| v.as_slice()),
                Some(expected_value),
                "Expected value at {path:?} to be {expected_value:?}, found {actual:?}",
            );
            self
        }

        /// Asserts that no leaf value is stored at the full key `path`.
        fn has_no_value(self, path: &Nibbles) -> Self {
            let actual = self.subtrie.inner.values.get(path);
            assert!(actual.is_none(), "Expected no value at {path:?}, but found {actual:?}");
            self
        }
    }
3811
    /// Creates a leaf trie node with the given key nibbles and an account value derived from
    /// `value_nonce`.
    fn create_leaf_node(key: impl AsRef<[u8]>, value_nonce: u64) -> TrieNodeV2 {
        TrieNodeV2::Leaf(LeafNode::new(
            Nibbles::from_nibbles(key),
            encode_account_value(value_nonce),
        ))
    }
3818
3819 fn create_branch_node(
3820 key: Nibbles,
3821 children_indices: &[u8],
3822 child_hashes: impl IntoIterator<Item = RlpNode>,
3823 ) -> TrieNodeV2 {
3824 let mut stack = Vec::new();
3825 let mut state_mask = TrieMask::default();
3826
3827 for (&idx, hash) in children_indices.iter().zip(child_hashes) {
3828 state_mask.set_bit(idx);
3829 stack.push(hash);
3830 }
3831
3832 let branch_rlp_node = if key.is_empty() {
3833 None
3834 } else {
3835 Some(RlpNode::from_rlp(&alloy_rlp::encode(BranchNodeRef::new(&stack, state_mask))))
3836 };
3837
3838 TrieNodeV2::Branch(BranchNodeV2::new(key, stack, state_mask, branch_rlp_node))
3839 }
3840
    /// Creates a branch node with no extension key from the given child indices and RLP nodes.
    fn create_branch_node_with_children(
        children_indices: &[u8],
        child_hashes: impl IntoIterator<Item = RlpNode>,
    ) -> TrieNodeV2 {
        create_branch_node(Nibbles::default(), children_indices, child_hashes)
    }
3847
    /// Runs the reference [`HashBuilder`] over the given account state and returns the root,
    /// the resulting trie updates, the retained proof nodes, and the branch hash/tree masks —
    /// used as ground truth for sparse trie assertions.
    fn run_hash_builder(
        state: impl IntoIterator<Item = (Nibbles, Account)> + Clone,
        trie_cursor: impl TrieCursor,
        destroyed_accounts: B256Set,
        proof_targets: impl IntoIterator<Item = Nibbles>,
    ) -> (B256, TrieUpdates, ProofNodes, HashMap<Nibbles, TrieMask>, HashMap<Nibbles, TrieMask>)
    {
        let mut account_rlp = Vec::new();

        let mut hash_builder = HashBuilder::default()
            .with_updates(true)
            .with_proof_retainer(ProofRetainer::from_iter(proof_targets).with_added_removed_keys(
                Some(AddedRemovedKeys::default().with_assume_added(true)),
            ));

        // The prefix set covers every changed account plus every destroyed account.
        let mut prefix_set = PrefixSetMut::default();
        prefix_set.extend_keys(state.clone().into_iter().map(|(nibbles, _)| nibbles));
        prefix_set.extend_keys(destroyed_accounts.iter().map(Nibbles::unpack));
        let walker = TrieWalker::<_>::state_trie(trie_cursor, prefix_set.freeze())
            .with_deletions_retained(true);
        let hashed_post_state = HashedPostState::default()
            .with_accounts(state.into_iter().map(|(nibbles, account)| {
                (nibbles.pack().into_inner().unwrap().into(), Some(account))
            }))
            .into_sorted();
        let mut node_iter = TrieNodeIter::state_trie(
            walker,
            HashedPostStateCursor::new_account(
                NoopHashedCursor::<Account>::default(),
                &hashed_post_state,
            ),
        );

        // Feed branches and RLP-encoded leaf accounts into the hash builder.
        while let Some(node) = node_iter.try_next().unwrap() {
            match node {
                TrieElement::Branch(branch) => {
                    hash_builder.add_branch(branch.key, branch.value, branch.children_are_in_trie);
                }
                TrieElement::Leaf(key, account) => {
                    let account = account.into_trie_account(EMPTY_ROOT_HASH);
                    account.encode(&mut account_rlp);

                    hash_builder.add_leaf(Nibbles::unpack(key), &account_rlp);
                    account_rlp.clear();
                }
            }
        }
        let root = hash_builder.root();
        let proof_nodes = hash_builder.take_proof_nodes();
        // Extract per-path hash/tree masks from the updated branch nodes.
        let branch_node_hash_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.hash_mask))
            .collect();
        let branch_node_tree_masks = hash_builder
            .updated_branch_nodes
            .clone()
            .unwrap_or_default()
            .iter()
            .map(|(path, node)| (*path, node.tree_mask))
            .collect();

        let mut trie_updates = TrieUpdates::default();
        let removed_keys = node_iter.walker.take_removed_keys();
        trie_updates.finalize(hash_builder, removed_keys, destroyed_accounts);

        (root, trie_updates, proof_nodes, branch_node_hash_masks, branch_node_tree_masks)
    }
3922
    /// Builds a [`ParallelSparseTrie`] (with update retention enabled) directly from the given
    /// `(path, node)` pairs, inserting a placeholder value for every leaf.
    fn new_test_trie<Nodes>(nodes: Nodes) -> ParallelSparseTrie
    where
        Nodes: Iterator<Item = (Nibbles, SparseNode)>,
    {
        let mut trie = ParallelSparseTrie::default().with_updates(true);

        for (path, node) in nodes {
            // Route the node to the subtrie responsible for its path.
            let subtrie = trie.subtrie_for_path_mut(&path);
            if let SparseNode::Leaf { key, .. } = &node {
                // Leaves need a value entry stored under their full key.
                let mut full_key = path;
                full_key.extend(key);
                subtrie.inner.values.insert(full_key, "LEAF VALUE".into());
            }
            subtrie.nodes.insert(path, node);
        }
        trie
    }
3942
    /// Returns every node of the trie (from the revealed lower subtries and the upper subtrie),
    /// sorted by path.
    fn parallel_sparse_trie_nodes(
        sparse_trie: &ParallelSparseTrie,
    ) -> impl IntoIterator<Item = (&Nibbles, &SparseNode)> {
        let lower_sparse_nodes = sparse_trie
            .lower_subtries
            .iter()
            .filter_map(|subtrie| subtrie.as_revealed_ref())
            .flat_map(|subtrie| subtrie.nodes.iter());

        let upper_sparse_nodes = sparse_trie.upper_subtrie.nodes.iter();

        lower_sparse_nodes.chain(upper_sparse_nodes).sorted_by_key(|(path, _)| *path)
    }
3956
    /// Asserts that the trie's nodes match the hash builder's retained proof nodes, comparing
    /// them pairwise in sorted path order.
    fn assert_eq_parallel_sparse_trie_proof_nodes(
        sparse_trie: &ParallelSparseTrie,
        proof_nodes: ProofNodes,
    ) {
        let proof_nodes = proof_nodes
            .into_nodes_sorted()
            .into_iter()
            .map(|(path, node)| (path, TrieNodeV2::decode(&mut node.as_ref()).unwrap()));

        let all_sparse_nodes = parallel_sparse_trie_nodes(sparse_trie);

        for ((proof_node_path, proof_node), (sparse_node_path, sparse_node)) in
            proof_nodes.zip(all_sparse_nodes)
        {
            assert_eq!(&proof_node_path, sparse_node_path);

            // Node kinds must match and carry equivalent masks/keys; an empty sparse node is
            // allowed to pair with any proof node.
            let equals = match (&proof_node, &sparse_node) {
                (TrieNodeV2::EmptyRoot, SparseNode::Empty) => true,
                (
                    TrieNodeV2::Branch(BranchNodeV2 { state_mask: proof_state_mask, .. }),
                    SparseNode::Branch { state_mask: sparse_state_mask, .. },
                ) => proof_state_mask == sparse_state_mask,
                (
                    TrieNodeV2::Extension(ExtensionNode { key: proof_key, .. }),
                    SparseNode::Extension { key: sparse_key, .. },
                ) |
                (
                    TrieNodeV2::Leaf(LeafNode { key: proof_key, .. }),
                    SparseNode::Leaf { key: sparse_key, .. },
                ) => proof_key == sparse_key,
                (_, SparseNode::Empty) => continue,
                _ => false,
            };
            assert!(
                equals,
                "path: {proof_node_path:?}\nproof node: {proof_node:?}\nsparse node: {sparse_node:?}"
            );
        }
    }
4003
    #[test]
    fn test_get_changed_subtries_empty() {
        // With no lower subtries revealed, nothing can be taken out...
        let mut trie = ParallelSparseTrie::default();
        let mut prefix_set = PrefixSetMut::from([Nibbles::default()]).freeze();

        let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
        assert!(subtries.is_empty());
        // ...and every key is reported back as unchanged.
        assert_eq!(unchanged_prefix_set, PrefixSetMut::from(prefix_set.iter().copied()));
    }
4013
4014 #[test]
4015 fn test_get_changed_subtries() {
4016 let mut trie = ParallelSparseTrie::default();
4018 let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
4019 let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
4020 let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
4021 let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
4022 let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
4023 let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
4024
4025 trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
4027 trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
4028 trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3);
4029
4030 let unchanged_prefix_set = PrefixSetMut::from([
4031 Nibbles::from_nibbles([0x0]),
4032 Nibbles::from_nibbles([0x2, 0x0, 0x0]),
4033 ]);
4034 let mut prefix_set = PrefixSetMut::from([
4036 Nibbles::from_nibbles([0x1, 0x0, 0x0]),
4038 Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0]),
4039 ]);
4040 prefix_set.extend(unchanged_prefix_set);
4041 let mut prefix_set = prefix_set.freeze();
4042
4043 let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
4045 assert_eq!(
4046 subtries
4047 .into_iter()
4048 .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
4049 (index, subtrie, prefix_set.iter().copied().collect::<Vec<_>>())
4050 })
4051 .collect::<Vec<_>>(),
4052 vec![(
4053 subtrie_2_index,
4054 subtrie_2,
4055 vec![
4056 Nibbles::from_nibbles([0x1, 0x0, 0x0]),
4057 Nibbles::from_nibbles([0x1, 0x0, 0x1, 0x0])
4058 ]
4059 )]
4060 );
4061 assert_eq!(unchanged_prefix_set, unchanged_prefix_set);
4062 assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_none());
4063
4064 assert_eq!(trie.lower_subtries[subtrie_1_index], LowerSparseSubtrie::Revealed(subtrie_1));
4066 }
4067
4068 #[test]
4069 fn test_get_changed_subtries_all() {
4070 let mut trie = ParallelSparseTrie::default();
4072 let subtrie_1 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));
4073 let subtrie_1_index = path_subtrie_index_unchecked(&subtrie_1.path);
4074 let subtrie_2 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x1, 0x0])));
4075 let subtrie_2_index = path_subtrie_index_unchecked(&subtrie_2.path);
4076 let subtrie_3 = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x3, 0x0])));
4077 let subtrie_3_index = path_subtrie_index_unchecked(&subtrie_3.path);
4078
4079 trie.lower_subtries[subtrie_1_index] = LowerSparseSubtrie::Revealed(subtrie_1.clone());
4081 trie.lower_subtries[subtrie_2_index] = LowerSparseSubtrie::Revealed(subtrie_2.clone());
4082 trie.lower_subtries[subtrie_3_index] = LowerSparseSubtrie::Revealed(subtrie_3.clone());
4083
4084 let mut prefix_set = PrefixSetMut::all().freeze();
4086
4087 let (subtries, unchanged_prefix_set) = trie.take_changed_lower_subtries(&mut prefix_set);
4089 assert_eq!(
4090 subtries
4091 .into_iter()
4092 .map(|ChangedSubtrie { index, subtrie, prefix_set, .. }| {
4093 (index, subtrie, prefix_set.all())
4094 })
4095 .collect::<Vec<_>>(),
4096 vec![
4097 (subtrie_1_index, subtrie_1, true),
4098 (subtrie_2_index, subtrie_2, true),
4099 (subtrie_3_index, subtrie_3, true)
4100 ]
4101 );
4102 assert_eq!(unchanged_prefix_set, PrefixSetMut::all());
4103
4104 assert!(trie.lower_subtries.iter().all(|subtrie| subtrie.as_revealed_ref().is_none()));
4105 }
4106
4107 #[test]
4108 fn test_sparse_subtrie_type() {
4109 assert_eq!(SparseSubtrieType::from_path(&Nibbles::new()), SparseSubtrieType::Upper);
4110 assert_eq!(
4111 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0])),
4112 SparseSubtrieType::Upper
4113 );
4114 assert_eq!(
4115 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15])),
4116 SparseSubtrieType::Upper
4117 );
4118 assert_eq!(
4119 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0])),
4120 SparseSubtrieType::Lower(0)
4121 );
4122 assert_eq!(
4123 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 0, 0])),
4124 SparseSubtrieType::Lower(0)
4125 );
4126 assert_eq!(
4127 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1])),
4128 SparseSubtrieType::Lower(1)
4129 );
4130 assert_eq!(
4131 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 1, 0])),
4132 SparseSubtrieType::Lower(1)
4133 );
4134 assert_eq!(
4135 SparseSubtrieType::from_path(&Nibbles::from_nibbles([0, 15])),
4136 SparseSubtrieType::Lower(15)
4137 );
4138 assert_eq!(
4139 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 0])),
4140 SparseSubtrieType::Lower(240)
4141 );
4142 assert_eq!(
4143 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 1])),
4144 SparseSubtrieType::Lower(241)
4145 );
4146 assert_eq!(
4147 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15])),
4148 SparseSubtrieType::Lower(255)
4149 );
4150 assert_eq!(
4151 SparseSubtrieType::from_path(&Nibbles::from_nibbles([15, 15, 15])),
4152 SparseSubtrieType::Lower(255)
4153 );
4154 }
4155
    #[test]
    fn test_reveal_node_leaves() {
        // Trie whose root branch has a single (blinded) child at nibble 0x1.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Case 1: revealing a leaf at a one-nibble path places it in the upper subtrie.
        {
            let path = Nibbles::from_nibbles([0x1]);
            let node = create_leaf_node([0x2, 0x3], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // The leaf node lands in the upper subtrie with a cached state.
            assert_matches!(
                trie.upper_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                    if key == &Nibbles::from_nibbles([0x2, 0x3])
            );

            // Its value is stored under the full path (node path + leaf key).
            let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            assert_eq!(
                trie.upper_subtrie.inner.values.get(&full_path),
                Some(&encode_account_value(42))
            );
        }

        // Rebuild the trie with a revealed branch at [0x1] so deeper paths exist.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let branch_at_1 =
            create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x1]),
            node: branch_at_1,
            masks: None,
        }])
        .unwrap();

        // Case 2: a leaf at a two-nibble path creates and populates a lower subtrie.
        {
            let path = Nibbles::from_nibbles([0x1, 0x2]);
            let node = create_leaf_node([0x3, 0x4], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            // The lower subtrie for this prefix now exists...
            let idx = path_subtrie_index_unchecked(&path);
            assert!(trie.lower_subtries[idx].as_revealed_ref().is_some());

            // ...and is rooted at the revealed path.
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, path);

            assert_matches!(
                lower_subtrie.nodes.get(&path),
                Some(SparseNode::Leaf { key, state: SparseNodeState::Cached { .. } })
                    if key == &Nibbles::from_nibbles([0x3, 0x4])
            );
        }

        // Case 3: revealing a deeper node in the same lower subtrie must not move the
        // subtrie's root path — it stays at the shallowest revealed path.
        {
            let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
            let node = create_leaf_node([0x4, 0x5], 42);
            let masks = None;

            trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

            let idx = path_subtrie_index_unchecked(&path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2]));
        }
    }
4237
4238 #[test]
4239 fn test_reveal_node_branch_all_upper() {
4240 let path = Nibbles::new();
4241 let child_hashes = [
4242 RlpNode::word_rlp(&B256::repeat_byte(0x11)),
4243 RlpNode::word_rlp(&B256::repeat_byte(0x22)),
4244 ];
4245 let node = create_branch_node_with_children(&[0x0, 0x5], child_hashes.clone());
4246 let masks = None;
4247 let trie = ParallelSparseTrie::from_root(node, masks, true).unwrap();
4248
4249 assert_eq!(
4251 trie.upper_subtrie.nodes.get(&path).unwrap(),
4252 &SparseNode::new_branch(
4253 0b0000000000100001.into(),
4254 &[(0, child_hashes[0].as_hash().unwrap()), (5, child_hashes[1].as_hash().unwrap())]
4255 )
4256 );
4257
4258 let child_path_0 = Nibbles::from_nibbles([0x0]);
4260 let child_path_5 = Nibbles::from_nibbles([0x5]);
4261 assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_0));
4262 assert!(!trie.upper_subtrie.nodes.contains_key(&child_path_5));
4263 }
4264
    #[test]
    fn test_reveal_node_branch_cross_level() {
        // Root branch with a single (blinded) child at nibble 0x1.
        let root_branch =
            create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Branch at the one-nibble path [0x1] (upper subtrie) whose children sit at
        // two-nibble paths (lower subtries) — a cross-level reveal.
        let path = Nibbles::from_nibbles([0x1]);
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x33)),
            RlpNode::word_rlp(&B256::repeat_byte(0x44)),
            RlpNode::word_rlp(&B256::repeat_byte(0x55)),
        ];
        let node = create_branch_node_with_children(&[0x0, 0x7, 0xf], child_hashes.clone());
        let masks = None;

        trie.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }]).unwrap();

        // The branch itself is in the upper subtrie with state mask bits 0, 7, 15 set
        // and the cached RLP of the hash it was revealed under.
        assert_eq!(
            trie.upper_subtrie.nodes.get(&path).unwrap(),
            &SparseNode::new_branch(
                0b1000000010000001.into(),
                &[
                    (0x0, child_hashes[0].as_hash().unwrap()),
                    (0x7, child_hashes[1].as_hash().unwrap()),
                    (0xf, child_hashes[2].as_hash().unwrap())
                ]
            )
            .with_state(SparseNodeState::Cached {
                rlp_node: RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                store_in_db_trie: Some(false),
            })
        );

        // The children's two-nibble paths each map to a distinct lower subtrie.
        let child_paths = [
            Nibbles::from_nibbles([0x1, 0x0]),
            Nibbles::from_nibbles([0x1, 0x7]),
            Nibbles::from_nibbles([0x1, 0xf]),
        ];

        let mut children = child_paths
            .iter()
            .map(|path| ProofTrieNodeV2 {
                path: *path,
                node: create_leaf_node([0x0], 1),
                masks: None,
            })
            .collect::<Vec<_>>();

        trie.reveal_nodes(&mut children).unwrap();

        // Revealing the children must not disturb the parent branch: same mask, same
        // cached state, and no blinded children remain.
        assert_matches!(
            trie.upper_subtrie.nodes.get(&path),
            Some(&SparseNode::Branch {
                state_mask,
                state: SparseNodeState::Cached { ref rlp_node, store_in_db_trie: Some(false) },
                blinded_mask,
                ..
            }) if state_mask == 0b1000000010000001.into() && blinded_mask.is_empty() && *rlp_node == RlpNode::word_rlp(&B256::repeat_byte(0xAA))
        );

        // Each child leaf ends up in its own lower subtrie, rooted at the child's path
        // and cached with the hash the parent originally carried for it.
        for (i, child_path) in child_paths.iter().enumerate() {
            let idx = path_subtrie_index_unchecked(child_path);
            let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
            assert_eq!(&lower_subtrie.path, child_path);
            assert_eq!(
                lower_subtrie.nodes.get(child_path),
                Some(&SparseNode::Leaf {
                    key: Nibbles::from_nibbles([0x0]),
                    state: SparseNodeState::Cached {
                        rlp_node: child_hashes[i].clone(),
                        store_in_db_trie: Some(false)
                    }
                })
            );
        }
    }
4345
    #[test]
    fn test_update_subtrie_hashes_prefix_set_matching() {
        // Root branch with (blinded) children at nibbles 0x0 and 0x3.
        let root_branch = create_branch_node_with_children(
            &[0x0, 0x3],
            [
                RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
                RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
            ],
        );
        let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();

        // Three 64-nibble leaf paths under the prefixes [0,0], [0,1] and [0,2].
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..2);
        let leaf_1_key = leaf_1_full_path.slice(2..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 1], vec![0; 62]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..2);
        let leaf_2_key = leaf_2_full_path.slice(2..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 2], vec![0; 62]].concat());
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), 1);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), 2);

        // Branch at [0x0] with children at nibbles 0x0 and 0x1 (the two leaves above).
        let child_hashes = [
            RlpNode::word_rlp(&B256::repeat_byte(0x00)),
            RlpNode::word_rlp(&B256::repeat_byte(0x11)),
        ];
        let branch_path = Nibbles::from_nibbles([0x0]);
        let branch_node = create_branch_node_with_children(&[0x0, 0x1], child_hashes);

        // Reveal the branch and the first two leaves.
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: branch_path, node: branch_node, masks: None },
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Insert the third leaf via update_leaf (no reveal needed for its path).
        let provider = NoRevealProvider;
        trie.update_leaf(leaf_3_full_path, encode_account_value(3), provider).unwrap();

        let subtrie_1_index = SparseSubtrieType::from_path(&leaf_1_path).lower_index().unwrap();
        let subtrie_2_index = SparseSubtrieType::from_path(&leaf_2_path).lower_index().unwrap();
        let leaf_3_path = leaf_3_full_path.slice(..2);
        let subtrie_3_index = SparseSubtrieType::from_path(&leaf_3_path).lower_index().unwrap();

        // Keys expected to survive in the trie-level prefix set after hashing.
        let mut unchanged_prefix_set = PrefixSetMut::from([
            Nibbles::from_nibbles([0x0]),
            leaf_2_full_path,
            Nibbles::from_nibbles([0x3, 0x0, 0x0]),
        ]);
        // Keys that select changed subtries and should be consumed by the hash update.
        let mut prefix_set = PrefixSetMut::from([
            Nibbles::from_nibbles([0x0, 0x1, 0x0]),
            Nibbles::from_nibbles([0x0, 0x1, 0x1, 0x0]),
        ]);
        prefix_set.extend(unchanged_prefix_set.clone());
        trie.prefix_set = prefix_set;

        trie.update_subtrie_hashes();

        // update_leaf above also registered leaf 3's path, so it is expected to remain.
        unchanged_prefix_set.insert(leaf_3_full_path);

        // Only the unchanged keys (plus leaf 3's) remain in the trie's prefix set.
        assert_eq!(
            trie.prefix_set.clone().freeze().into_iter().collect::<Vec<_>>(),
            unchanged_prefix_set.freeze().into_iter().collect::<Vec<_>>()
        );
        // Hashing returns the subtries to the trie — all three stay revealed.
        assert!(trie.lower_subtries[subtrie_1_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_2_index].as_revealed_ref().is_some());
        assert!(trie.lower_subtries[subtrie_3_index].as_revealed_ref().is_some());
    }
4431
    #[test]
    fn test_subtrie_update_hashes() {
        // Standalone lower subtrie rooted at [0x0, 0x0]; its hashes are compared against
        // a reference hash builder run over the same leaves.
        let mut subtrie = Box::new(SparseSubtrie::new(Nibbles::from_nibbles([0x0, 0x0])));

        // Three 64-nibble leaves: two under the branch at depth 4, one directly under
        // the branch at depth 2.
        let leaf_1_full_path = Nibbles::from_nibbles([0; 64]);
        let leaf_1_path = leaf_1_full_path.slice(..5);
        let leaf_1_key = leaf_1_full_path.slice(5..);
        let leaf_2_full_path = Nibbles::from_nibbles([vec![0, 0, 0, 0, 1], vec![0; 59]].concat());
        let leaf_2_path = leaf_2_full_path.slice(..5);
        let leaf_2_key = leaf_2_full_path.slice(5..);
        let leaf_3_full_path = Nibbles::from_nibbles([vec![0, 0, 1], vec![0; 61]].concat());
        let leaf_3_path = leaf_3_full_path.slice(..3);
        let leaf_3_key = leaf_3_full_path.slice(3..);

        let account_1 = create_account(1);
        let account_2 = create_account(2);
        let account_3 = create_account(3);
        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);
        let leaf_3 = create_leaf_node(leaf_3_key.to_vec(), account_3.nonce);

        // Inner branch (reached via a 1-nibble extension at [0,0,0]) holding leaves 1 & 2.
        let extension_path = Nibbles::from_nibbles([0, 0, 0]);
        let branch_1_path = Nibbles::from_nibbles([0, 0, 0, 0]);
        let branch_1 = create_branch_node(
            Nibbles::from_nibbles([0]),
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        // Top branch at [0,0] holding the inner branch and leaf 3.
        let branch_2_path = Nibbles::from_nibbles([0, 0]);
        let branch_2 = create_branch_node_with_children(
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&branch_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_3)),
            ],
        );

        // Reveal all nodes into the subtrie (top-down).
        subtrie.reveal_node(branch_2_path, &branch_2, None, None).unwrap();
        subtrie.reveal_node(extension_path, &branch_1, None, None).unwrap();
        subtrie.reveal_node(leaf_1_path, &leaf_1, None, None).unwrap();
        subtrie.reveal_node(leaf_2_path, &leaf_2, None, None).unwrap();
        subtrie.reveal_node(leaf_3_path, &leaf_3, None, None).unwrap();

        // Reference run: hash builder over the same leaves, retaining proofs for every
        // node path we want to compare.
        let (_, _, proof_nodes, _, _) = run_hash_builder(
            [
                (leaf_1_full_path, account_1),
                (leaf_2_full_path, account_2),
                (leaf_3_full_path, account_3),
            ],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_2_path, leaf_1_full_path, leaf_2_full_path, leaf_3_full_path],
        );

        // Recompute the subtrie's cached hashes for all three leaf paths.
        subtrie.update_hashes(
            &mut PrefixSetMut::from([leaf_1_full_path, leaf_2_full_path, leaf_3_full_path])
                .freeze(),
            &mut None,
            &BranchNodeMasksMap::default(),
        );

        // Inner branch hash matches the hash builder.
        let hash_builder_branch_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_1_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_1_hash =
            subtrie.nodes.get(&branch_1_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_1_hash, subtrie_branch_1_hash);

        // Extension hash matches.
        let hash_builder_extension_hash =
            RlpNode::from_rlp(proof_nodes.get(&extension_path).unwrap().as_ref())
                .as_hash()
                .unwrap();
        let subtrie_extension_hash =
            subtrie.nodes.get(&extension_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_extension_hash, subtrie_extension_hash);

        // Top branch hash matches.
        let hash_builder_branch_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&branch_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_branch_2_hash =
            subtrie.nodes.get(&branch_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_branch_2_hash, subtrie_branch_2_hash);

        // Each leaf hash matches.
        let subtrie_leaf_1_hash = subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().unwrap();
        let hash_builder_leaf_1_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_1_path).unwrap().as_ref()).as_hash().unwrap();
        assert_eq!(hash_builder_leaf_1_hash, subtrie_leaf_1_hash);

        let hash_builder_leaf_2_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_2_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_2_hash = subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_2_hash, subtrie_leaf_2_hash);

        let hash_builder_leaf_3_hash =
            RlpNode::from_rlp(proof_nodes.get(&leaf_3_path).unwrap().as_ref()).as_hash().unwrap();
        let subtrie_leaf_3_hash = subtrie.nodes.get(&leaf_3_path).unwrap().cached_hash().unwrap();
        assert_eq!(hash_builder_leaf_3_hash, subtrie_leaf_3_hash);
    }
4539
    #[test]
    fn test_remove_leaf_branch_becomes_extension() {
        // Layout: root extension -> branch at [0x5] with children at 0x0 and 0x3.
        // Removing the leaf under 0x3 leaves the branch with one child, so it collapses
        // and the root extension's key grows to reach the deeper branch.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b1001), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3])),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0101), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59)),
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_leaf(leaf_key([0x7], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf at [0x5, 0x3].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x7]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        let lower_subtrie_50 = trie.lower_subtries[0x50].as_revealed_ref().unwrap();

        // The subtrie that held only the removed leaf is gone.
        assert_matches!(trie.lower_subtries[0x53].as_revealed_ref(), None);

        // The root extension absorbed the collapsed branch and inner extension: its key
        // now reaches all the way to the surviving branch.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([])),
            Some(SparseNode::Extension { key, .. })
                if key == &Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])
        );
        // The intermediate branch and extension nodes were removed.
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
        assert_matches!(lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0])), None);
        // The deep branch survives with its shape intact.
        assert_matches!(
            lower_subtrie_50.nodes.get(&Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3])),
            Some(SparseNode::Branch { state_mask, .. })
                if *state_mask == 0b0101.into()
        );
    }
4606
    #[test]
    fn test_remove_leaf_branch_becomes_leaf() {
        // Root branch with two leaf children; removing one collapses the branch into a
        // single leaf at the root.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
                (Nibbles::from_nibbles([0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 63))),
            ]
            .into_iter(),
        );

        // Pre-record the root branch as "updated" so we can verify the removal both
        // records it as removed and drops the now-stale update entry.
        if let Some(updates) = trie.updates.as_mut() {
            updates
                .updated_nodes
                .insert(Nibbles::default(), BranchNodeCompact::new(0b11, 0, 0, vec![], None));
        }

        let provider = NoRevealProvider;

        // Remove the leaf at nibble 0x0.
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // The removed leaf's value is gone.
        assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);

        // The root is now a leaf: the sibling's nibble was prepended to its key.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Leaf { key, .. })
                if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
        );

        // Both former child nodes were removed.
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])), None);

        let updates = trie.updates.as_ref().unwrap();

        // The collapsed branch is recorded as removed...
        assert!(updates.removed_nodes.contains(&Nibbles::default()));

        // ...and its stale "updated" entry was dropped.
        assert!(!updates.updated_nodes.contains_key(&Nibbles::default()));
    }
4664
    #[test]
    fn test_remove_leaf_extension_becomes_leaf() {
        // Layout: root extension -> branch at [0x5] with two leaf children. Removing one
        // leaf collapses branch + extension into a single leaf at the root.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x5, 0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 62))),
                (Nibbles::from_nibbles([0x5, 0x1]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the leaf at [0x5, 0x0].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x1, 0x2]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // Both lower subtries are gone: the removed leaf's and the surviving leaf's
        // (the survivor moved up into the upper subtrie during the collapse).
        assert_matches!(trie.lower_subtries[0x50].as_revealed_ref(), None);
        assert_matches!(trie.lower_subtries[0x51].as_revealed_ref(), None);

        // The surviving leaf's value is retained, now in the upper subtrie.
        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]));
        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));

        // The root is now a leaf whose key spans the whole former path.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Leaf { key, .. })
                if key == &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x1, 0x3, 0x4]))
        );

        // The intermediate branch node was removed.
        assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x5])), None);
    }
4713
    #[test]
    fn test_remove_leaf_branch_on_branch() {
        // Layout: root branch -> leaf at 0x0 and inner branch at 0x2 with two leaves.
        // Removing one inner leaf collapses the inner branch into a leaf that hangs
        // directly off the root branch.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_branch(TrieMask::new(0b0101), &[])),
                (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
                (Nibbles::from_nibbles([0x2]), SparseNode::new_branch(TrieMask::new(0b0011), &[])),
                (Nibbles::from_nibbles([0x2, 0x0]), SparseNode::new_leaf(leaf_key([0x3, 0x4], 62))),
                (Nibbles::from_nibbles([0x2, 0x1]), SparseNode::new_leaf(leaf_key([0x5, 0x6], 62))),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Remove the inner leaf at [0x2, 0x0].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x0, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;

        // Both lower subtrie slots under 0x2 are gone after the collapse (the survivor
        // moved up into the upper subtrie).
        assert_matches!(trie.lower_subtries[0x20].as_revealed_ref(), None);
        assert_matches!(trie.lower_subtries[0x21].as_revealed_ref(), None);

        // The surviving leaf's value is retained.
        let other_leaf_full_value = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x1, 0x5, 0x6]));
        assert_matches!(upper_subtrie.inner.values.get(&other_leaf_full_value), Some(_));

        // The root branch keeps its shape.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch { state_mask, .. })
                if *state_mask == 0b0101.into()
        );

        // The node at [0x2] is now a leaf whose key absorbed the survivor's nibble.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x2])),
            Some(SparseNode::Leaf { key, .. })
                if key == &leaf_key([0x1, 0x5, 0x6], 63)
        );
    }
4768
    #[test]
    fn test_remove_leaf_lower_subtrie_root_path_update() {
        // Layout: root extension -> branch at [0x1,0x2,0x3] with a leaf at nibble 0x3
        // and an extension at 0x4 leading to a deeper two-leaf branch. Removing the
        // shallow leaf collapses the upper branch, so the lower subtrie's root path
        // must move down to the surviving deeper branch.
        let mut trie = new_test_trie(
            [
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3]),
                    SparseNode::new_branch(TrieMask::new(0b0011000), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 60)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x5])),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]),
                    SparseNode::new_branch(TrieMask::new(0b0011), &[]),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
                (
                    Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 58)),
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Sanity check: the lower subtrie starts out rooted at [0x1, 0x2, 0x3].
        let lower_subtrie_root_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
        assert_matches!(
            trie.lower_subtrie_for_path_mut(&lower_subtrie_root_path),
            Some(subtrie)
                if subtrie.path == lower_subtrie_root_path
        );

        // Remove the shallow leaf at [0x1, 0x2, 0x3, 0x3].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x3]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        // The subtrie's root path moved down to the surviving deep branch.
        let lower_subtrie = trie.lower_subtries[0x12].as_revealed_ref().unwrap();
        assert_eq!(lower_subtrie.path, Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]));

        // The root extension now points directly at that branch.
        assert_matches!(
            trie.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, .. })
                if key == &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])
        );

        // The deep branch itself kept its shape.
        assert_matches!(
            lower_subtrie.nodes.get(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            Some(SparseNode::Branch { state_mask, .. })
                if state_mask == &TrieMask::new(0b0011)
        );
    }
4850
4851 #[test]
4852 fn test_remove_leaf_remaining_child_needs_reveal() {
4853 let mut trie = new_test_trie(
4861 [
4862 (
4863 Nibbles::default(),
4864 SparseNode::new_branch(
4865 TrieMask::new(0b0011),
4866 &[(0x1, B256::repeat_byte(0xab))],
4867 ),
4868 ),
4869 (Nibbles::from_nibbles([0x0]), SparseNode::new_leaf(leaf_key([0x1, 0x2], 63))),
4870 ]
4871 .into_iter(),
4872 );
4873
4874 let revealed_leaf = create_leaf_node(leaf_key([0x3, 0x4], 63).to_vec(), 42);
4876 let mut encoded = Vec::new();
4877 revealed_leaf.encode(&mut encoded);
4878
4879 let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2]));
4882 let Err(err) = trie.remove_leaf(&leaf_full_path, NoRevealProvider) else {
4883 panic!("expected error");
4884 };
4885 assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if *path == Nibbles::from_nibbles([0x1]));
4886
4887 trie.reveal_nodes(&mut [ProofTrieNodeV2 {
4889 path: Nibbles::from_nibbles([0x1]),
4890 node: revealed_leaf,
4891 masks: None,
4892 }])
4893 .unwrap();
4894 trie.remove_leaf(&leaf_full_path, NoRevealProvider).unwrap();
4895
4896 let upper_subtrie = &trie.upper_subtrie;
4897
4898 assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4900
4901 assert_matches!(
4903 upper_subtrie.nodes.get(&Nibbles::default()),
4904 Some(SparseNode::Leaf{ key, ..})
4905 if key == &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4]))
4906 );
4907
4908 assert_matches!(upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])), None);
4910 }
4911
4912 #[test]
4913 fn test_remove_leaf_root() {
4914 let mut trie = new_test_trie(core::iter::once((
4920 Nibbles::default(),
4921 SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]))),
4922 )));
4923
4924 let provider = NoRevealProvider;
4925
4926 let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
4928 trie.remove_leaf(&leaf_full_path, provider).unwrap();
4929
4930 let upper_subtrie = &trie.upper_subtrie;
4931
4932 assert_matches!(upper_subtrie.inner.values.get(&leaf_full_path), None);
4934
4935 assert_matches!(upper_subtrie.nodes.get(&Nibbles::default()), Some(SparseNode::Empty));
4937 }
4938
    #[test]
    fn test_remove_leaf_unsets_hash_along_path() {
        // Helper: node state carrying a cached hash, so we can observe which nodes get
        // invalidated (flipped to `Dirty`) by a removal.
        let make_revealed = |hash: B256| SparseNodeState::Cached {
            rlp_node: RlpNode::word_rlp(&hash),
            store_in_db_trie: None,
        };
        let mut trie = new_test_trie(
            [
                // Root branch with children at 0x0 and 0x1.
                (
                    Nibbles::default(),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b0011),
                        state: make_revealed(B256::repeat_byte(0x10)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                // Extension leading down to the lower-subtrie branch.
                (
                    Nibbles::from_nibbles([0x0]),
                    SparseNode::Extension {
                        key: Nibbles::from_nibbles([0x1]),
                        state: make_revealed(B256::repeat_byte(0x20)),
                    },
                ),
                // Branch with three children (0x2, 0x3, 0x4), so it will not collapse.
                (
                    Nibbles::from_nibbles([0x0, 0x1]),
                    SparseNode::Branch {
                        state_mask: TrieMask::new(0b11100),
                        state: make_revealed(B256::repeat_byte(0x30)),
                        blinded_mask: Default::default(),
                        blinded_hashes: Default::default(),
                    },
                ),
                // The leaf that will be removed.
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x2]),
                    SparseNode::Leaf {
                        key: leaf_key([0x3, 0x4], 61),
                        state: make_revealed(B256::repeat_byte(0x40)),
                    },
                ),
                // Sibling leaves that must keep their cached hashes.
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x3]),
                    SparseNode::Leaf {
                        key: leaf_key([0x5, 0x6], 61),
                        state: make_revealed(B256::repeat_byte(0x50)),
                    },
                ),
                (
                    Nibbles::from_nibbles([0x0, 0x1, 0x4]),
                    SparseNode::Leaf {
                        key: leaf_key([0x6, 0x7], 61),
                        state: make_revealed(B256::repeat_byte(0x60)),
                    },
                ),
                // Leaf off the removal path, directly under the root.
                (
                    Nibbles::from_nibbles([0x1]),
                    SparseNode::Leaf {
                        key: leaf_key([0x7, 0x8], 63),
                        state: make_revealed(B256::repeat_byte(0x70)),
                    },
                ),
            ]
            .into_iter(),
        );

        let provider = NoRevealProvider;

        // Removing a path that doesn't correspond to a stored leaf is a no-op: every
        // cached hash must survive.
        trie.remove_leaf(
            &pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4, 0xF])),
            provider,
        )
        .unwrap();
        for (path, node) in trie.all_nodes() {
            assert!(node.cached_hash().is_some(), "path {path:?} should still have a hash");
        }

        // Now remove the actual leaf at [0x0, 0x1, 0x2].
        let leaf_full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0x1, 0x2, 0x3, 0x4]));
        trie.remove_leaf(&leaf_full_path, provider).unwrap();

        let upper_subtrie = &trie.upper_subtrie;
        let lower_subtrie_10 = trie.lower_subtries[0x01].as_revealed_ref().unwrap();

        // Every node on the path from the root to the removed leaf is now dirty.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x0])),
            Some(SparseNode::Extension { state: SparseNodeState::Dirty, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1])),
            Some(SparseNode::Branch { state: SparseNodeState::Dirty, .. })
        );

        // Nodes off the removal path keep their cached state.
        assert_matches!(
            upper_subtrie.nodes.get(&Nibbles::from_nibbles([0x1])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x3])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
        assert_matches!(
            lower_subtrie_10.nodes.get(&Nibbles::from_nibbles([0x0, 0x1, 0x4])),
            Some(SparseNode::Leaf { state: SparseNodeState::Cached { .. }, .. })
        );
    }
5065
    /// Builds a tiny trie by hand (root extension -> branch -> two leaves),
    /// reveals the leaves, force-marks every node dirty, and checks that
    /// `root()` matches the reference hash builder and caches hashes on all
    /// four nodes afterwards.
    #[test]
    fn test_parallel_sparse_trie_root() {
        // Root extension at the empty path pointing one nibble down to 0x2.
        let extension_path = Nibbles::new();
        let extension_key = Nibbles::from_nibbles([0x2]);

        // Branch sits directly under the extension.
        let branch_path = Nibbles::from_nibbles([0x2]);

        // Leaf 1 hangs off branch child 0; full path = branch path + leaf key.
        let leaf_1_path = Nibbles::from_nibbles([0x2, 0x0]);
        let leaf_1_key = Nibbles::from_nibbles(vec![0; 62]); let leaf_1_full_path = Nibbles::from_nibbles([vec![0x2, 0x0], vec![0; 62]].concat());

        // Leaf 2 hangs off branch child 1.
        let leaf_2_path = Nibbles::from_nibbles([0x2, 0x1]);
        let leaf_2_key = Nibbles::from_nibbles(vec![0; 62]); let leaf_2_full_path = Nibbles::from_nibbles([vec![0x2, 0x1], vec![0; 62]].concat());

        let account_1 = create_account(1);
        let account_2 = create_account(2);

        let leaf_1 = create_leaf_node(leaf_1_key.to_vec(), account_1.nonce);
        let leaf_2 = create_leaf_node(leaf_2_key.to_vec(), account_2.nonce);

        // Branch embedding both leaves as RLP children at indices 0 and 1.
        let branch = create_branch_node(
            extension_key,
            &[0, 1],
            vec![
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1)),
                RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2)),
            ],
        );

        let mut trie = ParallelSparseTrie::from_root(branch, None, true).unwrap();
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 { path: leaf_1_path, node: leaf_1, masks: None },
            ProofTrieNodeV2 { path: leaf_2_path, node: leaf_2, masks: None },
        ])
        .unwrap();

        // Mark the upper-subtrie nodes dirty so `root()` must recompute them.
        trie.upper_subtrie
            .nodes
            .get_mut(&extension_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.upper_subtrie.nodes.get_mut(&branch_path).unwrap().set_state(SparseNodeState::Dirty);

        let leaf_1_subtrie_idx = path_subtrie_index_unchecked(&leaf_1_path);
        let leaf_2_subtrie_idx = path_subtrie_index_unchecked(&leaf_2_path);

        // Mark both lower-subtrie leaves dirty as well.
        trie.lower_subtries[leaf_1_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_1_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);
        trie.lower_subtries[leaf_2_subtrie_idx]
            .as_revealed_mut()
            .unwrap()
            .nodes
            .get_mut(&leaf_2_path)
            .unwrap()
            .set_state(SparseNodeState::Dirty);

        // The prefix set tells `root()` which full leaf paths changed.
        trie.prefix_set.insert(leaf_1_full_path);
        trie.prefix_set.insert(leaf_2_full_path);

        let root = trie.root();

        // Reference root computed by the regular hash builder over the same state.
        let (hash_builder_root, _, _proof_nodes, _, _) = run_hash_builder(
            [(leaf_1_full_path, account_1), (leaf_2_full_path, account_2)],
            NoopAccountTrieCursor::default(),
            Default::default(),
            [extension_path, branch_path, leaf_1_full_path, leaf_2_full_path],
        );

        assert_eq!(root, hash_builder_root);

        // After hashing, every node should carry a cached hash again.
        let leaf_1_subtrie = trie.lower_subtries[leaf_1_subtrie_idx].as_revealed_ref().unwrap();
        let leaf_2_subtrie = trie.lower_subtries[leaf_2_subtrie_idx].as_revealed_ref().unwrap();
        assert!(trie.upper_subtrie.nodes.get(&extension_path).unwrap().cached_hash().is_some());
        assert!(trie.upper_subtrie.nodes.get(&branch_path).unwrap().cached_hash().is_some());
        assert!(leaf_1_subtrie.nodes.get(&leaf_1_path).unwrap().cached_hash().is_some());
        assert!(leaf_2_subtrie.nodes.get(&leaf_2_path).unwrap().cached_hash().is_some());
    }
5165
5166 #[test]
5167 fn sparse_trie_empty_update_one() {
5168 let ctx = ParallelSparseTrieTestContext;
5169
5170 let key = Nibbles::unpack(B256::with_last_byte(42));
5171 let value = || Account::default();
5172 let value_encoded = || {
5173 let mut account_rlp = Vec::new();
5174 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5175 account_rlp
5176 };
5177
5178 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5179 run_hash_builder(
5180 [(key, value())],
5181 NoopAccountTrieCursor::default(),
5182 Default::default(),
5183 [key],
5184 );
5185
5186 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5187 ctx.update_leaves(&mut sparse, [(key, value_encoded())]);
5188 ctx.assert_with_hash_builder(
5189 &mut sparse,
5190 hash_builder_root,
5191 hash_builder_updates,
5192 hash_builder_proof_nodes,
5193 );
5194 }
5195
5196 #[test]
5197 fn sparse_trie_empty_update_multiple_lower_nibbles() {
5198 let ctx = ParallelSparseTrieTestContext;
5199
5200 let paths = (0..=16).map(|b| Nibbles::unpack(B256::with_last_byte(b))).collect::<Vec<_>>();
5201 let value = || Account::default();
5202 let value_encoded = || {
5203 let mut account_rlp = Vec::new();
5204 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5205 account_rlp
5206 };
5207
5208 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5209 run_hash_builder(
5210 paths.iter().copied().zip(core::iter::repeat_with(value)),
5211 NoopAccountTrieCursor::default(),
5212 Default::default(),
5213 paths.clone(),
5214 );
5215
5216 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5217 ctx.update_leaves(
5218 &mut sparse,
5219 paths.into_iter().zip(core::iter::repeat_with(value_encoded)),
5220 );
5221
5222 ctx.assert_with_hash_builder(
5223 &mut sparse,
5224 hash_builder_root,
5225 hash_builder_updates,
5226 hash_builder_proof_nodes,
5227 );
5228 }
5229
5230 #[test]
5231 fn sparse_trie_empty_update_multiple_upper_nibbles() {
5232 let paths = (239..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5233 let value = || Account::default();
5234 let value_encoded = || {
5235 let mut account_rlp = Vec::new();
5236 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5237 account_rlp
5238 };
5239
5240 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5241 run_hash_builder(
5242 paths.iter().copied().zip(core::iter::repeat_with(value)),
5243 NoopAccountTrieCursor::default(),
5244 Default::default(),
5245 paths.clone(),
5246 );
5247
5248 let provider = DefaultTrieNodeProvider;
5249 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5250 for path in &paths {
5251 sparse.update_leaf(*path, value_encoded(), &provider).unwrap();
5252 }
5253 let sparse_root = sparse.root();
5254 let sparse_updates = sparse.take_updates();
5255
5256 assert_eq!(sparse_root, hash_builder_root);
5257 assert_eq!(sparse_updates.updated_nodes, hash_builder_updates.account_nodes);
5258 assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
5259 }
5260
5261 #[test]
5262 fn sparse_trie_empty_update_multiple() {
5263 let ctx = ParallelSparseTrieTestContext;
5264
5265 let paths = (0..=255)
5266 .map(|b| {
5267 Nibbles::unpack(if b % 2 == 0 {
5268 B256::repeat_byte(b)
5269 } else {
5270 B256::with_last_byte(b)
5271 })
5272 })
5273 .collect::<Vec<_>>();
5274 let value = || Account::default();
5275 let value_encoded = || {
5276 let mut account_rlp = Vec::new();
5277 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5278 account_rlp
5279 };
5280
5281 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5282 run_hash_builder(
5283 paths.iter().sorted_unstable().copied().zip(core::iter::repeat_with(value)),
5284 NoopAccountTrieCursor::default(),
5285 Default::default(),
5286 paths.clone(),
5287 );
5288
5289 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5290 ctx.update_leaves(
5291 &mut sparse,
5292 paths.iter().copied().zip(core::iter::repeat_with(value_encoded)),
5293 );
5294 ctx.assert_with_hash_builder(
5295 &mut sparse,
5296 hash_builder_root,
5297 hash_builder_updates,
5298 hash_builder_proof_nodes,
5299 );
5300 }
5301
5302 #[test]
5303 fn sparse_trie_empty_update_repeated() {
5304 let ctx = ParallelSparseTrieTestContext;
5305
5306 let paths = (0..=255).map(|b| Nibbles::unpack(B256::repeat_byte(b))).collect::<Vec<_>>();
5307 let old_value = Account { nonce: 1, ..Default::default() };
5308 let old_value_encoded = {
5309 let mut account_rlp = Vec::new();
5310 old_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5311 account_rlp
5312 };
5313 let new_value = Account { nonce: 2, ..Default::default() };
5314 let new_value_encoded = {
5315 let mut account_rlp = Vec::new();
5316 new_value.into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5317 account_rlp
5318 };
5319
5320 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5321 run_hash_builder(
5322 paths.iter().copied().zip(core::iter::repeat_with(|| old_value)),
5323 NoopAccountTrieCursor::default(),
5324 Default::default(),
5325 paths.clone(),
5326 );
5327
5328 let mut sparse = ParallelSparseTrie::default().with_updates(true);
5329 ctx.update_leaves(
5330 &mut sparse,
5331 paths.iter().copied().zip(core::iter::repeat(old_value_encoded)),
5332 );
5333 ctx.assert_with_hash_builder(
5334 &mut sparse,
5335 hash_builder_root,
5336 hash_builder_updates,
5337 hash_builder_proof_nodes,
5338 );
5339
5340 let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
5341 run_hash_builder(
5342 paths.iter().copied().zip(core::iter::repeat(new_value)),
5343 NoopAccountTrieCursor::default(),
5344 Default::default(),
5345 paths.clone(),
5346 );
5347
5348 ctx.update_leaves(
5349 &mut sparse,
5350 paths.iter().copied().zip(core::iter::repeat(new_value_encoded)),
5351 );
5352 ctx.assert_with_hash_builder(
5353 &mut sparse,
5354 hash_builder_root,
5355 hash_builder_updates,
5356 hash_builder_proof_nodes,
5357 );
5358 }
5359
    /// Exercises `remove_leaf` end to end: builds a trie of six leaves all
    /// sharing the first nibble 0x5, then deletes them one by one, asserting the
    /// exact node layout after every removal (branch mask shrinking, branch ->
    /// extension and branch -> leaf collapsing, extension/key merging) until the
    /// trie is a single `Empty` root.
    #[test]
    fn sparse_trie_remove_leaf() {
        let ctx = ParallelSparseTrieTestContext;
        let provider = DefaultTrieNodeProvider;
        let mut sparse = ParallelSparseTrie::default();

        let value = alloy_rlp::encode_fixed_size(&U256::ZERO).to_vec();

        // Insert six leaves; keys are padded to full 64-nibble paths.
        ctx.update_leaves(
            &mut sparse,
            [
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
                    value.clone(),
                ),
                (
                    pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
                    value.clone(),
                ),
                (pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])), value),
            ],
        );

        // Initial shape: root extension -> branch at [0x5] with children 0/2/3
        // (mask 0b1101), a nested branch under [0x5, 0x0, 0x2, 0x3] and another
        // under [0x5, 0x3] / [0x5, 0x3, 0x3].
        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1101.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x1, 0x3], 62))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove the lone leaf under branch child 0x2: the [0x5] branch mask
        // shrinks from 0b1101 to 0b1001 and the [0x5, 0x2] leaf disappears.
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x2, 0x0, 0x1, 0x3])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x2, 0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3]),
                    SparseNode::new_branch(0b1010.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3]),
                    SparseNode::new_leaf(leaf_key([], 59))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove one of the two leaves under [0x5, 0x0, 0x2, 0x3]: the branch and
        // extension there collapse into a single leaf at [0x5, 0x0] with the
        // merged key [0x2, 0x3, 0x3].
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x1])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (Nibbles::from_nibbles([0x5, 0x3]), SparseNode::new_branch(0b1010.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x1]),
                    SparseNode::new_leaf(leaf_key([0x0, 0x2], 61))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove the leaf at branch child [0x5, 0x3, 0x1]: the [0x5, 0x3] branch
        // is left with one child and becomes an extension to [0x3].
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x1, 0x0, 0x2])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_ext(Nibbles::from_nibbles([0x3]))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3]),
                    SparseNode::new_branch(0b0101.into(), &[])
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2], 60))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2]),
                    SparseNode::new_leaf(leaf_key([0x0], 60))
                )
            ])
        );

        // Remove one leaf under [0x5, 0x3, 0x3]: the remaining chain collapses
        // into a single leaf at [0x5, 0x3] with key [0x3, 0x0, 0x2].
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x2, 0x0])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([
                (Nibbles::default(), SparseNode::new_ext(Nibbles::from_nibbles([0x5]))),
                (Nibbles::from_nibbles([0x5]), SparseNode::new_branch(0b1001.into(), &[])),
                (
                    Nibbles::from_nibbles([0x5, 0x0]),
                    SparseNode::new_leaf(leaf_key([0x2, 0x3, 0x3], 62))
                ),
                (
                    Nibbles::from_nibbles([0x5, 0x3]),
                    SparseNode::new_leaf(leaf_key([0x3, 0x0, 0x2], 62))
                ),
            ])
        );

        // Remove the [0x5, 0x0] leaf: only one leaf remains, so the whole trie
        // collapses to a single root leaf holding the full remaining key.
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x0, 0x2, 0x3, 0x3])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(
                Nibbles::default(),
                SparseNode::new_leaf(pad_nibbles_right(Nibbles::from_nibbles([
                    0x5, 0x3, 0x3, 0x0, 0x2
                ])))
            ),])
        );

        // Remove the last leaf: the trie becomes a single `Empty` root node.
        sparse
            .remove_leaf(
                &pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x3, 0x3, 0x0, 0x2])),
                &provider,
            )
            .unwrap();

        pretty_assertions::assert_eq!(
            parallel_sparse_trie_nodes(&sparse)
                .into_iter()
                .map(|(k, v)| (*k, v.clone()))
                .collect::<BTreeMap<_, _>>(),
            BTreeMap::from_iter([(Nibbles::default(), SparseNode::Empty)])
        );
    }
5675
5676 #[test]
5677 fn sparse_trie_remove_leaf_blinded() {
5678 let leaf = LeafNode::new(
5679 Nibbles::default(),
5680 alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5681 );
5682 let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5683 Nibbles::default(),
5684 vec![
5685 RlpNode::word_rlp(&B256::repeat_byte(1)),
5686 RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5687 ],
5688 TrieMask::new(0b11),
5689 None,
5690 ));
5691
5692 let provider = DefaultTrieNodeProvider;
5693 let mut sparse = ParallelSparseTrie::from_root(
5694 branch.clone(),
5695 Some(BranchNodeMasks {
5696 hash_mask: TrieMask::new(0b01),
5697 tree_mask: TrieMask::default(),
5698 }),
5699 false,
5700 )
5701 .unwrap();
5702
5703 sparse
5709 .reveal_nodes(&mut [
5710 ProofTrieNodeV2 {
5711 path: Nibbles::default(),
5712 node: branch,
5713 masks: Some(BranchNodeMasks {
5714 hash_mask: TrieMask::default(),
5715 tree_mask: TrieMask::new(0b01),
5716 }),
5717 },
5718 ProofTrieNodeV2 {
5719 path: Nibbles::from_nibbles([0x1]),
5720 node: TrieNodeV2::Leaf(leaf),
5721 masks: None,
5722 },
5723 ])
5724 .unwrap();
5725
5726 assert_matches!(
5728 sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x0])), &provider).map_err(|e| e.into_kind()),
5729 Err(SparseTrieErrorKind::BlindedNode(path)) if path == Nibbles::from_nibbles([0x0])
5730 );
5731 }
5732
5733 #[test]
5734 fn sparse_trie_remove_leaf_non_existent() {
5735 let leaf = LeafNode::new(
5736 Nibbles::default(),
5737 alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec(),
5738 );
5739 let branch = TrieNodeV2::Branch(BranchNodeV2::new(
5740 Nibbles::default(),
5741 vec![
5742 RlpNode::word_rlp(&B256::repeat_byte(1)),
5743 RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
5744 ],
5745 TrieMask::new(0b11),
5746 None,
5747 ));
5748
5749 let provider = DefaultTrieNodeProvider;
5750 let mut sparse = ParallelSparseTrie::from_root(
5751 branch.clone(),
5752 Some(BranchNodeMasks {
5753 hash_mask: TrieMask::new(0b01),
5754 tree_mask: TrieMask::default(),
5755 }),
5756 false,
5757 )
5758 .unwrap();
5759
5760 sparse
5766 .reveal_nodes(&mut [
5767 ProofTrieNodeV2 {
5768 path: Nibbles::default(),
5769 node: branch,
5770 masks: Some(BranchNodeMasks {
5771 hash_mask: TrieMask::default(),
5772 tree_mask: TrieMask::new(0b01),
5773 }),
5774 },
5775 ProofTrieNodeV2 {
5776 path: Nibbles::from_nibbles([0x1]),
5777 node: TrieNodeV2::Leaf(leaf),
5778 masks: None,
5779 },
5780 ])
5781 .unwrap();
5782
5783 let sparse_old = sparse.clone();
5785 assert_matches!(
5786 sparse.remove_leaf(&pad_nibbles_right(Nibbles::from_nibbles([0x2])), &provider),
5787 Ok(())
5788 );
5789 assert_eq!(sparse, sparse_old);
5790 }
5791
    /// Property test: applies random batches of account inserts followed by
    /// random deletions, and after every batch compares the sparse trie's root,
    /// updated nodes, and proof nodes against the database-backed hash builder.
    #[test]
    fn sparse_trie_fuzz() {
        // Each generated key is this many nibbles long before right-padding.
        const KEY_NIBBLES_LEN: usize = 3;

        // One fuzz case: a sequence of (inserts, keys-to-delete) batches.
        fn test(updates: Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)>) {
            {
                // `state` mirrors the expected key -> account contents of the trie.
                let mut state = BTreeMap::default();
                let default_provider = DefaultTrieNodeProvider;
                let provider_factory = create_test_provider_factory();
                let mut sparse = ParallelSparseTrie::default().with_updates(true);

                for (update, keys_to_delete) in updates {
                    // Apply the batch of inserts to the sparse trie.
                    for (key, account) in update.clone() {
                        let account = account.into_trie_account(EMPTY_ROOT_HASH);
                        let mut account_rlp = Vec::new();
                        account.encode(&mut account_rlp);
                        sparse.update_leaf(key, account_rlp, &default_provider).unwrap();
                    }
                    // Hash on a clone so `sparse` keeps accumulating updates for
                    // later batches.
                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    state.extend(update);
                    // Reference computation via the DB-backed hash builder.
                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                Default::default(),
                                state.keys().copied(),
                            )
                        });

                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    // Persist the hash builder's trie updates so the next batch's
                    // cursor sees them.
                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    assert_eq!(sparse_root, hash_builder_root);
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );

                    // Now delete the randomly chosen keys from both the sparse
                    // trie and the mirrored state.
                    for key in &keys_to_delete {
                        state.remove(key).unwrap();
                        sparse.remove_leaf(key, &default_provider).unwrap();
                    }

                    let mut updated_sparse = sparse.clone();
                    let sparse_root = updated_sparse.root();
                    let sparse_updates = updated_sparse.take_updates();

                    // Reference computation again, passing the deleted keys as
                    // destroyed accounts.
                    let provider = provider_factory.provider().unwrap();
                    let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
                        reth_trie_db::with_adapter!(provider_factory, |A| {
                            let trie_cursor =
                                DatabaseTrieCursorFactory::<_, A>::new(provider.tx_ref());
                            run_hash_builder(
                                state.clone(),
                                trie_cursor.account_trie_cursor().unwrap(),
                                keys_to_delete
                                    .iter()
                                    .map(|nibbles| B256::from_slice(&nibbles.pack()))
                                    .collect(),
                                state.keys().copied(),
                            )
                        });

                    let hash_builder_account_nodes = hash_builder_updates.account_nodes.clone();

                    let provider_rw = provider_factory.provider_rw().unwrap();
                    provider_rw.write_trie_updates(hash_builder_updates).unwrap();
                    provider_rw.commit().unwrap();

                    assert_eq!(sparse_root, hash_builder_root);
                    pretty_assertions::assert_eq!(
                        BTreeMap::from_iter(sparse_updates.updated_nodes),
                        BTreeMap::from_iter(hash_builder_account_nodes)
                    );
                    assert_eq_parallel_sparse_trie_proof_nodes(
                        &updated_sparse,
                        hash_builder_proof_nodes,
                    );
                }
            }
        }

        // Pairs every generated insert batch with a random half-sized set of
        // previously inserted keys to delete afterwards.
        fn transform_updates(
            updates: Vec<BTreeMap<Nibbles, Account>>,
            mut rng: impl rand::Rng,
        ) -> Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)> {
            let mut keys = BTreeSet::new();
            updates
                .into_iter()
                .map(|update| {
                    keys.extend(update.keys().copied());

                    let keys_to_delete_len = update.len() / 2;
                    let keys_to_delete = (0..keys_to_delete_len)
                        .map(|_| {
                            // Pick a random live key and remove it from the pool
                            // so it is not deleted twice.
                            let key =
                                *rand::seq::IteratorRandom::choose(keys.iter(), &mut rng).unwrap();
                            keys.take(&key).unwrap()
                        })
                        .collect();

                    (update, keys_to_delete)
                })
                .collect::<Vec<_>>()
        }

        proptest!(ProptestConfig::with_cases(10), |(
            updates in proptest::collection::vec(
                proptest::collection::btree_map(
                    any_with::<Nibbles>(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles_right),
                    arb::<Account>(),
                    1..50,
                ),
                1..50,
            ).prop_perturb(transform_updates)
        )| {
            test(updates)
        });
    }
5948
5949 #[test]
5950 fn sparse_trie_two_leaves_at_lower_roots() {
5951 let provider = DefaultTrieNodeProvider;
5952 let mut trie = ParallelSparseTrie::default().with_updates(true);
5953 let key_50 = Nibbles::unpack(hex!(
5954 "0x5000000000000000000000000000000000000000000000000000000000000000"
5955 ));
5956 let key_51 = Nibbles::unpack(hex!(
5957 "0x5100000000000000000000000000000000000000000000000000000000000000"
5958 ));
5959
5960 let account = Account::default().into_trie_account(EMPTY_ROOT_HASH);
5961 let mut account_rlp = Vec::new();
5962 account.encode(&mut account_rlp);
5963
5964 trie.update_leaf(key_50, account_rlp.clone(), &provider).unwrap();
5966 trie.root();
5967
5968 trie.update_leaf(key_51, account_rlp.clone(), &provider).unwrap();
5970
5971 let expected_root =
5972 hex!("0xdaf0ef9f91a2f179bb74501209effdb5301db1697bcab041eca2234b126e25de");
5973 let root = trie.root();
5974 assert_eq!(root, expected_root);
5975 assert_eq!(SparseTrieUpdates::default(), trie.take_updates());
5976 }
5977
5978 #[test]
5990 fn sparse_trie_reveal_node_1() {
5991 let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00]));
5992 let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01]));
5993 let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x02]));
5994 let value = || Account::default();
5995 let value_encoded = || {
5996 let mut account_rlp = Vec::new();
5997 value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
5998 account_rlp
5999 };
6000
6001 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6003 run_hash_builder(
6004 [(key1(), value()), (key3(), value())],
6005 NoopAccountTrieCursor::default(),
6006 Default::default(),
6007 [Nibbles::default()],
6008 );
6009
6010 let provider = DefaultTrieNodeProvider;
6011 let masks = match (
6012 branch_node_hash_masks.get(&Nibbles::default()).copied(),
6013 branch_node_tree_masks.get(&Nibbles::default()).copied(),
6014 ) {
6015 (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
6016 (Some(h), None) => {
6017 Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
6018 }
6019 (None, Some(t)) => {
6020 Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
6021 }
6022 (None, None) => None,
6023 };
6024 let mut sparse = ParallelSparseTrie::from_root(
6025 TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
6026 masks,
6027 false,
6028 )
6029 .unwrap();
6030
6031 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6033 run_hash_builder(
6034 [(key1(), value()), (key3(), value())],
6035 NoopAccountTrieCursor::default(),
6036 Default::default(),
6037 [key1()],
6038 );
6039 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6040 .nodes_sorted()
6041 .into_iter()
6042 .map(|(path, node)| {
6043 let hash_mask = branch_node_hash_masks.get(&path).copied();
6044 let tree_mask = branch_node_tree_masks.get(&path).copied();
6045 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6046 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6047 })
6048 .collect();
6049 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6050
6051 assert_matches!(
6053 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6054 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b101)
6055 );
6056
6057 sparse.update_leaf(key2(), value_encoded(), &provider).unwrap();
6059
6060 assert_matches!(
6062 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6063 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6064 );
6065
6066 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6068 run_hash_builder(
6069 [(key1(), value()), (key3(), value())],
6070 NoopAccountTrieCursor::default(),
6071 Default::default(),
6072 [key3()],
6073 );
6074 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6075 .nodes_sorted()
6076 .into_iter()
6077 .map(|(path, node)| {
6078 let hash_mask = branch_node_hash_masks.get(&path).copied();
6079 let tree_mask = branch_node_tree_masks.get(&path).copied();
6080 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6081 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6082 })
6083 .collect();
6084 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6085
6086 assert_matches!(
6088 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6089 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b111)
6090 );
6091
6092 let (_, _, hash_builder_proof_nodes, _, _) = run_hash_builder(
6095 [(key1(), value()), (key2(), value()), (key3(), value())],
6096 NoopAccountTrieCursor::default(),
6097 Default::default(),
6098 [key1(), key2(), key3()],
6099 );
6100
6101 assert_eq_parallel_sparse_trie_proof_nodes(&sparse, hash_builder_proof_nodes);
6102 }
6103
6104 #[test]
6115 fn sparse_trie_reveal_node_2() {
6116 let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x00]));
6117 let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x01]));
6118 let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x02]));
6119 let value = || Account::default();
6120
6121 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6123 run_hash_builder(
6124 [(key1(), value()), (key2(), value()), (key3(), value())],
6125 NoopAccountTrieCursor::default(),
6126 Default::default(),
6127 [Nibbles::default()],
6128 );
6129
6130 let provider = DefaultTrieNodeProvider;
6131 let masks = match (
6132 branch_node_hash_masks.get(&Nibbles::default()).copied(),
6133 branch_node_tree_masks.get(&Nibbles::default()).copied(),
6134 ) {
6135 (Some(h), Some(t)) => Some(BranchNodeMasks { hash_mask: h, tree_mask: t }),
6136 (Some(h), None) => {
6137 Some(BranchNodeMasks { hash_mask: h, tree_mask: TrieMask::default() })
6138 }
6139 (None, Some(t)) => {
6140 Some(BranchNodeMasks { hash_mask: TrieMask::default(), tree_mask: t })
6141 }
6142 (None, None) => None,
6143 };
6144 let mut sparse = ParallelSparseTrie::from_root(
6145 TrieNodeV2::decode(&mut &hash_builder_proof_nodes.nodes_sorted()[0].1[..]).unwrap(),
6146 masks,
6147 false,
6148 )
6149 .unwrap();
6150
6151 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6154 run_hash_builder(
6155 [(key1(), value()), (key2(), value()), (key3(), value())],
6156 NoopAccountTrieCursor::default(),
6157 Default::default(),
6158 [key1(), Nibbles::from_nibbles_unchecked([0x01])],
6159 );
6160 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6161 .nodes_sorted()
6162 .into_iter()
6163 .map(|(path, node)| {
6164 let hash_mask = branch_node_hash_masks.get(&path).copied();
6165 let tree_mask = branch_node_tree_masks.get(&path).copied();
6166 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6167 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6168 })
6169 .collect();
6170 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6171
6172 assert_matches!(
6174 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6175 Some(&SparseNode::Branch { state_mask, state: SparseNodeState::Dirty, .. }) if state_mask == TrieMask::new(0b11)
6176 );
6177
6178 sparse.remove_leaf(&key1(), &provider).unwrap();
6180
6181 assert_eq!(
6183 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6184 Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6185 );
6186
6187 let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
6189 run_hash_builder(
6190 [(key1(), value()), (key2(), value()), (key3(), value())],
6191 NoopAccountTrieCursor::default(),
6192 Default::default(),
6193 [key2()],
6194 );
6195 let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
6196 .nodes_sorted()
6197 .into_iter()
6198 .map(|(path, node)| {
6199 let hash_mask = branch_node_hash_masks.get(&path).copied();
6200 let tree_mask = branch_node_tree_masks.get(&path).copied();
6201 let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
6202 ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
6203 })
6204 .collect();
6205 sparse.reveal_nodes(&mut revealed_nodes).unwrap();
6206
6207 assert_eq!(
6209 sparse.upper_subtrie.nodes.get(&Nibbles::default()),
6210 Some(&SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x01])))
6211 );
6212 }
6213
    #[test]
    fn sparse_trie_reveal_node_3() {
        // Three 2-nibble key prefixes, right-padded to full key width. key1 and
        // key2 share the leading 0x00 nibble; key3 diverges at the root.
        let key1 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x01]));
        let key2 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x00, 0x02]));
        let key3 = || pad_nibbles_right(Nibbles::from_nibbles_unchecked([0x01, 0x00]));
        let value = || Account::default();
        // RLP encoding of the default account, as stored in leaf values.
        let value_encoded = || {
            let mut account_rlp = Vec::new();
            value().into_trie_account(EMPTY_ROOT_HASH).encode(&mut account_rlp);
            account_rlp
        };

        // Build a reference trie over key1/key2 and collect the proof targeting
        // the root path.
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [Nibbles::default()],
            );

        // Pair every proof node with the masks the hash builder recorded for it.
        let mut nodes = Vec::new();

        for (path, node) in hash_builder_proof_nodes.nodes_sorted() {
            let hash_mask = branch_node_hash_masks.get(&path).copied();
            let tree_mask = branch_node_tree_masks.get(&path).copied();
            let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
            nodes.push((path, TrieNode::decode(&mut &node[..]).unwrap(), masks));
        }

        // `from_sorted_trie_nodes` expects depth-first ordering.
        nodes.sort_unstable_by(|a, b| reth_trie_common::depth_first_cmp(&a.0, &b.0));

        let nodes = ProofTrieNodeV2::from_sorted_trie_nodes(nodes);

        let provider = DefaultTrieNodeProvider;
        // Seed the sparse trie from the root proof node (updates not retained).
        let mut sparse =
            ParallelSparseTrie::from_root(nodes[0].node.clone(), nodes[0].masks, false).unwrap();

        // With only key1/key2 present the root is an extension over their
        // shared 0x00 nibble.
        assert_matches!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(SparseNode::Extension { key, state: SparseNodeState::Dirty }) if *key == Nibbles::from_nibbles([0x00])
        );

        // Inserting key3 (leading nibble 0x01) forces the root into a branch.
        sparse.update_leaf(key3(), value_encoded(), &provider).unwrap();

        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );

        // Produce a proof for key1 from the same key1/key2 reference trie; its
        // root node is the pre-insertion extension, i.e. stale.
        let (_, _, hash_builder_proof_nodes, branch_node_hash_masks, branch_node_tree_masks) =
            run_hash_builder(
                [(key1(), value()), (key2(), value())],
                NoopAccountTrieCursor::default(),
                Default::default(),
                [key1()],
            );
        let mut revealed_nodes: Vec<ProofTrieNodeV2> = hash_builder_proof_nodes
            .nodes_sorted()
            .into_iter()
            .map(|(path, node)| {
                let hash_mask = branch_node_hash_masks.get(&path).copied();
                let tree_mask = branch_node_tree_masks.get(&path).copied();
                let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
                ProofTrieNodeV2 { path, node: TrieNodeV2::decode(&mut &node[..]).unwrap(), masks }
            })
            .collect();
        sparse.reveal_nodes(&mut revealed_nodes).unwrap();

        // Revealing the stale extension must NOT clobber the branch created by
        // the local key3 insertion.
        assert_eq!(
            sparse.upper_subtrie.nodes.get(&Nibbles::default()),
            Some(&SparseNode::new_branch(TrieMask::new(0b11), &[]))
        );
    }
6301
    #[test]
    fn test_update_leaf_cross_level() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // A single leaf is stored as the root leaf in the upper subtrie, keyed
        // by its full (padded) path.
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x3, 0x4, 0x5], 1);
        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_leaf(
                &Nibbles::default(),
                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x3, 0x4, 0x5])),
            )
            .has_value(&leaf1_path, &value1);

        // Second leaf diverges at the second nibble (0x2 vs 0x3): a branch
        // appears at [0x1] and both values leave the upper subtrie.
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
            .has_no_value(&leaf1_path)
            .has_no_value(&leaf2_path);

        // Third leaf shares [0x1, 0x2] with leaf 2 and diverges at the third
        // nibble, creating a branch inside the [0x1, 0x2] lower subtrie.
        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x4, 0x5], 3);
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // Fourth leaf shares [0x1, 0x3] with leaf 1, populating a second lower
        // subtrie.
        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x3, 0x3, 0x4], 4);
        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x3]))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf4_path, &value4);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // The upper subtrie keeps only structure (extension + branch); leaf
        // values live exclusively in the lower subtries.
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1]))
            .has_branch(&Nibbles::from_nibbles([0x1]), &[0x2, 0x3])
            .has_no_value(&leaf1_path)
            .has_no_value(&leaf2_path)
            .has_no_value(&leaf3_path)
            .has_no_value(&leaf4_path);
    }
6384
6385 #[test]
6386 fn test_update_leaf_split_at_level_boundary() {
6387 let ctx = ParallelSparseTrieTestContext;
6388 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6389
6390 let (first_leaf_path, first_value) = ctx.create_test_leaf([0x1, 0x2, 0x2, 0x4], 1);
6405
6406 trie.update_leaf(first_leaf_path, first_value.clone(), DefaultTrieNodeProvider).unwrap();
6407
6408 ctx.assert_upper_subtrie(&trie)
6410 .has_leaf(
6411 &Nibbles::default(),
6412 &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x2, 0x4])),
6413 )
6414 .has_value(&first_leaf_path, &first_value);
6415
6416 let (second_leaf_path, second_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 2);
6418
6419 trie.update_leaf(second_leaf_path, second_value.clone(), DefaultTrieNodeProvider).unwrap();
6420
6421 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6423 .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x2, 0x3])
6424 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x2]), &leaf_key([0x4], 61))
6425 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6426 .has_value(&first_leaf_path, &first_value)
6427 .has_value(&second_leaf_path, &second_value);
6428
6429 ctx.assert_upper_subtrie(&trie)
6431 .has_no_value(&first_leaf_path)
6432 .has_no_value(&second_leaf_path);
6433 }
6434
    #[test]
    fn test_update_subtrie_with_multiple_leaves() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Four leaves sharing the [0x1, 0x2] prefix, splitting 2x2 on the
        // third nibble (0x3 / 0x4).
        let leaves = ctx.create_test_leaves(&[
            &[0x1, 0x2, 0x3, 0x4],
            &[0x1, 0x2, 0x3, 0x5],
            &[0x1, 0x2, 0x4, 0x6],
            &[0x1, 0x2, 0x4, 0x7],
        ]);

        ctx.update_leaves(&mut trie, leaves.clone());

        // Upper subtrie holds only the extension over the shared prefix.
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        // The [0x1, 0x2] lower subtrie holds all three branches and the values.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &[0x6, 0x7])
            .has_value(&leaves[0].0, &leaves[0].1)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // Overwriting an existing leaf replaces the value without changing the
        // structure.
        let updated_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
        let (_, updated_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 100);

        trie.update_leaf(updated_path, updated_value.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_value(&updated_path, &updated_value)
            .has_value(&leaves[1].0, &leaves[1].1)
            .has_value(&leaves[2].0, &leaves[2].1)
            .has_value(&leaves[3].0, &leaves[3].1);

        // Adding a leaf under [0x1, 0x2, 0x3] widens that branch to three
        // children.
        let (new_leaf_path, new_leaf_value) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 200);

        trie.update_leaf(new_leaf_path, new_leaf_value.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6])
            .has_value(&new_leaf_path, &new_leaf_value);
    }
6502
6503 #[test]
6504 fn test_update_subtrie_extension_node_subtrie() {
6505 let ctx = ParallelSparseTrieTestContext;
6506 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6507
6508 let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x3, 0x5]]);
6517
6518 ctx.update_leaves(&mut trie, leaves.clone());
6520
6521 ctx.assert_upper_subtrie(&trie)
6523 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6524
6525 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6527 .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
6528 .has_value(&leaves[0].0, &leaves[0].1)
6529 .has_value(&leaves[1].0, &leaves[1].1);
6530 }
6531
6532 #[test]
6533 fn update_subtrie_extension_node_cross_level() {
6534 let ctx = ParallelSparseTrieTestContext;
6535 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6536
6537 let leaves = ctx.create_test_leaves(&[&[0x1, 0x2, 0x3, 0x4], &[0x1, 0x2, 0x4, 0x5]]);
6547
6548 ctx.update_leaves(&mut trie, leaves.clone());
6550
6551 ctx.assert_upper_subtrie(&trie)
6553 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));
6554
6555 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6557 .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
6558 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &leaf_key([0x4], 61))
6559 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([0x5], 61))
6560 .has_value(&leaves[0].0, &leaves[0].1)
6561 .has_value(&leaves[1].0, &leaves[1].1);
6562 }
6563
6564 #[test]
6565 fn test_update_single_nibble_paths() {
6566 let ctx = ParallelSparseTrieTestContext;
6567 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6568
6569 let (leaf1_path, value1) = ctx.create_test_leaf([0x0], 1);
6581 let (leaf2_path, value2) = ctx.create_test_leaf([0x1], 2);
6582 let (leaf3_path, value3) = ctx.create_test_leaf([0x2], 3);
6583 let (leaf4_path, value4) = ctx.create_test_leaf([0x3], 4);
6584
6585 ctx.update_leaves(
6586 &mut trie,
6587 [
6588 (leaf1_path, value1.clone()),
6589 (leaf2_path, value2.clone()),
6590 (leaf3_path, value3.clone()),
6591 (leaf4_path, value4.clone()),
6592 ],
6593 );
6594
6595 ctx.assert_upper_subtrie(&trie)
6597 .has_branch(&Nibbles::default(), &[0x0, 0x1, 0x2, 0x3])
6598 .has_leaf(&Nibbles::from_nibbles([0x0]), &leaf_key([], 63))
6599 .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([], 63))
6600 .has_leaf(&Nibbles::from_nibbles([0x2]), &leaf_key([], 63))
6601 .has_leaf(&Nibbles::from_nibbles([0x3]), &leaf_key([], 63))
6602 .has_value(&leaf1_path, &value1)
6603 .has_value(&leaf2_path, &value2)
6604 .has_value(&leaf3_path, &value3)
6605 .has_value(&leaf4_path, &value4);
6606 }
6607
6608 #[test]
6609 fn test_update_deep_extension_chain() {
6610 let ctx = ParallelSparseTrieTestContext;
6611 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6612
6613 let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0], 1);
6627 let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1], 2);
6628
6629 ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);
6630
6631 ctx.assert_upper_subtrie(&trie).has_extension(
6633 &Nibbles::default(),
6634 &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6635 );
6636
6637 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x1]))
6639 .has_branch(&Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1]), &[0x0, 0x1])
6640 .has_leaf(
6641 &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x0]),
6642 &leaf_key([], 57),
6643 )
6644 .has_leaf(
6645 &Nibbles::from_nibbles([0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1]),
6646 &leaf_key([], 57),
6647 )
6648 .has_value(&leaf1_path, &value1)
6649 .has_value(&leaf2_path, &value2);
6650 }
6651
6652 #[test]
6653 fn test_update_branch_with_all_nibbles() {
6654 let ctx = ParallelSparseTrieTestContext;
6655 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6656
6657 let mut leaves = Vec::new();
6674 for nibble in 0x0..=0xF {
6675 let (path, value) = ctx.create_test_leaf([0xA, 0x0, nibble], nibble as u64 + 1);
6676 leaves.push((path, value));
6677 }
6678
6679 ctx.update_leaves(&mut trie, leaves.iter().cloned());
6681
6682 ctx.assert_upper_subtrie(&trie)
6684 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0x0]));
6685
6686 let mut subtrie_assert =
6688 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0x0])).has_branch(
6689 &Nibbles::from_nibbles([0xA, 0x0]),
6690 &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, 0xD, 0xE, 0xF],
6691 );
6692
6693 for (i, (path, value)) in leaves.iter().enumerate() {
6695 subtrie_assert = subtrie_assert
6696 .has_leaf(&Nibbles::from_nibbles([0xA, 0x0, i as u8]), &leaf_key([], 61))
6697 .has_value(path, value);
6698 }
6699 }
6700
6701 #[test]
6702 fn test_update_creates_multiple_subtries() {
6703 let ctx = ParallelSparseTrieTestContext;
6704 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6705
6706 let leaves = [
6722 ctx.create_test_leaf([0x0, 0x0, 0x1, 0x2], 1),
6723 ctx.create_test_leaf([0x0, 0x1, 0x3, 0x4], 2),
6724 ctx.create_test_leaf([0x0, 0x2, 0x5, 0x6], 3),
6725 ctx.create_test_leaf([0x0, 0x3, 0x7, 0x8], 4),
6726 ];
6727
6728 ctx.update_leaves(&mut trie, leaves.iter().cloned());
6730
6731 ctx.assert_upper_subtrie(&trie)
6733 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x0]))
6734 .has_branch(&Nibbles::from_nibbles([0x0]), &[0x0, 0x1, 0x2, 0x3]);
6735
6736 for (i, (leaf_path, leaf_value)) in leaves.iter().enumerate() {
6738 let subtrie_path = Nibbles::from_nibbles([0x0, i as u8]);
6739 let full_path: [u8; 4] = match i {
6740 0 => [0x0, 0x0, 0x1, 0x2],
6741 1 => [0x0, 0x1, 0x3, 0x4],
6742 2 => [0x0, 0x2, 0x5, 0x6],
6743 3 => [0x0, 0x3, 0x7, 0x8],
6744 _ => unreachable!(),
6745 };
6746 ctx.assert_subtrie(&trie, subtrie_path)
6747 .has_leaf(&subtrie_path, &leaf_key(&full_path[2..], 62))
6748 .has_value(leaf_path, leaf_value);
6749 }
6750 }
6751
    #[test]
    fn test_update_extension_to_branch_transformation() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Leaves 1 and 2 share [0xF, 0xF, 0x0]; leaf 3 shares only [0xF].
        let (leaf1_path, value1) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x1], 1);
        let (leaf2_path, value2) = ctx.create_test_leaf([0xF, 0xF, 0x0, 0x2], 2);
        let (leaf3_path, value3) = ctx.create_test_leaf([0xF, 0x0, 0x0, 0x3], 3);

        ctx.update_leaves(&mut trie, [(leaf1_path, value1.clone()), (leaf2_path, value2.clone())]);

        // With only leaves 1 and 2 the root extension spans all three shared
        // nibbles.
        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF, 0xF, 0x0]));

        // Leaf 3 shortens the extension to [0xF] and forces a branch there
        // with children 0x0 and 0xF.
        ctx.update_leaves(&mut trie, [(leaf3_path, value3.clone())]);

        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xF]))
            .has_branch(&Nibbles::from_nibbles([0xF]), &[0x0, 0xF]);

        // Leaves 1 and 2 end up under the [0xF, 0xF] lower subtrie...
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
            .has_branch(&Nibbles::from_nibbles([0xF, 0xF, 0x0]), &[0x1, 0x2])
            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x1]), &leaf_key([], 60))
            .has_leaf(&Nibbles::from_nibbles([0xF, 0xF, 0x0, 0x2]), &leaf_key([], 60))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);

        // ...while leaf 3 is the sole occupant of the [0xF, 0x0] subtrie.
        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0x0]))
            .has_leaf(&Nibbles::from_nibbles([0xF, 0x0]), &leaf_key([0x0, 0x3], 62))
            .has_value(&leaf3_path, &value3);
    }
6802
6803 #[test]
6804 fn test_update_long_shared_prefix_at_boundary() {
6805 let ctx = ParallelSparseTrieTestContext;
6806 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6807
6808 let (leaf1_path, value1) = ctx.create_test_leaf([0xA, 0xB, 0xC, 0xD, 0xE, 0xF], 1);
6822 let (leaf2_path, value2) = ctx.create_test_leaf([0xA, 0xB, 0xD, 0xE, 0xF, 0x0], 2);
6823
6824 trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
6825 trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6826
6827 ctx.assert_upper_subtrie(&trie)
6829 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0xA, 0xB]));
6830
6831 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xA, 0xB]))
6833 .has_branch(&Nibbles::from_nibbles([0xA, 0xB]), &[0xC, 0xD])
6834 .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xC]), &leaf_key([0xD, 0xE, 0xF], 61))
6835 .has_leaf(&Nibbles::from_nibbles([0xA, 0xB, 0xD]), &leaf_key([0xE, 0xF, 0x0], 61))
6836 .has_value(&leaf1_path, &value1)
6837 .has_value(&leaf2_path, &value2);
6838 }
6839
6840 #[test]
6841 fn test_update_branch_to_extension_collapse() {
6842 let ctx = ParallelSparseTrieTestContext;
6843 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6844
6845 let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6871 let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6872 let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6873
6874 trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
6875 trie.update_leaf(leaf2_path, value2, DefaultTrieNodeProvider).unwrap();
6876 trie.update_leaf(leaf3_path, value3, DefaultTrieNodeProvider).unwrap();
6877
6878 ctx.assert_upper_subtrie(&trie).has_branch(&Nibbles::default(), &[0x1, 0x2]);
6880
6881 let (new_leaf1_path, new_value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 10);
6884 let (new_leaf2_path, new_value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 11);
6885 let (new_leaf3_path, new_value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x6], 12);
6886
6887 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6889 trie.update_leaf(new_leaf1_path, new_value1.clone(), DefaultTrieNodeProvider).unwrap();
6890 trie.update_leaf(new_leaf2_path, new_value2.clone(), DefaultTrieNodeProvider).unwrap();
6891 trie.update_leaf(new_leaf3_path, new_value3.clone(), DefaultTrieNodeProvider).unwrap();
6892
6893 ctx.assert_upper_subtrie(&trie)
6895 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));
6896
6897 ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);
6899
6900 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
6902 .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5, 0x6]) .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &leaf_key([], 60))
6904 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
6905 .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x6]), &leaf_key([], 60))
6906 .has_value(&new_leaf1_path, &new_value1)
6907 .has_value(&new_leaf2_path, &new_value2)
6908 .has_value(&new_leaf3_path, &new_value3);
6909 }
6910
6911 #[test]
6912 fn test_update_shared_prefix_patterns() {
6913 let ctx = ParallelSparseTrieTestContext;
6914 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
6915
6916 let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4], 1);
6932 let (leaf2_path, value2) = ctx.create_test_leaf([0x2, 0x3, 0x4, 0x5], 2);
6933 let (leaf3_path, value3) = ctx.create_test_leaf([0x2, 0x3, 0x5, 0x6], 3);
6934
6935 trie.update_leaf(leaf1_path, value1, DefaultTrieNodeProvider).unwrap();
6936 trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
6937 trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();
6938
6939 ctx.assert_upper_subtrie(&trie)
6941 .has_branch(&Nibbles::default(), &[0x1, 0x2])
6942 .has_leaf(&Nibbles::from_nibbles([0x1]), &leaf_key([0x2, 0x3, 0x4], 63))
6943 .has_extension(&Nibbles::from_nibbles([0x2]), &Nibbles::from_nibbles([0x3]));
6944
6945 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x2, 0x3]))
6947 .has_branch(&Nibbles::from_nibbles([0x2, 0x3]), &[0x4, 0x5])
6948 .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x4]), &leaf_key([0x5], 61))
6949 .has_leaf(&Nibbles::from_nibbles([0x2, 0x3, 0x5]), &leaf_key([0x6], 61))
6950 .has_value(&leaf2_path, &value2)
6951 .has_value(&leaf3_path, &value3);
6952 }
6953
    #[test]
    fn test_progressive_branch_creation() {
        let ctx = ParallelSparseTrieTestContext;
        let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();

        // Stage 1: a single leaf is stored as the root leaf in the upper
        // subtrie.
        let (leaf1_path, value1) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x5], 1);
        trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_leaf(
                &Nibbles::default(),
                &pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5])),
            )
            .has_value(&leaf1_path, &value1);

        // Stage 2: second leaf diverges on the fifth nibble -> root extension
        // over [0x1, 0x2, 0x3, 0x4] plus a branch in the [0x1, 0x2] subtrie.
        let (leaf2_path, value2) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x4, 0x6], 2);
        trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));

        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3, 0x4]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5]), &leaf_key([], 59))
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x6]), &leaf_key([], 59))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2);

        // Stage 3: third leaf diverges on the fourth nibble, shortening the
        // root extension to [0x1, 0x2, 0x3] and adding a branch above the old
        // one.
        let (leaf3_path, value3) = ctx.create_test_leaf([0x1, 0x2, 0x3, 0x5], 3);
        trie.update_leaf(leaf3_path, value3.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2, 0x3]));

        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2, 0x3]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x5]), &leaf_key([], 60))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3);

        // Stage 4: fourth leaf diverges on the third nibble; the subtrie root
        // at [0x1, 0x2] itself becomes a branch.
        let (leaf4_path, value4) = ctx.create_test_leaf([0x1, 0x2, 0x4], 4);
        trie.update_leaf(leaf4_path, value4.clone(), DefaultTrieNodeProvider).unwrap();

        ctx.assert_upper_subtrie(&trie)
            .has_extension(&Nibbles::default(), &Nibbles::from_nibbles([0x1, 0x2]));

        ctx.assert_subtrie_path(&trie, [0x1, 0x2], [0x1, 0x2]);

        ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0x1, 0x2]))
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2]), &[0x3, 0x4])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3]), &[0x4, 0x5])
            .has_branch(&Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]), &[0x5, 0x6])
            .has_leaf(&Nibbles::from_nibbles([0x1, 0x2, 0x4]), &leaf_key([], 61))
            .has_value(&leaf1_path, &value1)
            .has_value(&leaf2_path, &value2)
            .has_value(&leaf3_path, &value3)
            .has_value(&leaf4_path, &value4);
    }
7064
7065 #[test]
7066 fn test_update_max_depth_paths() {
7067 let ctx = ParallelSparseTrieTestContext;
7068 let mut trie = ParallelSparseTrie::from_root(TrieNodeV2::EmptyRoot, None, true).unwrap();
7069
7070 let mut path1_nibbles = vec![0xF; 63];
7082 path1_nibbles.push(0x0);
7083 let mut path2_nibbles = vec![0xF; 63];
7084 path2_nibbles.push(0x1);
7085
7086 let (leaf1_path, value1) = ctx.create_test_leaf(&path1_nibbles, 1);
7087 let (leaf2_path, value2) = ctx.create_test_leaf(&path2_nibbles, 2);
7088
7089 trie.update_leaf(leaf1_path, value1.clone(), DefaultTrieNodeProvider).unwrap();
7090 trie.update_leaf(leaf2_path, value2.clone(), DefaultTrieNodeProvider).unwrap();
7091
7092 let extension_key = vec![0xF; 63];
7094 ctx.assert_upper_subtrie(&trie)
7095 .has_extension(&Nibbles::default(), &Nibbles::from_nibbles(&extension_key));
7096
7097 ctx.assert_subtrie(&trie, Nibbles::from_nibbles([0xF, 0xF]))
7099 .has_branch(&Nibbles::from_nibbles(&path1_nibbles[..63]), &[0x0, 0x1])
7100 .has_value(&leaf1_path, &value1)
7101 .has_value(&leaf2_path, &value2);
7102 }
7103
    #[test]
    fn test_hoodie_block_1_data() {
        // Hard-coded RLP-encoded child hashes for the root branch node.
        let root_branch_stack = vec![
            hex!("a0550b6aba4dd4582a2434d2cbdad8d3007d09f622d7a6e6eaa7a49385823c2fa2"),
            hex!("a04788a4975a9e1efd29b834fd80fdfe8a57cc1b1c5ace6d30ce5a36a15e0092b3"),
            hex!("a093aeccf87da304e6f7d09edc5d7bd3a552808866d2149dd0940507a8f9bfa910"),
            hex!("a08b5b423ba68d0dec2eca1f408076f9170678505eb4a5db2abbbd83bb37666949"),
            hex!("a08592f62216af4218098a78acad7cf472a727fb55e6c27d3cfdf2774d4518eb83"),
            hex!("a0ef02aeee845cb64c11f85edc1a3094227c26445952554b8a9248915d80c746c3"),
            hex!("a0df2529ee3a1ce4df5a758cf17e6a86d0fb5ea22ab7071cf60af6412e9b0a428a"),
            hex!("a0acaa1092db69cd5a63676685827b3484c4b80dc1d3361f6073bbb9240101e144"),
            hex!("a09c3f2bb2a729d71f246a833353ade65667716bb330e0127a3299a42d11200f93"),
            hex!("a0ce978470f4c0b1f8069570563a14d2b79d709add2db4bf22dd9b6aed3271c566"),
            hex!("a095f783cd1d464a60e3c8adcadc28c6eb9fec7306664df39553be41dccc909606"),
            hex!("a0a9083f5fb914b255e1feb5d951a4dfddacf3c8003ef1d1ec6a13bb6ba5b2ac62"),
            hex!("a0fec113d537d8577cd361e0cabf5e95ef58f1cc34318292fdecce9fae57c3e094"),
            hex!("a08b7465f5fe8b3e3c0d087cb7521310d4065ef2a0ee43bf73f68dee8a5742b3dd"),
            hex!("a0c589aa1ae3d5fd87d8640957f7d5184a4ac06f393b453a8e8ed7e8fba0d385c8"),
            hex!("a0b516d6f3352f87beab4ed6e7322f191fc7a147686500ef4de7dd290ad784ef51"),
        ];

        let root_branch_rlp_stack: Vec<RlpNode> = root_branch_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        // Root branch with all sixteen children set (state mask 0xFFFF).
        let root_branch_node = BranchNodeV2::new(
            Default::default(),
            root_branch_rlp_stack,
            TrieMask::new(0b1111111111111111), None,
        );

        let root_branch_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b1111111111111111),
            tree_mask: TrieMask::new(0b1111111111111111),
        });

        // retain_updates = true so branch-node updates are tracked.
        let mut trie = ParallelSparseTrie::from_root(
            TrieNodeV2::Branch(root_branch_node),
            root_branch_masks,
            true,
        )
        .unwrap();

        // Child hashes of the branch node at path [0x3].
        let branch_0x3_stack = vec![
            hex!("a09da7d9755fe0c558b3c3de9fdcdf9f28ae641f38c9787b05b73ab22ae53af3e2"),
            hex!("a0d9990bf0b810d1145ecb2b011fd68c63cc85564e6724166fd4a9520180706e5f"),
            hex!("a0f60eb4b12132a40df05d9bbdb88bbde0185a3f097f3c76bf4200c23eda26cf86"),
            hex!("a0ca976997ddaf06f18992f6207e4f6a05979d07acead96568058789017cc6d06b"),
            hex!("a04d78166b48044fdc28ed22d2fd39c8df6f8aaa04cb71d3a17286856f6893ff83"),
            hex!("a021d4f90c34d3f1706e78463b6482bca77a3aa1cd059a3f326c42a1cfd30b9b60"),
            hex!("a0fc3b71c33e2e6b77c5e494c1db7fdbb447473f003daf378c7a63ba9bf3f0049d"),
            hex!("a0e33ed2be194a3d93d343e85642447c93a9d0cfc47a016c2c23d14c083be32a7c"),
            hex!("a07b8e7a21c1178d28074f157b50fca85ee25c12568ff8e9706dcbcdacb77bf854"),
            hex!("a0973274526811393ea0bf4811ca9077531db00d06b86237a2ecd683f55ba4bcb0"),
            hex!("a03a93d726d7487874e51b52d8d534c63aa2a689df18e3b307c0d6cb0a388b00f3"),
            hex!("a06aa67101d011d1c22fe739ef83b04b5214a3e2f8e1a2625d8bfdb116b447e86f"),
            hex!("a02dd545b33c62d33a183e127a08a4767fba891d9f3b94fc20a2ca02600d6d1fff"),
            hex!("a0fe6db87d00f06d53bff8169fa497571ff5af1addfb715b649b4d79dd3e394b04"),
            hex!("a0d9240a9d2d5851d05a97ff3305334dfdb0101e1e321fc279d2bb3cad6afa8fc8"),
            hex!("a01b69c6ab5173de8a8ec53a6ebba965713a4cc7feb86cb3e230def37c230ca2b2"),
        ];

        let branch_0x3_rlp_stack: Vec<RlpNode> = branch_0x3_stack
            .iter()
            .map(|hex_str| RlpNode::from_raw_rlp(&hex_str[..]).unwrap())
            .collect();

        // Branch at [0x3], also with a full state mask.
        let branch_0x3_node = BranchNodeV2::new(
            Default::default(),
            branch_0x3_rlp_stack,
            TrieMask::new(0b1111111111111111), None,
        );

        let branch_0x3_masks = Some(BranchNodeMasks {
            hash_mask: TrieMask::new(0b0100010000010101),
            tree_mask: TrieMask::new(0b0100000000000000),
        });

        // Account leaf at [0x3, 0x7]; its key holds the remaining 62 nibbles
        // (31 bytes) of the 64-nibble account path.
        let leaf_path = Nibbles::from_nibbles([0x3, 0x7]);
        let leaf_key = Nibbles::unpack(
            &hex!("d65eaa92c6bc4c13a5ec45527f0c18ea8932588728769ec7aecfe6d9f32e42")[..],
        );
        let leaf_value = hex!("f8440180a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0f57acd40259872606d76197ef052f3d35588dadf919ee1f0e3cb9b62d3f4b02c").to_vec();

        let leaf_node = LeafNode::new(leaf_key, leaf_value);
        let leaf_masks = None;

        // Reveal the [0x3] branch and the [0x3, 0x7] leaf beneath it.
        trie.reveal_nodes(&mut [
            ProofTrieNodeV2 {
                path: Nibbles::from_nibbles([0x3]),
                node: TrieNodeV2::Branch(branch_0x3_node),
                masks: branch_0x3_masks,
            },
            ProofTrieNodeV2 {
                path: leaf_path,
                node: TrieNodeV2::Leaf(leaf_node),
                masks: leaf_masks,
            },
        ])
        .unwrap();

        // Full leaf path = node path + leaf key.
        let mut leaf_full_path = leaf_path;
        leaf_full_path.extend(&leaf_key);

        // New RLP-encoded account payload to write into the revealed leaf.
        let leaf_new_value = vec![
            248, 68, 1, 128, 160, 224, 163, 152, 169, 122, 160, 155, 102, 53, 41, 0, 47, 28, 205,
            190, 199, 5, 215, 108, 202, 22, 138, 70, 196, 178, 193, 208, 18, 96, 95, 63, 238, 160,
            245, 122, 205, 64, 37, 152, 114, 96, 109, 118, 25, 126, 240, 82, 243, 211, 85, 136,
            218, 223, 145, 158, 225, 240, 227, 203, 155, 98, 211, 244, 176, 44,
        ];

        trie.update_leaf(leaf_full_path, leaf_new_value.clone(), DefaultTrieNodeProvider).unwrap();

        // The updated value must be stored in the lower subtrie for
        // [0x3, 0x7], never in the upper subtrie.
        assert_eq!(
            Some(&leaf_new_value),
            trie.lower_subtrie_for_path(&leaf_path).unwrap().inner.values.get(&leaf_full_path)
        );
        assert!(trie.upper_subtrie.inner.values.is_empty());

        // Root hash after the update must match the expected reference root.
        let expected_root =
            b256!("0x29b07de8376e9ce7b3a69e9b102199869514d3f42590b5abc6f7d48ec9b8665c");
        assert_eq!(trie.root(), expected_root);
    }
7236
7237 #[test]
7238 fn find_leaf_existing_leaf() {
7239 let provider = DefaultTrieNodeProvider;
7241 let mut sparse = ParallelSparseTrie::default();
7242 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7243 let value = b"test_value".to_vec();
7244
7245 sparse.update_leaf(path, value.clone(), &provider).unwrap();
7246
7247 let result = sparse.find_leaf(&path, None);
7249 assert_matches!(result, Ok(LeafLookup::Exists));
7250
7251 let result = sparse.find_leaf(&path, Some(&value));
7253 assert_matches!(result, Ok(LeafLookup::Exists));
7254 }
7255
7256 #[test]
7257 fn find_leaf_value_mismatch() {
7258 let provider = DefaultTrieNodeProvider;
7260 let mut sparse = ParallelSparseTrie::default();
7261 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3]));
7262 let value = b"test_value".to_vec();
7263 let wrong_value = b"wrong_value".to_vec();
7264
7265 sparse.update_leaf(path, value, &provider).unwrap();
7266
7267 let result = sparse.find_leaf(&path, Some(&wrong_value));
7269 assert_matches!(
7270 result,
7271 Err(LeafLookupError::ValueMismatch { path: p, expected: Some(e), actual: _a }) if p == path && e == wrong_value
7272 );
7273 }
7274
7275 #[test]
7276 fn find_leaf_not_found_empty_trie() {
7277 let sparse = ParallelSparseTrie::default();
7279 let path = Nibbles::from_nibbles([0x1, 0x2, 0x3]);
7280
7281 let result = sparse.find_leaf(&path, None);
7283 assert_matches!(result, Ok(LeafLookup::NonExistent));
7284 }
7285
7286 #[test]
7287 fn find_leaf_empty_trie() {
7288 let sparse = ParallelSparseTrie::default();
7289 let path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7290
7291 let result = sparse.find_leaf(&path, None);
7292 assert_matches!(result, Ok(LeafLookup::NonExistent));
7293 }
7294
7295 #[test]
7296 fn find_leaf_exists_no_value_check() {
7297 let provider = DefaultTrieNodeProvider;
7298 let mut sparse = ParallelSparseTrie::default();
7299 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7300 sparse.update_leaf(path, encode_account_value(0), &provider).unwrap();
7301
7302 let result = sparse.find_leaf(&path, None);
7303 assert_matches!(result, Ok(LeafLookup::Exists));
7304 }
7305
7306 #[test]
7307 fn find_leaf_exists_with_value_check_ok() {
7308 let provider = DefaultTrieNodeProvider;
7309 let mut sparse = ParallelSparseTrie::default();
7310 let path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7311 let value = encode_account_value(0);
7312 sparse.update_leaf(path, value.clone(), &provider).unwrap();
7313
7314 let result = sparse.find_leaf(&path, Some(&value));
7315 assert_matches!(result, Ok(LeafLookup::Exists));
7316 }
7317
7318 #[test]
7319 fn find_leaf_exclusion_branch_divergence() {
7320 let provider = DefaultTrieNodeProvider;
7321 let mut sparse = ParallelSparseTrie::default();
7322 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6])); let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8])); sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7327 sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7328
7329 let result = sparse.find_leaf(&search_path, None);
7330 assert_matches!(result, Ok(LeafLookup::NonExistent))
7331 }
7332
7333 #[test]
7334 fn find_leaf_exclusion_extension_divergence() {
7335 let provider = DefaultTrieNodeProvider;
7336 let mut sparse = ParallelSparseTrie::default();
7337 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7339 let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x7, 0x8]));
7341
7342 sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7343
7344 let result = sparse.find_leaf(&search_path, None);
7345 assert_matches!(result, Ok(LeafLookup::NonExistent))
7346 }
7347
7348 #[test]
7349 fn find_leaf_exclusion_leaf_divergence() {
7350 let provider = DefaultTrieNodeProvider;
7351 let mut sparse = ParallelSparseTrie::default();
7352 let existing_leaf_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]));
7353 let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
7354
7355 sparse.update_leaf(existing_leaf_path, encode_account_value(0), &provider).unwrap();
7356
7357 let result = sparse.find_leaf(&search_path, None);
7358 assert_matches!(result, Ok(LeafLookup::NonExistent))
7359 }
7360
7361 #[test]
7362 fn find_leaf_exclusion_path_ends_at_branch() {
7363 let provider = DefaultTrieNodeProvider;
7364 let mut sparse = ParallelSparseTrie::default();
7365 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])); let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x5, 0x6]));
7367 let search_path = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2])); sparse.update_leaf(path1, encode_account_value(0), &provider).unwrap();
7370 sparse.update_leaf(path2, encode_account_value(1), &provider).unwrap();
7371
7372 let result = sparse.find_leaf(&search_path, None);
7373 assert_matches!(result, Ok(LeafLookup::NonExistent));
7374 }
7375
7376 #[test]
7377 fn find_leaf_error_blinded_node_at_leaf_path() {
7378 let blinded_hash = B256::repeat_byte(0xBB);
7380 let leaf_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7381
7382 let sparse = new_test_trie(
7383 [
7384 (
7385 Nibbles::default(),
7387 SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x1, 0x2])),
7388 ),
7389 (
7390 Nibbles::from_nibbles_unchecked([0x1, 0x2]),
7392 SparseNode::new_ext(Nibbles::from_nibbles_unchecked([0x3])),
7393 ),
7394 (
7395 Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3]),
7397 SparseNode::new_branch(TrieMask::new(0b10000), &[(0x4, blinded_hash)]),
7398 ),
7399 ]
7400 .into_iter(),
7401 );
7402
7403 let result = sparse.find_leaf(&leaf_path, None);
7404
7405 assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7407 if path == leaf_path && hash == blinded_hash
7408 );
7409 }
7410
7411 #[test]
7412 fn find_leaf_error_blinded_node() {
7413 let blinded_hash = B256::repeat_byte(0xAA);
7414 let path_to_blind = Nibbles::from_nibbles_unchecked([0x1]);
7415 let search_path = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
7416
7417 let sparse = new_test_trie(
7418 [
7419 (
7422 Nibbles::default(),
7423 SparseNode::new_branch(TrieMask::new(0b100010), &[(0x1, blinded_hash)]),
7424 ),
7425 (
7426 Nibbles::from_nibbles_unchecked([0x5]),
7427 SparseNode::new_leaf(Nibbles::from_nibbles_unchecked([0x6, 0x7, 0x8])),
7428 ),
7429 ]
7430 .into_iter(),
7431 );
7432
7433 let result = sparse.find_leaf(&search_path, None);
7434
7435 assert_matches!(result, Err(LeafLookupError::BlindedNode { path, hash })
7437 if path == path_to_blind && hash == blinded_hash
7438 );
7439 }
7440
    #[test]
    fn test_mainnet_block_24185431_storage_0x6ba784ee() {
        // Regression test replaying a storage-trie sequence from mainnet block
        // 24185431: removing a leaf first fails on a blinded child at 0x31c,
        // succeeds once that branch is revealed, and the resulting branch
        // update at path 0x3 must match the expected compact node.
        reth_tracing::init_test_tracing();

        // Child hashes for the fully-populated branch at path 0x3
        // (one hash per nibble 0x0..=0xf).
        let mut branch_0x3_hashes = vec![
            B256::from(hex!("fc11ba8de4b220b8f19a09f0676c69b8e18bae1350788392640069e59b41733d")),
            B256::from(hex!("8afe085cc6685680bd8ba4bac6e65937a4babf737dc5e7413d21cdda958e8f74")),
            B256::from(hex!("c7b6f7c0fc601a27aece6ec178fd9be17cdee77c4884ecfbe1ee459731eb57da")),
            B256::from(hex!("71c1aec60db78a2deb4e10399b979a2ed5be42b4ee0c0a17c614f9ddc9f9072e")),
            B256::from(hex!("e9261302e7c0b77930eaf1851b585210906cd01e015ab6be0f7f3c0cc947c32a")),
            B256::from(hex!("38ce8f369c56bd77fabdf679b27265b1f8d0a54b09ef612c8ee8ddfc6b3fab95")),
            B256::from(hex!("7b507a8936a28c5776b647d1c4bda0bbbb3d0d227f16c5f5ebba58d02e31918d")),
            B256::from(hex!("0f456b9457a824a81e0eb555aa861461acb38674dcf36959b3b26deb24ed0af9")),
            B256::from(hex!("2145420289652722ad199ba932622e3003c779d694fa5a2acfb2f77b0782b38a")),
            B256::from(hex!("2c1a04dce1a9e2f1cfbf8806edce50a356dfa58e7e7c542c848541502613b796")),
            B256::from(hex!("dad7ca55186ac8f40d4450dc874166df8267b44abc07e684d9507260f5712df3")),
            B256::from(hex!("3a8c2a1d7d2423e92965ec29014634e7f0307ded60b1a63d28c86c3222b24236")),
            B256::from(hex!("4e9929e6728b3a7bf0db6a0750ab376045566b556c9c605e606ecb8ec25200d7")),
            B256::from(hex!("1797c36f98922f52292c161590057a1b5582d5503e3370bcfbf6fd939f3ec98b")),
            B256::from(hex!("9e514589a9c9210b783c19fa3f0b384bbfaefe98f10ea189a2bfc58c6bf000a1")),
            B256::from(hex!("85bdaabbcfa583cbd049650e41d3d19356bd833b3ed585cf225a3548557c7fa3")),
        ];
        let branch_0x3_node = create_branch_node(
            Nibbles::from_nibbles([0x3]),
            &[0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf],
            branch_0x3_hashes.iter().map(RlpNode::word_rlp),
        );

        // Branch at path 0x31: single child 0xc, still hash-only at this point.
        let branch_0x31_hashes = vec![B256::from(hex!(
            "3ca994ba59ce70b83fee1f01731c8dac4fdd0f70ade79bf9b0695c4c53531aab"
        ))];
        let branch_0x31_node = create_branch_node_with_children(
            &[0xc],
            branch_0x31_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // The storage leaf to insert and later remove.
        let leaf_path = hex!("31b0b645a6c4a0a1bb3d2f0c1d31c39f4aba2e3b015928a8eef7161e28388b81");
        let leaf_nibbles = Nibbles::unpack(leaf_path.as_slice());
        let leaf_value = hex!("0009ae8ce8245bff").to_vec();

        // Branch at path 0x31c with children 0x3, 0x7 and 0xc — revealed only
        // after the first removal attempt fails.
        let branch_0x31c_hashes = vec![
            B256::from(hex!("1a68fdb36b77e9332b49a977faf800c22d0199e6cecf44032bb083c78943e540")),
            B256::from(hex!("cd4622c6df6fd7172c7fed1b284ef241e0f501b4c77b675ef10c612bd0948a7a")),
            B256::from(hex!("abf3603d2f991787e21f1709ee4c7375d85dfc506995c0435839fccf3fe2add4")),
        ];
        let branch_0x31c_node = create_branch_node_with_children(
            &[0x3, 0x7, 0xc],
            branch_0x31c_hashes.into_iter().map(|h| RlpNode::word_rlp(&h)),
        );

        // Initial proof reveals only the branch at 0x31; masks taken from the
        // mainnet proof (4096 == bit 0xc set).
        let mut proof_nodes = vec![ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1]),
            node: branch_0x31_node,
            masks: Some(BranchNodeMasks {
                tree_mask: TrieMask::new(4096),
                hash_mask: TrieMask::new(4096),
            }),
        }];

        // Root is the branch at 0x3; mask values reproduce the on-chain proof.
        let mut trie = ParallelSparseTrie::default()
            .with_root(
                branch_0x3_node,
                Some(BranchNodeMasks {
                    tree_mask: TrieMask::new(26099),
                    hash_mask: TrieMask::new(65535),
                }),
                true,
            )
            .expect("root revealed");

        trie.reveal_nodes(&mut proof_nodes).unwrap();

        trie.update_leaf(leaf_nibbles, leaf_value, NoRevealProvider).unwrap();

        // First removal must fail: the sibling branch at 0x31c is still blinded.
        let Err(err) = trie.remove_leaf(&leaf_nibbles, NoRevealProvider) else {
            panic!("expected blinded node error");
        };
        assert_matches!(err.kind(), SparseTrieErrorKind::BlindedNode(path) if path == &Nibbles::from_nibbles([0x3, 0x1, 0xc]));

        // Reveal the missing branch, then the removal can proceed.
        trie.reveal_nodes(&mut [ProofTrieNodeV2 {
            path: Nibbles::from_nibbles([0x3, 0x1, 0xc]),
            node: branch_0x31c_node,
            masks: Some(BranchNodeMasks { tree_mask: 0.into(), hash_mask: 4096.into() }),
        }])
        .unwrap();

        trie.remove_leaf(&leaf_nibbles, NoRevealProvider).unwrap();

        // Recompute hashes so the update set is populated.
        let _ = trie.root();

        let updates = trie.updates_ref();

        let branch_0x3_update = updates
            .updated_nodes
            .get(&Nibbles::from_nibbles([0x3]))
            .expect("Branch at 0x3 should be in updates");

        // Child 0x1 collapsed away after the removal, so its hash drops out of
        // the expected hash list.
        branch_0x3_hashes.remove(1);

        let expected_branch = BranchNodeCompact::new(
            0b1111111111111111,
            0b0110010111110011,
            0b1111111111111101,
            branch_0x3_hashes,
            None,
        );

        assert_eq!(branch_0x3_update, &expected_branch);
    }
7564
7565 #[test]
7566 fn test_get_leaf_value_lower_subtrie() {
7567 let root_branch =
7573 create_branch_node_with_children(&[0x1], [RlpNode::word_rlp(&B256::repeat_byte(0xAA))]);
7574 let branch_at_1 =
7575 create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xBB))]);
7576 let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
7577 trie.reveal_nodes(&mut [ProofTrieNodeV2 {
7578 path: Nibbles::from_nibbles([0x1]),
7579 node: branch_at_1,
7580 masks: None,
7581 }])
7582 .unwrap();
7583
7584 let leaf_path = Nibbles::from_nibbles([0x1, 0x2]);
7586 let leaf_key = Nibbles::from_nibbles([0x3, 0x4]);
7587 let leaf_node = create_leaf_node(leaf_key.to_vec(), 42);
7588
7589 trie.reveal_nodes(&mut [ProofTrieNodeV2 { path: leaf_path, node: leaf_node, masks: None }])
7591 .unwrap();
7592
7593 let full_path = Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4]);
7595
7596 let idx = path_subtrie_index_unchecked(&leaf_path);
7598 let lower_subtrie = trie.lower_subtries[idx].as_revealed_ref().unwrap();
7599 assert!(
7600 lower_subtrie.inner.values.contains_key(&full_path),
7601 "value should be in lower subtrie"
7602 );
7603 assert!(
7604 !trie.upper_subtrie.inner.values.contains_key(&full_path),
7605 "value should NOT be in upper subtrie"
7606 );
7607
7608 assert!(
7610 trie.get_leaf_value(&full_path).is_some(),
7611 "get_leaf_value should find the value in lower subtrie"
7612 );
7613 }
7614
7615 #[test]
7622 fn test_get_leaf_value_upper_subtrie_via_update_leaf() {
7623 let provider = NoRevealProvider;
7624
7625 let mut trie = ParallelSparseTrie::default()
7627 .with_root(TrieNodeV2::EmptyRoot, None, false)
7628 .expect("root revealed");
7629
7630 let full_path = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA, 0xB, 0xC]));
7632 let value = encode_account_value(42);
7633
7634 trie.update_leaf(full_path, value.clone(), provider).unwrap();
7637
7638 assert!(
7640 trie.upper_subtrie.inner.values.contains_key(&full_path),
7641 "value should be in upper subtrie after update_leaf"
7642 );
7643
7644 let retrieved = trie.get_leaf_value(&full_path);
7648 assert_eq!(retrieved, Some(&value));
7649 }
7650
7651 #[test]
7653 fn test_get_leaf_value_upper_and_lower_subtries() {
7654 let provider = NoRevealProvider;
7655
7656 let mut trie = ParallelSparseTrie::default()
7658 .with_root(TrieNodeV2::EmptyRoot, None, false)
7659 .expect("root revealed");
7660
7661 let path1 = pad_nibbles_right(Nibbles::from_nibbles([0x0, 0xA]));
7663 let value1 = encode_account_value(1);
7664 trie.update_leaf(path1, value1.clone(), provider).unwrap();
7665
7666 let path2 = pad_nibbles_right(Nibbles::from_nibbles([0x1, 0xB]));
7668 let value2 = encode_account_value(2);
7669 trie.update_leaf(path2, value2.clone(), provider).unwrap();
7670
7671 assert_eq!(trie.get_leaf_value(&path1), Some(&value1));
7673 assert_eq!(trie.get_leaf_value(&path2), Some(&value2));
7674 }
7675
7676 #[test]
7678 fn test_get_leaf_value_sparse_storage_trie() {
7679 let provider = NoRevealProvider;
7680
7681 let mut trie = ParallelSparseTrie::default()
7683 .with_root(TrieNodeV2::EmptyRoot, None, false)
7684 .expect("root revealed");
7685
7686 let slot_path = pad_nibbles_right(Nibbles::from_nibbles([0x2, 0x9]));
7688 let slot_value = alloy_rlp::encode(U256::from(12345));
7689 trie.update_leaf(slot_path, slot_value.clone(), provider).unwrap();
7690
7691 assert_eq!(trie.get_leaf_value(&slot_path), Some(&slot_value));
7693 }
7694
7695 #[test]
7696 fn test_prune_empty_suffix_key_regression() {
7697 use crate::provider::DefaultTrieNodeProvider;
7702
7703 let provider = DefaultTrieNodeProvider;
7704 let mut parallel = ParallelSparseTrie::default();
7705
7706 let value = {
7708 let account = Account {
7709 nonce: 0x123456789abcdef,
7710 balance: U256::from(0x123456789abcdef0123456789abcdef_u128),
7711 ..Default::default()
7712 };
7713 let mut buf = Vec::new();
7714 account.into_trie_account(EMPTY_ROOT_HASH).encode(&mut buf);
7715 buf
7716 };
7717
7718 for i in 0..16u8 {
7720 parallel
7721 .update_leaf(
7722 pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5])),
7723 value.clone(),
7724 &provider,
7725 )
7726 .unwrap();
7727 }
7728
7729 let root_before = parallel.root();
7731
7732 parallel.prune(0);
7734
7735 let root_after = parallel.root();
7736 assert_eq!(root_before, root_after, "root hash must be preserved");
7737
7738 for i in 0..16u8 {
7741 let path = pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3, 0x4, 0x5]));
7742 assert!(
7743 parallel.get_leaf_value(&path).is_none(),
7744 "value at {:?} should be removed after prune",
7745 path
7746 );
7747 }
7748 }
7749
7750 #[test]
7751 fn test_prune_at_various_depths() {
7752 for max_depth in [0, 1] {
7757 let provider = DefaultTrieNodeProvider;
7758 let mut trie = ParallelSparseTrie::default();
7759
7760 let value = large_account_value();
7761
7762 for i in 0..4u8 {
7763 for j in 0..4u8 {
7764 for k in 0..4u8 {
7765 trie.update_leaf(
7766 pad_nibbles_right(Nibbles::from_nibbles([i, j, k, 0x1, 0x2, 0x3])),
7767 value.clone(),
7768 &provider,
7769 )
7770 .unwrap();
7771 }
7772 }
7773 }
7774
7775 let root_before = trie.root();
7776 let nodes_before = trie.size_hint();
7777
7778 for _ in 0..2 {
7782 trie.prune(max_depth);
7783 }
7784
7785 let root_after = trie.root();
7786 assert_eq!(root_before, root_after, "root hash should be preserved after prune");
7787
7788 let nodes_after = trie.size_hint();
7789 assert!(
7790 nodes_after < nodes_before,
7791 "node count should decrease after prune at depth {max_depth}"
7792 );
7793
7794 if max_depth == 0 {
7795 assert_eq!(nodes_after, 1, "root");
7797 }
7798 }
7799 }
7800
7801 #[test]
7802 fn test_prune_empty_trie() {
7803 let mut trie = ParallelSparseTrie::default();
7804 trie.prune(2);
7805 let root = trie.root();
7806 assert_eq!(root, EMPTY_ROOT_HASH, "empty trie should have empty root hash");
7807 }
7808
7809 #[test]
7810 fn test_prune_preserves_root_hash() {
7811 let provider = DefaultTrieNodeProvider;
7812 let mut trie = ParallelSparseTrie::default();
7813
7814 let value = large_account_value();
7815
7816 for i in 0..8u8 {
7817 for j in 0..4u8 {
7818 trie.update_leaf(
7819 pad_nibbles_right(Nibbles::from_nibbles([i, j, 0x3, 0x4, 0x5, 0x6])),
7820 value.clone(),
7821 &provider,
7822 )
7823 .unwrap();
7824 }
7825 }
7826
7827 let root_before = trie.root();
7828 trie.prune(1);
7829 let root_after = trie.root();
7830 assert_eq!(root_before, root_after, "root hash must be preserved after prune");
7831 }
7832
7833 #[test]
7834 fn test_prune_single_leaf_trie() {
7835 let provider = DefaultTrieNodeProvider;
7836 let mut trie = ParallelSparseTrie::default();
7837
7838 let value = large_account_value();
7839 trie.update_leaf(
7840 pad_nibbles_right(Nibbles::from_nibbles([0x1, 0x2, 0x3, 0x4])),
7841 value,
7842 &provider,
7843 )
7844 .unwrap();
7845
7846 let root_before = trie.root();
7847 let nodes_before = trie.size_hint();
7848
7849 trie.prune(0);
7850
7851 let root_after = trie.root();
7852 assert_eq!(root_before, root_after, "root hash should be preserved");
7853 assert_eq!(trie.size_hint(), nodes_before, "single leaf trie should not change");
7854 }
7855
7856 #[test]
7857 fn test_prune_deep_depth_no_effect() {
7858 let provider = DefaultTrieNodeProvider;
7859 let mut trie = ParallelSparseTrie::default();
7860
7861 let value = large_account_value();
7862
7863 for i in 0..4u8 {
7864 trie.update_leaf(
7865 pad_nibbles_right(Nibbles::from_nibbles([i, 0x2, 0x3, 0x4])),
7866 value.clone(),
7867 &provider,
7868 )
7869 .unwrap();
7870 }
7871
7872 trie.root();
7873 let nodes_before = trie.size_hint();
7874
7875 trie.prune(100);
7876
7877 assert_eq!(nodes_before, trie.size_hint(), "deep prune should have no effect");
7878 }
7879
7880 #[test]
7881 fn test_prune_extension_node_depth_semantics() {
7882 let provider = DefaultTrieNodeProvider;
7883 let mut trie = ParallelSparseTrie::default();
7884
7885 let value = large_account_value();
7886
7887 trie.update_leaf(
7888 pad_nibbles_right(Nibbles::from_nibbles([0, 1, 2, 3, 0, 5, 6, 7])),
7889 value.clone(),
7890 &provider,
7891 )
7892 .unwrap();
7893 trie.update_leaf(
7894 pad_nibbles_right(Nibbles::from_nibbles([0, 1, 2, 3, 1, 5, 6, 7])),
7895 value,
7896 &provider,
7897 )
7898 .unwrap();
7899
7900 let root_before = trie.root();
7901 for _ in 0..2 {
7905 trie.prune(1);
7906 }
7907
7908 assert_eq!(root_before, trie.root(), "root hash should be preserved");
7909 assert_eq!(trie.size_hint(), 2, "root + extension + hash stubs after prune(1)");
7911 }
7912
7913 #[test]
7914 fn test_prune_root_hash_preserved() {
7915 let provider = DefaultTrieNodeProvider;
7916 let mut trie = ParallelSparseTrie::default();
7917
7918 let key1 = Nibbles::unpack(B256::repeat_byte(0x00));
7920 let key2 = Nibbles::unpack(B256::repeat_byte(0x11));
7921
7922 let large_value = large_account_value();
7923 trie.update_leaf(key1, large_value.clone(), &provider).unwrap();
7924 trie.update_leaf(key2, large_value, &provider).unwrap();
7925
7926 let root_before = trie.root();
7927
7928 trie.prune(0);
7929
7930 assert_eq!(root_before, trie.root(), "root hash must be preserved after pruning");
7931 }
7932
7933 #[test]
7934 fn test_prune_mixed_embedded_and_hashed() {
7935 let provider = DefaultTrieNodeProvider;
7936 let mut trie = ParallelSparseTrie::default();
7937
7938 let large_value = large_account_value();
7939 let small_value = vec![0x80];
7940
7941 for i in 0..8u8 {
7942 let value = if i < 4 { large_value.clone() } else { small_value.clone() };
7943 trie.update_leaf(
7944 pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3])),
7945 value,
7946 &provider,
7947 )
7948 .unwrap();
7949 }
7950
7951 let root_before = trie.root();
7952 trie.prune(0);
7953 assert_eq!(root_before, trie.root(), "root hash must be preserved");
7954 }
7955
7956 #[test]
7957 fn test_prune_many_lower_subtries() {
7958 let provider = DefaultTrieNodeProvider;
7959
7960 let large_value = large_account_value();
7961
7962 let mut keys = Vec::new();
7963 for first in 0..16u8 {
7964 for second in 0..16u8 {
7965 keys.push(pad_nibbles_right(Nibbles::from_nibbles([
7966 first, second, 0x1, 0x2, 0x3, 0x4,
7967 ])));
7968 }
7969 }
7970
7971 let mut trie = ParallelSparseTrie::default();
7972
7973 for key in &keys {
7974 trie.update_leaf(*key, large_value.clone(), &provider).unwrap();
7975 }
7976
7977 let root_before = trie.root();
7978
7979 let mut total_pruned = 0;
7982 for _ in 0..2 {
7983 total_pruned += trie.prune(1);
7984 }
7985
7986 assert!(total_pruned > 0, "should have pruned some nodes");
7987 assert_eq!(root_before, trie.root(), "root hash should be preserved");
7988
7989 for key in &keys {
7990 assert!(trie.get_leaf_value(key).is_none(), "value should be pruned");
7991 }
7992 }
7993
7994 #[test]
7995 fn test_prune_max_depth_overflow() {
7996 let provider = DefaultTrieNodeProvider;
7998 let mut trie = ParallelSparseTrie::default();
7999
8000 let value = large_account_value();
8001
8002 for i in 0..4u8 {
8003 trie.update_leaf(
8004 pad_nibbles_right(Nibbles::from_nibbles([i, 0x1, 0x2, 0x3])),
8005 value.clone(),
8006 &provider,
8007 )
8008 .unwrap();
8009 }
8010
8011 trie.root();
8012 let nodes_before = trie.size_hint();
8013
8014 trie.prune(300);
8016
8017 assert_eq!(
8018 nodes_before,
8019 trie.size_hint(),
8020 "prune(300) should have no effect on a shallow trie"
8021 );
8022 }
8023
8024 #[test]
8025 fn test_prune_fast_path_case2_update_after() {
8026 let provider = DefaultTrieNodeProvider;
8029 let mut trie = ParallelSparseTrie::default();
8030
8031 let value = large_account_value();
8032
8033 for first in 0..4u8 {
8036 for second in 0..4u8 {
8037 trie.update_leaf(
8038 pad_nibbles_right(Nibbles::from_nibbles([
8039 first, second, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6,
8040 ])),
8041 value.clone(),
8042 &provider,
8043 )
8044 .unwrap();
8045 }
8046 }
8047
8048 let root_before = trie.root();
8049
8050 trie.prune(0);
8052
8053 let root_after = trie.root();
8054 assert_eq!(root_before, root_after, "root hash should be preserved");
8055
8056 let new_path =
8059 pad_nibbles_right(Nibbles::from_nibbles([0x5, 0x5, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6]));
8060 trie.update_leaf(new_path, value, &provider).unwrap();
8061
8062 let _ = trie.root();
8064 }
8065
8066 #[test]
8069 fn test_update_leaves_successful_update() {
8070 use crate::LeafUpdate;
8071 use alloy_primitives::map::B256Map;
8072 use std::cell::RefCell;
8073
8074 let provider = DefaultTrieNodeProvider;
8075 let mut trie = ParallelSparseTrie::default();
8076
8077 let b256_key = B256::with_last_byte(42);
8079 let key = Nibbles::unpack(b256_key);
8080 let value = encode_account_value(1);
8081 trie.update_leaf(key, value, &provider).unwrap();
8082
8083 let new_value = encode_account_value(2);
8085
8086 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8087 updates.insert(b256_key, LeafUpdate::Changed(new_value));
8088
8089 let proof_targets = RefCell::new(Vec::new());
8090 trie.update_leaves(&mut updates, |path, min_len| {
8091 proof_targets.borrow_mut().push((path, min_len));
8092 })
8093 .unwrap();
8094
8095 assert!(updates.is_empty(), "Update map should be empty after successful update");
8097 assert!(
8098 proof_targets.borrow().is_empty(),
8099 "Callback should not be invoked for revealed paths"
8100 );
8101 }
8102
8103 #[test]
8104 fn test_update_leaves_insert_new_leaf() {
8105 use crate::LeafUpdate;
8106 use alloy_primitives::map::B256Map;
8107 use std::cell::RefCell;
8108
8109 let mut trie = ParallelSparseTrie::default();
8110
8111 let b256_key = B256::with_last_byte(99);
8113 let new_value = encode_account_value(42);
8114
8115 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8116 updates.insert(b256_key, LeafUpdate::Changed(new_value.clone()));
8117
8118 let proof_targets = RefCell::new(Vec::new());
8119 trie.update_leaves(&mut updates, |path, min_len| {
8120 proof_targets.borrow_mut().push((path, min_len));
8121 })
8122 .unwrap();
8123
8124 assert!(updates.is_empty(), "Update map should be empty after successful insert");
8126 assert!(
8127 proof_targets.borrow().is_empty(),
8128 "Callback should not be invoked for new leaf insert"
8129 );
8130
8131 let full_path = Nibbles::unpack(b256_key);
8133 assert_eq!(
8134 trie.get_leaf_value(&full_path),
8135 Some(&new_value),
8136 "New leaf value should be retrievable"
8137 );
8138 }
8139
    #[test]
    fn test_update_leaves_blinded_node() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch with two children: child 0x0 is a hash-only stub and
        // child 0x1 is an embedded leaf with an empty remaining key.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), ],
            TrieMask::new(0b11),
            None,
        ));

        // hash_mask bit 0 marks child 0x0 as hashed at reveal time.
        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the root with tree_mask bit 0 set, then reveal the embedded
        // leaf at 0x1, leaving only child 0x0 blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // B256::ZERO unpacks to a path starting with nibble 0x0 — it routes
        // through the blinded child, so the update cannot be applied.
        let b256_key = B256::ZERO; let new_value = encode_account_value(42);
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(new_value));

        // Capture proof targets requested by the callback and snapshot the
        // prefix-set length to verify nothing was mutated on failure.
        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The failed update stays queued for retry after the proof is fetched.
        assert!(!updates.is_empty(), "Update should remain in map when hitting blinded node");

        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed update on blinded node"
        );

        let targets = proof_targets.borrow();
        assert!(!targets.is_empty(), "Callback should be invoked for blinded path");

        // The blinded child sits at path [0x0], length 1.
        assert_eq!(targets[0].1, 1, "min_len should equal blinded node path length");
    }
8217
8218 #[test]
8219 fn test_update_leaves_removal() {
8220 use crate::LeafUpdate;
8221 use alloy_primitives::map::B256Map;
8222 use std::cell::RefCell;
8223
8224 let provider = DefaultTrieNodeProvider;
8225 let mut trie = ParallelSparseTrie::default();
8226
8227 let b256_key1 = B256::with_last_byte(1);
8230 let b256_key2 = B256::with_last_byte(2);
8231 let key1 = Nibbles::unpack(b256_key1);
8232 let key2 = Nibbles::unpack(b256_key2);
8233 let value = encode_account_value(1);
8234 trie.update_leaf(key1, value.clone(), &provider).unwrap();
8235 trie.update_leaf(key2, value, &provider).unwrap();
8236
8237 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8239 updates.insert(b256_key1, LeafUpdate::Changed(vec![])); let proof_targets = RefCell::new(Vec::new());
8242 trie.update_leaves(&mut updates, |path, min_len| {
8243 proof_targets.borrow_mut().push((path, min_len));
8244 })
8245 .unwrap();
8246
8247 assert!(updates.is_empty(), "Update map should be empty after successful removal");
8249 }
8250
    #[test]
    fn test_update_leaves_removal_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch with two children: child 0x0 is a hash-only stub and
        // child 0x1 is an embedded leaf with an empty remaining key.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), ],
            TrieMask::new(0b11),
            None,
        ));

        // hash_mask bit 0 marks child 0x0 as hashed at reveal time.
        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the root with tree_mask bit 0 set, then reveal the embedded
        // leaf at 0x1, leaving only child 0x0 blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // B256::ZERO routes through the blinded child 0x0.
        let b256_key = B256::ZERO; let full_path = Nibbles::unpack(b256_key);

        // Pre-seed a value at the blinded path directly in the upper subtrie so
        // we can assert it survives the failed removal.
        let old_value = encode_account_value(99);
        trie.upper_subtrie.inner.values.insert(full_path, old_value.clone());

        // An empty `Changed` payload requests removal.
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The removal cannot proceed past the blinded node, so a proof target
        // must be requested and the update left queued.
        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked when removal hits blinded node"
        );

        assert!(!updates.is_empty(), "Update should remain in map when removal hits blinded node");

        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&old_value),
            "Original value should be preserved after failed removal"
        );

        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after failed removal on blinded node"
        );
    }
8336
    #[test]
    fn test_update_leaves_removal_branch_collapse_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch with two children: child 0x0 is a hash-only stub and
        // child 0x1 is an embedded leaf. Removing the leaf would collapse the
        // branch into its remaining (blinded) sibling — which must fail
        // atomically because that sibling cannot be inspected.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(Nibbles::default(), small_value);
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                RlpNode::word_rlp(&B256::repeat_byte(1)), RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), ],
            TrieMask::new(0b11),
            None,
        ));

        // hash_mask bit 0 marks child 0x0 as hashed at reveal time.
        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01), tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the root with tree_mask bit 0 set, then reveal the embedded
        // leaf at 0x1, leaving only child 0x0 blinded.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // 0x10 unpacks to leading nibble 0x1 — the revealed leaf's position.
        let b256_key = B256::with_last_byte(0x10);
        let full_path = Nibbles::unpack(b256_key);
        let leaf_value = encode_account_value(42);
        trie.upper_subtrie.inner.values.insert(full_path, leaf_value.clone());

        // Snapshot prefix-set length and total node count so we can verify the
        // failed removal left the trie completely untouched.
        let prefix_set_len_before = trie.prefix_set.len();
        let node_count_before = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();

        // An empty `Changed` payload requests removal of the revealed leaf.
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Changed(vec![])); let proof_targets = RefCell::new(Vec::new());
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // The removal must be rejected because collapsing the branch requires
        // inspecting the blinded sibling at 0x0.
        assert!(
            !updates.is_empty(),
            "Update should remain in map when removal would collapse branch with blinded sibling"
        );

        assert!(
            !proof_targets.borrow().is_empty(),
            "Callback should be invoked for blinded sibling path"
        );

        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged after atomic failure"
        );

        let node_count_after = trie.upper_subtrie.nodes.len() +
            trie.lower_subtries
                .iter()
                .filter_map(|s| s.as_revealed_ref())
                .map(|s| s.nodes.len())
                .sum::<usize>();
        assert_eq!(
            node_count_before, node_count_after,
            "Node count should be unchanged after atomic failure"
        );

        assert_eq!(
            trie.upper_subtrie.inner.values.get(&full_path),
            Some(&leaf_value),
            "Leaf value should still exist after failed removal"
        );
    }
8443
8444 #[test]
8445 fn test_update_leaves_touched() {
8446 use crate::LeafUpdate;
8447 use alloy_primitives::map::B256Map;
8448 use std::cell::RefCell;
8449
8450 let provider = DefaultTrieNodeProvider;
8451 let mut trie = ParallelSparseTrie::default();
8452
8453 let b256_key = B256::with_last_byte(42);
8455 let key = Nibbles::unpack(b256_key);
8456 let value = encode_account_value(1);
8457 trie.update_leaf(key, value, &provider).unwrap();
8458
8459 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8461 updates.insert(b256_key, LeafUpdate::Touched);
8462
8463 let proof_targets = RefCell::new(Vec::new());
8464 let prefix_set_len_before = trie.prefix_set.len();
8465
8466 trie.update_leaves(&mut updates, |path, min_len| {
8467 proof_targets.borrow_mut().push((path, min_len));
8468 })
8469 .unwrap();
8470
8471 assert!(updates.is_empty(), "Touched update should be removed for accessible path");
8473
8474 assert!(
8476 proof_targets.borrow().is_empty(),
8477 "Callback should not be invoked for accessible path"
8478 );
8479
8480 assert_eq!(
8482 trie.prefix_set.len(),
8483 prefix_set_len_before,
8484 "prefix_set should be unchanged for Touched update (read-only)"
8485 );
8486 }
8487
8488 #[test]
8489 fn test_update_leaves_touched_nonexistent() {
8490 use crate::LeafUpdate;
8491 use alloy_primitives::map::B256Map;
8492 use std::cell::RefCell;
8493
8494 let mut trie = ParallelSparseTrie::default();
8495
8496 let b256_key = B256::with_last_byte(99);
8498 let full_path = Nibbles::unpack(b256_key);
8499
8500 let prefix_set_len_before = trie.prefix_set.len();
8501
8502 let mut updates: B256Map<LeafUpdate> = B256Map::default();
8503 updates.insert(b256_key, LeafUpdate::Touched);
8504
8505 let proof_targets = RefCell::new(Vec::new());
8506 trie.update_leaves(&mut updates, |path, min_len| {
8507 proof_targets.borrow_mut().push((path, min_len));
8508 })
8509 .unwrap();
8510
8511 assert!(updates.is_empty(), "Touched update should be removed for accessible (empty) path");
8513
8514 assert!(
8516 proof_targets.borrow().is_empty(),
8517 "Callback should not be invoked for accessible path"
8518 );
8519
8520 assert_eq!(
8522 trie.prefix_set.len(),
8523 prefix_set_len_before,
8524 "prefix_set should not be modified by Touched update"
8525 );
8526
8527 assert!(
8529 trie.get_leaf_value(&full_path).is_none(),
8530 "No value should exist for non-existent key after Touched update"
8531 );
8532 }
8533
    #[test]
    fn test_update_leaves_touched_blinded() {
        use crate::LeafUpdate;
        use alloy_primitives::map::B256Map;
        use std::cell::RefCell;

        // Root branch with two children: nibble 0x0 is a blinded hash-only
        // node and nibble 0x1 is a leaf small enough to embed its RLP inline.
        let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
        let leaf = LeafNode::new(
            Nibbles::default(), small_value,
        );
        let branch = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::default(),
            vec![
                // Child 0x0: bare hash, i.e. blinded from this trie's view.
                RlpNode::word_rlp(&B256::repeat_byte(1)),
                // Child 0x1: leaf RLP embedded directly in the branch.
                RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(),
            ],
            TrieMask::new(0b11),
            None,
        ));

        // Seed the trie from the root branch; hash_mask bit 0 corresponds to
        // the blinded child (presumably flagging it as stored by hash — see
        // BranchNodeMasks semantics).
        let mut trie = ParallelSparseTrie::from_root(
            branch.clone(),
            Some(BranchNodeMasks {
                hash_mask: TrieMask::new(0b01),
                tree_mask: TrieMask::default(),
            }),
            false,
        )
        .unwrap();

        // Re-reveal the same root with swapped masks (tree_mask bit 0 set this
        // time), then reveal the embedded leaf at path 0x1.
        trie.reveal_node(
            Nibbles::default(),
            branch,
            Some(BranchNodeMasks {
                hash_mask: TrieMask::default(),
                tree_mask: TrieMask::new(0b01),
            }),
        )
        .unwrap();
        trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();

        // B256::ZERO unpacks to a path whose first nibble is 0x0, i.e. it sits
        // behind the blinded child.
        let b256_key = B256::ZERO;
        let mut updates: B256Map<LeafUpdate> = B256Map::default();
        updates.insert(b256_key, LeafUpdate::Touched);

        let proof_targets = RefCell::new(Vec::new());
        let prefix_set_len_before = trie.prefix_set.len();
        trie.update_leaves(&mut updates, |path, min_len| {
            proof_targets.borrow_mut().push((path, min_len));
        })
        .unwrap();

        // A Touched key behind a blinded node cannot be resolved locally: the
        // proof callback must fire and the update must stay queued for retry.
        assert!(!proof_targets.borrow().is_empty(), "Callback should be invoked for blinded path");
        assert!(!updates.is_empty(), "Touched update should remain in map for blinded path");

        // Touched is read-only, so the prefix set must not be dirtied even on
        // this failure path.
        assert_eq!(
            trie.prefix_set.len(),
            prefix_set_len_before,
            "prefix_set should be unchanged for Touched update on blinded path"
        );
    }
8604
8605 #[test]
8606 fn test_update_leaves_deduplication() {
8607 use crate::LeafUpdate;
8608 use alloy_primitives::map::B256Map;
8609 use std::cell::RefCell;
8610
8611 let small_value = alloy_rlp::encode_fixed_size(&U256::from(1)).to_vec();
8614 let leaf = LeafNode::new(
8615 Nibbles::default(), small_value,
8617 );
8618 let branch = TrieNodeV2::Branch(BranchNodeV2::new(
8619 Nibbles::default(),
8620 vec![
8621 RlpNode::word_rlp(&B256::repeat_byte(1)), RlpNode::from_raw_rlp(&alloy_rlp::encode(leaf.clone())).unwrap(), ],
8624 TrieMask::new(0b11),
8625 None,
8626 ));
8627
8628 let mut trie = ParallelSparseTrie::from_root(
8629 branch.clone(),
8630 Some(BranchNodeMasks {
8631 hash_mask: TrieMask::new(0b01),
8632 tree_mask: TrieMask::default(),
8633 }),
8634 false,
8635 )
8636 .unwrap();
8637
8638 trie.reveal_node(
8639 Nibbles::default(),
8640 branch,
8641 Some(BranchNodeMasks {
8642 hash_mask: TrieMask::default(),
8643 tree_mask: TrieMask::new(0b01),
8644 }),
8645 )
8646 .unwrap();
8647 trie.reveal_node(Nibbles::from_nibbles([0x1]), TrieNodeV2::Leaf(leaf), None).unwrap();
8648
8649 let b256_key1 = B256::ZERO;
8652 let b256_key2 = B256::with_last_byte(1); let b256_key3 = B256::with_last_byte(2); let mut updates: B256Map<LeafUpdate> = B256Map::default();
8656 let value = encode_account_value(42);
8657
8658 updates.insert(b256_key1, LeafUpdate::Changed(value.clone()));
8659 updates.insert(b256_key2, LeafUpdate::Changed(value.clone()));
8660 updates.insert(b256_key3, LeafUpdate::Changed(value));
8661
8662 let proof_targets = RefCell::new(Vec::new());
8663 trie.update_leaves(&mut updates, |path, min_len| {
8664 proof_targets.borrow_mut().push((path, min_len));
8665 })
8666 .unwrap();
8667
8668 let targets = proof_targets.borrow();
8671 assert_eq!(targets.len(), 3, "Callback should be invoked for each unique key");
8672
8673 for (_, min_len) in targets.iter() {
8675 assert_eq!(*min_len, 1, "All should have min_len 1 from blinded node at 0x0");
8676 }
8677 }
8678
8679 #[test]
8680 fn test_nibbles_to_padded_b256() {
8681 let empty = Nibbles::default();
8683 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&empty), B256::ZERO);
8684
8685 let full_key = b256!("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef");
8687 let full_nibbles = Nibbles::unpack(full_key);
8688 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&full_nibbles), full_key);
8689
8690 let partial = Nibbles::from_nibbles_unchecked([0x1, 0x2, 0x3, 0x4]);
8693 let expected = b256!("1234000000000000000000000000000000000000000000000000000000000000");
8694 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&partial), expected);
8695
8696 let single = Nibbles::from_nibbles_unchecked([0xf]);
8698 let expected_single =
8699 b256!("f000000000000000000000000000000000000000000000000000000000000000");
8700 assert_eq!(ParallelSparseTrie::nibbles_to_padded_b256(&single), expected_single);
8701 }
8702
8703 #[test]
8704 fn test_memory_size() {
8705 let trie = ParallelSparseTrie::default();
8707 let empty_size = trie.memory_size();
8708
8709 assert!(empty_size >= core::mem::size_of::<ParallelSparseTrie>());
8711
8712 let root_branch = create_branch_node_with_children(
8716 &[0x1, 0x5],
8717 [
8718 RlpNode::word_rlp(&B256::repeat_byte(0xAA)),
8719 RlpNode::word_rlp(&B256::repeat_byte(0xBB)),
8720 ],
8721 );
8722 let mut trie = ParallelSparseTrie::from_root(root_branch, None, false).unwrap();
8723
8724 let branch_at_1 =
8725 create_branch_node_with_children(&[0x2], [RlpNode::word_rlp(&B256::repeat_byte(0xCC))]);
8726 let branch_at_5 =
8727 create_branch_node_with_children(&[0x6], [RlpNode::word_rlp(&B256::repeat_byte(0xDD))]);
8728 trie.reveal_nodes(&mut [
8729 ProofTrieNodeV2 {
8730 path: Nibbles::from_nibbles_unchecked([0x1]),
8731 node: branch_at_1,
8732 masks: None,
8733 },
8734 ProofTrieNodeV2 {
8735 path: Nibbles::from_nibbles_unchecked([0x5]),
8736 node: branch_at_5,
8737 masks: None,
8738 },
8739 ])
8740 .unwrap();
8741
8742 let mut nodes = vec![
8743 ProofTrieNodeV2 {
8744 path: Nibbles::from_nibbles_unchecked([0x1, 0x2]),
8745 node: TrieNodeV2::Leaf(LeafNode {
8746 key: Nibbles::from_nibbles_unchecked([0x3, 0x4]),
8747 value: vec![1, 2, 3],
8748 }),
8749 masks: None,
8750 },
8751 ProofTrieNodeV2 {
8752 path: Nibbles::from_nibbles_unchecked([0x5, 0x6]),
8753 node: TrieNodeV2::Leaf(LeafNode {
8754 key: Nibbles::from_nibbles_unchecked([0x7, 0x8]),
8755 value: vec![4, 5, 6],
8756 }),
8757 masks: None,
8758 },
8759 ];
8760 trie.reveal_nodes(&mut nodes).unwrap();
8761
8762 let populated_size = trie.memory_size();
8763
8764 assert!(populated_size > empty_size);
8766 }
8767
    #[test]
    fn test_reveal_extension_branch_leaves_then_root() {
        // 63 zero nibbles: the branch below sits at maximum depth, so its two
        // leaves land exactly at the full 64-nibble key length.
        let ext_key: [u8; 63] = [0; 63];

        let branch_path = Nibbles::from_nibbles(ext_key);

        // Leaf paths: 63 zero nibbles followed by 0x1 / 0x2 respectively.
        let mut leaf1_path_bytes = [0u8; 64];
        leaf1_path_bytes[63] = 1;
        let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);

        let mut leaf2_path_bytes = [0u8; 64];
        leaf2_path_bytes[63] = 2;
        let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);

        // Both leaves have empty remaining keys: the branch position plus the
        // child nibble already consume the whole path.
        let leaf1_node = LeafNode::new(Nibbles::default(), vec![0x1]);
        let leaf2_node = LeafNode::new(Nibbles::default(), vec![0x2]);

        let leaf1_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf1_node.clone())));
        let leaf2_rlp = RlpNode::from_rlp(&alloy_rlp::encode(TrieNodeV2::Leaf(leaf2_node.clone())));

        // Children occupy nibbles 0x1 and 0x2 of the branch.
        let state_mask = TrieMask::new(0b0000_0110);
        let stack = vec![leaf1_rlp, leaf2_rlp];

        // RLP of the branch without any prefix; used below as the root's
        // precomputed child RLP.
        let bare_branch = BranchNodeV2::new(Nibbles::new(), stack.clone(), state_mask, None);
        let branch_rlp = RlpNode::from_rlp(&alloy_rlp::encode(&bare_branch));

        // Root: a branch carrying the full 63-nibble prefix (extension-style
        // BranchNodeV2) with the bare branch's RLP attached.
        let root_node = TrieNodeV2::Branch(BranchNodeV2::new(
            Nibbles::from_nibbles(ext_key),
            stack.clone(),
            state_mask,
            Some(branch_rlp),
        ));

        let mut trie = ParallelSparseTrie::from_root(root_node, None, false).unwrap();

        // Reveal the deep branch and both leaves in a single batch.
        let mut nodes = vec![
            ProofTrieNodeV2 {
                path: branch_path,
                node: TrieNodeV2::Branch(BranchNodeV2::new(
                    Nibbles::new(),
                    stack,
                    state_mask,
                    None,
                )),
                masks: None,
            },
            ProofTrieNodeV2 { path: leaf1_path, node: TrieNodeV2::Leaf(leaf1_node), masks: None },
            ProofTrieNodeV2 { path: leaf2_path, node: TrieNodeV2::Leaf(leaf2_node), masks: None },
        ];
        trie.reveal_nodes(&mut nodes).unwrap();

        // Mark both leaves dirty so root() must re-hash the revealed subtree.
        trie.prefix_set.insert(leaf1_path);
        trie.prefix_set.insert(leaf2_path);

        // Must not panic: recomputing the root exercises the deep
        // extension/branch/leaf reveal path end to end.
        let _root = trie.root();
    }
8847
8848 #[test]
8849 fn test_update_leaf_creates_embedded_nodes_then_root() {
8850 let mut leaf1_path_bytes = [0u8; 64];
8860 leaf1_path_bytes[63] = 1;
8861 let leaf1_path = Nibbles::from_nibbles(leaf1_path_bytes);
8862
8863 let mut leaf2_path_bytes = [0u8; 64];
8864 leaf2_path_bytes[63] = 2;
8865 let leaf2_path = Nibbles::from_nibbles(leaf2_path_bytes);
8866
8867 let mut trie = ParallelSparseTrie::default();
8869 trie.update_leaf(leaf1_path, vec![0x1], DefaultTrieNodeProvider).unwrap();
8870 trie.update_leaf(leaf2_path, vec![0x2], DefaultTrieNodeProvider).unwrap();
8871
8872 let _root = trie.root();
8874 }
8875}