Further tidy cached tree hash
parent c18cdf2abf
commit 8e5b79452a
@@ -17,7 +17,7 @@ impl TreeHashCache {
     where
         T: CachedTreeHash<T>,
     {
-        item.build_tree_hash_cache()
+        item.new_cache()
     }

     pub fn from_elems(cache: Vec<u8>, chunk_modified: Vec<bool>) -> Self {
@@ -2,12 +2,14 @@ use super::resize::{grow_merkle_cache, shrink_merkle_cache};
 use super::*;
 use ssz::ssz_encode;

+mod vec;
+
 impl CachedTreeHash<u64> for u64 {
     fn item_type() -> ItemType {
         ItemType::Basic
     }

-    fn build_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
+    fn new_cache(&self) -> Result<TreeHashCache, Error> {
         Ok(TreeHashCache::from_bytes(
             merkleize(ssz_encode(self)),
             false,
@@ -26,7 +28,7 @@ impl CachedTreeHash<u64> for u64 {
         HASHSIZE / 8
     }

-    fn cached_hash_tree_root(
+    fn update_cache(
         &self,
         other: &Self,
         cache: &mut TreeHashCache,
@@ -40,185 +42,3 @@ impl CachedTreeHash<u64> for u64 {
         Ok(chunk + 1)
     }
 }
-
-impl<T> CachedTreeHash<Vec<T>> for Vec<T>
-where
-    T: CachedTreeHash<T>,
-{
-    fn item_type() -> ItemType {
-        ItemType::List
-    }
-
-    fn build_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
-        match T::item_type() {
-            ItemType::Basic => {
-                TreeHashCache::from_bytes(merkleize(get_packed_leaves(self)?), false)
-            }
-            ItemType::Composite | ItemType::List => {
-                let subtrees = self
-                    .iter()
-                    .map(|item| TreeHashCache::new(item))
-                    .collect::<Result<Vec<TreeHashCache>, _>>()?;
-
-                TreeHashCache::from_leaves_and_subtrees(self, subtrees)
-            }
-        }
-    }
-
-    fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
-        let lengths = match T::item_type() {
-            ItemType::Basic => vec![1; self.len() / T::packing_factor()],
-            ItemType::Composite | ItemType::List => {
-                let mut lengths = vec![];
-
-                for item in self {
-                    lengths.push(BTreeOverlay::new(item, 0)?.total_nodes())
-                }
-
-                lengths
-            }
-        };
-
-        BTreeOverlay::from_lengths(chunk_offset, lengths)
-    }
-
-    fn packed_encoding(&self) -> Result<Vec<u8>, Error> {
-        Err(Error::ShouldNeverBePacked(Self::item_type()))
-    }
-
-    fn packing_factor() -> usize {
-        1
-    }
-
-    fn cached_hash_tree_root(
-        &self,
-        other: &Vec<T>,
-        cache: &mut TreeHashCache,
-        chunk: usize,
-    ) -> Result<usize, Error> {
-        let offset_handler = BTreeOverlay::new(self, chunk)?;
-        let old_offset_handler = BTreeOverlay::new(other, chunk)?;
-
-        if offset_handler.num_leaf_nodes != old_offset_handler.num_leaf_nodes {
-            let old_offset_handler = BTreeOverlay::new(other, chunk)?;
-
-            // Get slices of the exsiting tree from the cache.
-            let (old_bytes, old_flags) = cache
-                .slices(old_offset_handler.chunk_range())
-                .ok_or_else(|| Error::UnableToObtainSlices)?;
-
-            let (new_bytes, new_flags) =
-                if offset_handler.num_leaf_nodes > old_offset_handler.num_leaf_nodes {
-                    grow_merkle_cache(
-                        old_bytes,
-                        old_flags,
-                        old_offset_handler.height(),
-                        offset_handler.height(),
-                    )
-                    .ok_or_else(|| Error::UnableToGrowMerkleTree)?
-                } else {
-                    shrink_merkle_cache(
-                        old_bytes,
-                        old_flags,
-                        old_offset_handler.height(),
-                        offset_handler.height(),
-                        offset_handler.total_chunks(),
-                    )
-                    .ok_or_else(|| Error::UnableToShrinkMerkleTree)?
-                };
-
-            // Create a `TreeHashCache` from the raw elements.
-            let modified_cache = TreeHashCache::from_elems(new_bytes, new_flags);
-
-            // Splice the newly created `TreeHashCache` over the existing elements.
-            cache.splice(old_offset_handler.chunk_range(), modified_cache);
-        }
-
-        match T::item_type() {
-            ItemType::Basic => {
-                let leaves = get_packed_leaves(self)?;
-
-                for (i, chunk) in offset_handler.iter_leaf_nodes().enumerate() {
-                    if let Some(latest) = leaves.get(i * HASHSIZE..(i + 1) * HASHSIZE) {
-                        cache.maybe_update_chunk(*chunk, latest)?;
-                    }
-                }
-                let first_leaf_chunk = offset_handler.first_leaf_node()?;
-
-                cache.splice(
-                    first_leaf_chunk..offset_handler.next_node,
-                    TreeHashCache::from_bytes(leaves, true)?,
-                );
-            }
-            ItemType::Composite | ItemType::List => {
-                let mut i = offset_handler.num_leaf_nodes;
-                for &start_chunk in offset_handler.iter_leaf_nodes().rev() {
-                    i -= 1;
-                    match (other.get(i), self.get(i)) {
-                        // The item existed in the previous list and exsits in the current list.
-                        (Some(old), Some(new)) => {
-                            new.cached_hash_tree_root(old, cache, start_chunk)?;
-                        }
-                        // The item existed in the previous list but does not exist in this list.
-                        //
-                        // I.e., the list has been shortened.
-                        (Some(old), None) => {
-                            // Splice out the entire tree of the removed node, replacing it with a
-                            // single padding node.
-                            let end_chunk = BTreeOverlay::new(old, start_chunk)?.next_node;
-
-                            cache.splice(
-                                start_chunk..end_chunk,
-                                TreeHashCache::from_bytes(vec![0; HASHSIZE], true)?,
-                            );
-                        }
-                        // The item existed in the previous list but does exist in this list.
-                        //
-                        // I.e., the list has been lengthened.
-                        (None, Some(new)) => {
-                            let bytes: Vec<u8> = TreeHashCache::new(new)?.into();
-
-                            cache.splice(
-                                start_chunk..start_chunk + 1,
-                                TreeHashCache::from_bytes(bytes, true)?,
-                            );
-                        }
-                        // The item didn't exist in the old list and doesn't exist in the new list,
-                        // nothing to do.
-                        (None, None) => {}
-                    };
-                }
-            }
-        }
-
-        for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
-            if cache.either_modified(children)? {
-                cache.modify_chunk(parent, &cache.hash_children(children)?)?;
-            }
-        }
-
-        // If the root node or the length has changed, mix in the length of the list.
-        let root_node = offset_handler.root();
-        if cache.changed(root_node)? | (self.len() != other.len()) {
-            cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?;
-        }
-
-        Ok(offset_handler.next_node)
-    }
-}
-
-fn get_packed_leaves<T>(vec: &Vec<T>) -> Result<Vec<u8>, Error>
-where
-    T: CachedTreeHash<T>,
-{
-    let num_packed_bytes = (BYTES_PER_CHUNK / T::packing_factor()) * vec.len();
-    let num_leaves = num_sanitized_leaves(num_packed_bytes);
-
-    let mut packed = Vec::with_capacity(num_leaves * HASHSIZE);
-
-    for item in vec {
-        packed.append(&mut item.packed_encoding()?);
-    }
-
-    Ok(sanitise_bytes(packed))
-}
eth2/utils/tree_hash/src/impls/vec.rs (new file, 183 lines)
@@ -0,0 +1,183 @@
+use super::*;
+
+impl<T> CachedTreeHash<Vec<T>> for Vec<T>
+where
+    T: CachedTreeHash<T>,
+{
+    fn item_type() -> ItemType {
+        ItemType::List
+    }
+
+    fn new_cache(&self) -> Result<TreeHashCache, Error> {
+        match T::item_type() {
+            ItemType::Basic => {
+                TreeHashCache::from_bytes(merkleize(get_packed_leaves(self)?), false)
+            }
+            ItemType::Composite | ItemType::List => {
+                let subtrees = self
+                    .iter()
+                    .map(|item| TreeHashCache::new(item))
+                    .collect::<Result<Vec<TreeHashCache>, _>>()?;
+
+                TreeHashCache::from_leaves_and_subtrees(self, subtrees)
+            }
+        }
+    }
+
+    fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
+        let lengths = match T::item_type() {
+            ItemType::Basic => vec![1; self.len() / T::packing_factor()],
+            ItemType::Composite | ItemType::List => {
+                let mut lengths = vec![];
+
+                for item in self {
+                    lengths.push(BTreeOverlay::new(item, 0)?.total_nodes())
+                }
+
+                lengths
+            }
+        };
+
+        BTreeOverlay::from_lengths(chunk_offset, lengths)
+    }
+
+    fn packed_encoding(&self) -> Result<Vec<u8>, Error> {
+        Err(Error::ShouldNeverBePacked(Self::item_type()))
+    }
+
+    fn packing_factor() -> usize {
+        1
+    }
+
+    fn update_cache(
+        &self,
+        other: &Vec<T>,
+        cache: &mut TreeHashCache,
+        chunk: usize,
+    ) -> Result<usize, Error> {
+        let offset_handler = BTreeOverlay::new(self, chunk)?;
+        let old_offset_handler = BTreeOverlay::new(other, chunk)?;
+
+        if offset_handler.num_leaf_nodes != old_offset_handler.num_leaf_nodes {
+            let old_offset_handler = BTreeOverlay::new(other, chunk)?;
+
+            // Get slices of the existing tree from the cache.
+            let (old_bytes, old_flags) = cache
+                .slices(old_offset_handler.chunk_range())
+                .ok_or_else(|| Error::UnableToObtainSlices)?;
+
+            let (new_bytes, new_flags) =
+                if offset_handler.num_leaf_nodes > old_offset_handler.num_leaf_nodes {
+                    grow_merkle_cache(
+                        old_bytes,
+                        old_flags,
+                        old_offset_handler.height(),
+                        offset_handler.height(),
+                    )
+                    .ok_or_else(|| Error::UnableToGrowMerkleTree)?
+                } else {
+                    shrink_merkle_cache(
+                        old_bytes,
+                        old_flags,
+                        old_offset_handler.height(),
+                        offset_handler.height(),
+                        offset_handler.total_chunks(),
+                    )
+                    .ok_or_else(|| Error::UnableToShrinkMerkleTree)?
+                };
+
+            // Create a `TreeHashCache` from the raw elements.
+            let modified_cache = TreeHashCache::from_elems(new_bytes, new_flags);
+
+            // Splice the newly created `TreeHashCache` over the existing elements.
+            cache.splice(old_offset_handler.chunk_range(), modified_cache);
+        }
+
+        match T::item_type() {
+            ItemType::Basic => {
+                let leaves = get_packed_leaves(self)?;
+
+                for (i, chunk) in offset_handler.iter_leaf_nodes().enumerate() {
+                    if let Some(latest) = leaves.get(i * HASHSIZE..(i + 1) * HASHSIZE) {
+                        cache.maybe_update_chunk(*chunk, latest)?;
+                    }
+                }
+                let first_leaf_chunk = offset_handler.first_leaf_node()?;
+
+                cache.splice(
+                    first_leaf_chunk..offset_handler.next_node,
+                    TreeHashCache::from_bytes(leaves, true)?,
+                );
+            }
+            ItemType::Composite | ItemType::List => {
+                let mut i = offset_handler.num_leaf_nodes;
+                for &start_chunk in offset_handler.iter_leaf_nodes().rev() {
+                    i -= 1;
+                    match (other.get(i), self.get(i)) {
+                        // The item existed in the previous list and exists in the current list.
+                        (Some(old), Some(new)) => {
+                            new.update_cache(old, cache, start_chunk)?;
+                        }
+                        // The item existed in the previous list but does not exist in this list.
+                        //
+                        // I.e., the list has been shortened.
+                        (Some(old), None) => {
+                            // Splice out the entire tree of the removed node, replacing it with a
+                            // single padding node.
+                            let end_chunk = BTreeOverlay::new(old, start_chunk)?.next_node;
+
+                            cache.splice(
+                                start_chunk..end_chunk,
+                                TreeHashCache::from_bytes(vec![0; HASHSIZE], true)?,
+                            );
+                        }
+                        // The item did not exist in the previous list but does exist in this list.
+                        //
+                        // I.e., the list has been lengthened.
+                        (None, Some(new)) => {
+                            let bytes: Vec<u8> = TreeHashCache::new(new)?.into();
+
+                            cache.splice(
+                                start_chunk..start_chunk + 1,
+                                TreeHashCache::from_bytes(bytes, true)?,
+                            );
+                        }
+                        // The item didn't exist in the old list and doesn't exist in the new list;
+                        // nothing to do.
+                        (None, None) => {}
+                    };
+                }
+            }
+        }
+
+        for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
+            if cache.either_modified(children)? {
+                cache.modify_chunk(parent, &cache.hash_children(children)?)?;
+            }
+        }
+
+        // If the root node or the length has changed, mix in the length of the list.
+        let root_node = offset_handler.root();
+        if cache.changed(root_node)? | (self.len() != other.len()) {
+            cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?;
+        }
+
+        Ok(offset_handler.next_node)
+    }
+}
+
+fn get_packed_leaves<T>(vec: &Vec<T>) -> Result<Vec<u8>, Error>
+where
+    T: CachedTreeHash<T>,
+{
+    let num_packed_bytes = (BYTES_PER_CHUNK / T::packing_factor()) * vec.len();
+    let num_leaves = num_sanitized_leaves(num_packed_bytes);
+
+    let mut packed = Vec::with_capacity(num_leaves * HASHSIZE);
+
+    for item in vec {
+        packed.append(&mut item.packed_encoding()?);
+    }
+
+    Ok(sanitise_bytes(packed))
+}
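A rough worked example of the packing arithmetic behind get_packed_leaves above, assuming BYTES_PER_CHUNK = 32 and the u64 packing factor of HASHSIZE / 8 = 4 shown earlier in this diff; the element count and the function name here are illustrative only, not part of the commit:

    // Sketch: how many packed bytes a Vec<u64> of length 10 contributes before
    // `sanitise_bytes` pads the result out to whole 32-byte leaf chunks.
    fn packed_bytes_example() {
        let bytes_per_chunk = 32; // assumed value of BYTES_PER_CHUNK
        let packing_factor = 4;   // u64: HASHSIZE / 8, per the impl above
        let vec_len = 10;         // illustrative element count

        // Mirrors `(BYTES_PER_CHUNK / T::packing_factor()) * vec.len()`.
        let num_packed_bytes = (bytes_per_chunk / packing_factor) * vec_len;
        assert_eq!(num_packed_bytes, 80); // 80 bytes, i.e. 8 bytes per u64
    }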
@@ -40,15 +40,15 @@ pub enum ItemType {
 pub trait CachedTreeHash<Item>: Debug {
     fn item_type() -> ItemType;

-    fn build_tree_hash_cache(&self) -> Result<TreeHashCache, Error>;
-
     fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error>;

     fn packed_encoding(&self) -> Result<Vec<u8>, Error>;

     fn packing_factor() -> usize;

-    fn cached_hash_tree_root(
+    fn new_cache(&self) -> Result<TreeHashCache, Error>;
+
+    fn update_cache(
         &self,
         other: &Item,
         cache: &mut TreeHashCache,
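A minimal caller-side sketch of the renamed API, based on the test code later in this diff: TreeHashCache::new builds the initial cache (delegating to new_cache), and update_cache then refreshes it against the previous value. The function name and the use of the Inner test struct are assumptions for illustration, not part of the commit:

    // Sketch only: build a cache for `original`, then update it for `modified`.
    fn example_usage(original: &Inner, modified: &Inner) -> Result<(), Error> {
        // Initial cache; internally calls `original.new_cache()`.
        let mut cache = TreeHashCache::new(original)?;

        // Differential update starting at chunk 0; only modified chunks are re-hashed.
        modified.update_cache(original, &mut cache, 0)?;

        Ok(())
    }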
@@ -19,14 +19,14 @@ impl CachedTreeHash<Inner> for Inner {
         ItemType::Composite
     }

-    fn build_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
+    fn new_cache(&self) -> Result<TreeHashCache, Error> {
         let tree = TreeHashCache::from_leaves_and_subtrees(
             self,
             vec![
-                self.a.build_tree_hash_cache()?,
-                self.b.build_tree_hash_cache()?,
-                self.c.build_tree_hash_cache()?,
-                self.d.build_tree_hash_cache()?,
+                self.a.new_cache()?,
+                self.b.new_cache()?,
+                self.c.new_cache()?,
+                self.d.new_cache()?,
             ],
         )?;

@@ -52,7 +52,7 @@ impl CachedTreeHash<Inner> for Inner {
         1
     }

-    fn cached_hash_tree_root(
+    fn update_cache(
         &self,
         other: &Self,
         cache: &mut TreeHashCache,
@@ -63,10 +63,10 @@ impl CachedTreeHash<Inner> for Inner {
         // Skip past the internal nodes and update any changed leaf nodes.
         {
             let chunk = offset_handler.first_leaf_node()?;
-            let chunk = self.a.cached_hash_tree_root(&other.a, cache, chunk)?;
-            let chunk = self.b.cached_hash_tree_root(&other.b, cache, chunk)?;
-            let chunk = self.c.cached_hash_tree_root(&other.c, cache, chunk)?;
-            let _chunk = self.d.cached_hash_tree_root(&other.d, cache, chunk)?;
+            let chunk = self.a.update_cache(&other.a, cache, chunk)?;
+            let chunk = self.b.update_cache(&other.b, cache, chunk)?;
+            let chunk = self.c.update_cache(&other.c, cache, chunk)?;
+            let _chunk = self.d.update_cache(&other.d, cache, chunk)?;
         }

         for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
@@ -91,13 +91,13 @@ impl CachedTreeHash<Outer> for Outer {
         ItemType::Composite
     }

-    fn build_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
+    fn new_cache(&self) -> Result<TreeHashCache, Error> {
         let tree = TreeHashCache::from_leaves_and_subtrees(
             self,
             vec![
-                self.a.build_tree_hash_cache()?,
-                self.b.build_tree_hash_cache()?,
-                self.c.build_tree_hash_cache()?,
+                self.a.new_cache()?,
+                self.b.new_cache()?,
+                self.c.new_cache()?,
             ],
         )?;

@@ -122,7 +122,7 @@ impl CachedTreeHash<Outer> for Outer {
         1
     }

-    fn cached_hash_tree_root(
+    fn update_cache(
         &self,
         other: &Self,
         cache: &mut TreeHashCache,
@@ -133,9 +133,9 @@ impl CachedTreeHash<Outer> for Outer {
         // Skip past the internal nodes and update any changed leaf nodes.
         {
             let chunk = offset_handler.first_leaf_node()?;
-            let chunk = self.a.cached_hash_tree_root(&other.a, cache, chunk)?;
-            let chunk = self.b.cached_hash_tree_root(&other.b, cache, chunk)?;
-            let _chunk = self.c.cached_hash_tree_root(&other.c, cache, chunk)?;
+            let chunk = self.a.update_cache(&other.a, cache, chunk)?;
+            let chunk = self.b.update_cache(&other.b, cache, chunk)?;
+            let _chunk = self.c.update_cache(&other.c, cache, chunk)?;
         }

         for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
@@ -186,7 +186,7 @@ fn partial_modification_to_inner_struct() {
     let mut cache_struct = TreeHashCache::new(&original_outer).unwrap();

     modified_outer
-        .cached_hash_tree_root(&original_outer, &mut cache_struct, 0)
+        .update_cache(&original_outer, &mut cache_struct, 0)
         .unwrap();

     let modified_cache: Vec<u8> = cache_struct.into();
@@ -240,7 +240,7 @@ fn partial_modification_to_outer() {
     let mut cache_struct = TreeHashCache::new(&original_outer).unwrap();

     modified_outer
-        .cached_hash_tree_root(&original_outer, &mut cache_struct, 0)
+        .update_cache(&original_outer, &mut cache_struct, 0)
         .unwrap();

     let modified_cache: Vec<u8> = cache_struct.into();
@@ -326,7 +326,7 @@ fn test_u64_vec_modifications(original: Vec<u64>, modified: Vec<u64>) {
     // Perform a differential hash
     let mut cache_struct = TreeHashCache::from_bytes(original_cache.clone(), false).unwrap();
     modified
-        .cached_hash_tree_root(&original, &mut cache_struct, 0)
+        .update_cache(&original, &mut cache_struct, 0)
         .unwrap();
     let modified_cache: Vec<u8> = cache_struct.into();

@@ -430,9 +430,7 @@ fn large_vec_of_u64_builds() {
 fn test_inner_vec_modifications(original: Vec<Inner>, modified: Vec<Inner>, reference: Vec<u64>) {
     let mut cache = TreeHashCache::new(&original).unwrap();

-    modified
-        .cached_hash_tree_root(&original, &mut cache, 0)
-        .unwrap();
+    modified.update_cache(&original, &mut cache, 0).unwrap();
     let modified_cache: Vec<u8> = cache.into();

     // Build the reference vec.
@@ -792,7 +790,7 @@ fn generic_test(index: usize) {
     let mut cache_struct = TreeHashCache::from_bytes(cache.clone(), false).unwrap();

     changed_inner
-        .cached_hash_tree_root(&inner, &mut cache_struct, 0)
+        .update_cache(&inner, &mut cache_struct, 0)
         .unwrap();

     // assert_eq!(*cache_struct.hash_count, 3);