Fix failing test, add hacky fix

commit e12fa58e6e
parent e19abee7f9
@@ -220,7 +220,6 @@ impl TreeHashCache
 leaves.append(&mut t.root()?.to_vec());

 let (mut bytes, _bools, mut t_overlays) = t.into_components();
-
 cache.append(&mut bytes);
 overlays.append(&mut t_overlays);
 }
@@ -296,13 +295,19 @@ impl TreeHashCache
 ) -> Result<BTreeOverlay, Error> {
 let old_overlay = self.get_overlay(overlay_index, chunk_index)?;

+// If the merkle tree required to represent the new list is of a different size to the one
+// required for the previous list, then update our cache.
+//
+// This grows/shrinks the bytes to accomodate the new tree, preserving as much of the tree
+// as possible.
+if new_overlay.num_leaf_nodes() != old_overlay.num_leaf_nodes() {
 // Get slices of the exsiting tree from the cache.
 let (old_bytes, old_flags) = self
 .slices(old_overlay.chunk_range())
 .ok_or_else(|| Error::UnableToObtainSlices)?;

-let (new_bytes, new_bools) = if new_overlay.num_leaf_nodes() > old_overlay.num_leaf_nodes()
-{
+let (new_bytes, new_bools) =
+if new_overlay.num_leaf_nodes() > old_overlay.num_leaf_nodes() {
 resize::grow_merkle_cache(
 old_bytes,
 old_flags,
@@ -323,6 +328,7 @@ impl TreeHashCache

 // Splice the newly created `TreeHashCache` over the existing elements.
 self.splice(old_overlay.chunk_range(), new_bytes, new_bools);
+}

 Ok(std::mem::replace(
 &mut self.overlays[overlay_index],
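The `TreeHashCache` change above ends by splicing the resized bytes and flags over the chunk range that the old sub-tree occupied. Below is a minimal sketch of that splice step on a simplified cache that stores one 32-byte chunk per node plus a parallel `modified` flag; `SimpleCache` and its layout are illustrative assumptions, not the crate's actual `TreeHashCache`:

use std::ops::Range;

struct SimpleCache {
    chunks: Vec<[u8; 32]>,
    modified: Vec<bool>,
}

impl SimpleCache {
    // Replace the nodes in `range` with `new_chunks`, marking them all dirty so a
    // later internal-node pass re-hashes everything that was touched.
    fn splice(&mut self, range: Range<usize>, new_chunks: Vec<[u8; 32]>) {
        let n = new_chunks.len();
        self.chunks.splice(range.clone(), new_chunks);
        self.modified.splice(range, vec![true; n]);
    }
}

fn main() {
    let mut cache = SimpleCache {
        chunks: vec![[0u8; 32]; 7], // a 4-leaf tree: 3 internal nodes + 4 leaves
        modified: vec![false; 7],
    };
    // Pretend the sub-tree occupying chunks 3..7 grew to 8 leaves (15 nodes).
    cache.splice(3..7, vec![[0u8; 32]; 15]);
    assert_eq!(cache.chunks.len(), 3 + 15);
}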
@@ -96,10 +96,6 @@ impl BTreeOverlay
 pub fn get_leaf_node(&self, i: usize) -> Result<Option<Range<usize>>, Error> {
 if i >= self.num_nodes() - self.num_padding_leaves() {
 Ok(None)
-/*
-} else if i < self.num_internal_nodes() {
-Ok(None)
-*/
 } else if (i == self.num_internal_nodes()) && (self.num_items == 0) {
 // If this is the first leaf node and the overlay contains zero items, return `None` as
 // this node must be padding.
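For intuition on the padding checks kept above, here is a toy model of node indexing in a full binary tree where `num_items` leaves carry data and the rest are padding. All formulas are assumptions for illustration only, not the crate's `BTreeOverlay`:

fn classify(i: usize, num_items: usize) -> &'static str {
    let num_leaf_nodes = num_items.next_power_of_two().max(1);
    let num_internal_nodes = num_leaf_nodes - 1;
    let num_nodes = 2 * num_leaf_nodes - 1;
    let num_padding_leaves = num_leaf_nodes - num_items;

    if i >= num_nodes - num_padding_leaves {
        "padding leaf (None)"
    } else if i < num_internal_nodes {
        "internal node"
    } else {
        "data leaf (Some(chunk range))"
    }
}

fn main() {
    // 3 items -> 4 leaf nodes, 3 internal nodes, 7 nodes total, 1 padding leaf.
    assert_eq!(classify(0, 3), "internal node");
    assert_eq!(classify(3, 3), "data leaf (Some(chunk range))");
    assert_eq!(classify(6, 3), "padding leaf (None)");
}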
@@ -67,14 +67,7 @@ where
 let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
 let new_overlay = BTreeOverlay::new(self, cache.chunk_index, old_overlay.depth)?;

-// If the merkle tree required to represent the new list is of a different size to the one
-// required for the previous list, then update our cache.
-//
-// This grows/shrinks the bytes to accomodate the new tree, preserving as much of the tree
-// as possible.
-if new_overlay.num_leaf_nodes() != old_overlay.num_leaf_nodes() {
 cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;
-}

 cache.overlay_index += 1;

@@ -120,6 +113,9 @@ where
 // The item existed in the previous list and exists in the current list.
 (Some(_old), Some(new)) => {
 cache.chunk_index = new.start;
+if cache.chunk_index + 1 < cache.chunk_modified.len() {
+cache.chunk_modified[cache.chunk_index + 1] = true;
+}

 self[i].update_tree_hash_cache(cache)?;
 }
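The forced `cache.chunk_modified[cache.chunk_index + 1] = true` added above appears to be the "hacky fix" from the commit title: marking the neighbouring chunk dirty guarantees that the later internal-node pass re-hashes that part of the tree. A toy model of that propagation (not the crate's `update_internal_nodes`) shows why a single set flag is enough to dirty every ancestor:

fn update_internal_nodes(modified: &mut [bool], num_internal: usize) {
    // Heap layout: node i has children 2i + 1 and 2i + 2.
    for parent in (0..num_internal).rev() {
        let (left, right) = (2 * parent + 1, 2 * parent + 2);
        if modified.get(left).copied().unwrap_or(false)
            || modified.get(right).copied().unwrap_or(false)
        {
            // A real implementation would re-hash the two child chunks here.
            modified[parent] = true;
        }
    }
}

fn main() {
    // 7-node tree (3 internal, 4 leaves); only the leaf at index 4 starts dirty.
    let mut modified = vec![false, false, false, false, true, false, false];
    update_internal_nodes(&mut modified, 3);
    // The flag propagates up through node 1 to the root.
    assert!(modified[1] && modified[0]);
}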
@@ -157,11 +153,7 @@ where
 // splice out the entire tree of the removed node, replacing it
 // with a single padding node.
 cache.splice(old, vec![0; HASHSIZE], vec![true]);
-
-// cache.overlays.remove(cache.overlay_index);
 }
-
-// local_overlay_index += 1;
 }
 // The item didn't exist in the old list and doesn't exist in the new list,
 // nothing to do.
@@ -1,7 +1,5 @@
 use int_to_bytes::int_to_bytes32;
 use tree_hash::cached_tree_hash::*;
-use tree_hash::standard_tree_hash::*;
-use tree_hash::*;
 use tree_hash_derive::{CachedTreeHashSubTree, TreeHash};

 #[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
@@ -10,13 +8,6 @@ pub struct NestedStruct
 pub b: Inner,
 }

-#[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
-pub struct StructWithVec {
-pub a: u64,
-pub b: Inner,
-pub c: Vec<u64>,
-}
-
 fn test_routine<T>(original: T, modified: Vec<T>)
 where
 T: CachedTreeHashSubTree<T>,
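The body of `test_routine` is not part of this diff; the sketch below is only a guess at the usual round-trip shape of such a helper, written against a stand-in `Cached` trait so it is self-contained. The trait and its methods are assumptions, not the crate's `CachedTreeHashSubTree` API:

trait Cached {
    type Cache;
    fn new_cache(&self) -> Self::Cache;
    fn update_cache(&self, cache: &mut Self::Cache);
    fn root_from_cache(cache: &Self::Cache) -> Vec<u8>;
    fn full_root(&self) -> Vec<u8>;
}

fn test_routine<T: Cached>(original: T, modified: Vec<T>) {
    let mut cache = original.new_cache();
    assert_eq!(T::root_from_cache(&cache), original.full_root());

    for m in &modified {
        // Updating the existing cache must match hashing the new value from scratch.
        m.update_cache(&mut cache);
        assert_eq!(T::root_from_cache(&cache), m.full_root());
    }
}

// A trivial instance so the sketch runs: the "cache" is the value itself and the
// "root" is its little-endian bytes.
impl Cached for u64 {
    type Cache = u64;
    fn new_cache(&self) -> u64 { *self }
    fn update_cache(&self, cache: &mut u64) { *cache = *self; }
    fn root_from_cache(cache: &u64) -> Vec<u8> { cache.to_le_bytes().to_vec() }
    fn full_root(&self) -> Vec<u8> { self.to_le_bytes().to_vec() }
}

fn main() {
    test_routine(1u64, vec![2, 3, 4]);
}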
@@ -81,6 +72,54 @@ fn test_inner()
 test_routine(original, modified);
 }

+#[test]
+fn test_vec() {
+let original = vec![1, 2, 3, 4, 5];
+
+let modified = vec![
+vec![1, 2, 3, 4, 42],
+vec![1, 2, 3, 4],
+vec![],
+vec![42; 2_usize.pow(4)],
+vec![],
+vec![],
+vec![1, 2, 3, 4, 42],
+vec![1, 2, 3],
+vec![1],
+];
+
+test_routine(original, modified);
+}
+
+#[test]
+fn test_nested_list_of_u64() {
+let original: Vec<Vec<u64>> = vec![vec![1]];
+
+let modified = vec![
+vec![vec![1]],
+vec![vec![1], vec![2]],
+vec![vec![1], vec![3], vec![4]],
+vec![],
+vec![vec![1], vec![3], vec![4]],
+vec![],
+vec![vec![1, 2], vec![3], vec![4, 5, 6, 7, 8]],
+vec![],
+vec![vec![1], vec![2], vec![3]],
+vec![vec![1, 2, 3, 4, 5, 6], vec![1, 2, 3, 4, 5, 6, 7]],
+vec![vec![], vec![], vec![]],
+vec![vec![0, 0, 0], vec![0], vec![0]],
+];
+
+test_routine(original, modified);
+}
+
+#[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
+pub struct StructWithVec {
+pub a: u64,
+pub b: Inner,
+pub c: Vec<u64>,
+}
+
 #[test]
 fn test_struct_with_vec() {
 let original = StructWithVec {
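The `Vec<u64>` cases above appear to be chosen to cross leaf-chunk boundaries. Rough arithmetic, assuming the usual SSZ-style packing of four `u64` values per 32-byte chunk and a leaf layer padded up to a power of two (both assumptions about this crate's layout):

fn num_leaf_chunks(len: usize) -> usize {
    let packed_chunks = (len + 3) / 4; // four u64 values per 32-byte chunk
    packed_chunks.max(1).next_power_of_two()
}

fn main() {
    assert_eq!(num_leaf_chunks(5), 2); // vec![1, 2, 3, 4, 5]
    assert_eq!(num_leaf_chunks(2_usize.pow(4)), 4); // vec![42; 2_usize.pow(4)]
    assert_eq!(num_leaf_chunks(0), 1); // vec![]
    // Moving between these sizes changes the leaf count, which is what exercises
    // the grow/shrink path in the TreeHashCache change above.
}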
@@ -144,48 +183,7 @@ fn test_struct_with_vec()
 }

 #[test]
-fn test_vec() {
-let original = vec![1, 2, 3, 4, 5];
-
-let modified = vec![
-vec![1, 2, 3, 4, 42],
-vec![1, 2, 3, 4],
-vec![],
-vec![42; 2_usize.pow(4)],
-vec![],
-vec![],
-vec![1, 2, 3, 4, 42],
-vec![1, 2, 3],
-vec![1],
-];
-
-test_routine(original, modified);
-}
-
-#[test]
-fn test_nested_list_of_u64() {
-let original: Vec<Vec<u64>> = vec![vec![1]];
-
-let modified = vec![
-vec![vec![1]],
-vec![vec![1], vec![2]],
-vec![vec![1], vec![3], vec![4]],
-vec![],
-vec![vec![1], vec![3], vec![4]],
-vec![],
-vec![vec![1, 2], vec![3], vec![4, 5, 6, 7, 8]],
-vec![],
-vec![vec![1], vec![2], vec![3]],
-vec![vec![1, 2, 3, 4, 5, 6], vec![1, 2, 3, 4, 5, 6, 7]],
-vec![vec![], vec![], vec![]],
-vec![vec![0, 0, 0], vec![0], vec![0]],
-];
-
-test_routine(original, modified);
-}
-
-#[test]
-fn test_list_of_struct_with_vec() {
+fn test_vec_of_struct_with_vec() {
 let a = StructWithVec {
 a: 42,
 b: Inner {
@@ -211,18 +209,99 @@ fn test_list_of_struct_with_vec()
 };
 let d = StructWithVec { a: 0, ..a.clone() };

-let original: Vec<StructWithVec> = vec![a.clone(), c.clone()];
+// let original: Vec<StructWithVec> = vec![a.clone(), c.clone()];
+let original: Vec<StructWithVec> = vec![a.clone()];

 let modified = vec![
 vec![a.clone(), c.clone()],
 vec![a.clone(), b.clone(), c.clone(), d.clone()],
 vec![b.clone(), a.clone(), c.clone(), d.clone()],
 vec![],
+vec![a.clone()],
+vec![a.clone(), b.clone(), c.clone(), d.clone()],
 ];

 test_routine(original, modified);
 }

+#[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
+pub struct StructWithVecOfStructs {
+pub a: u64,
+pub b: Inner,
+pub c: Vec<Inner>,
+}
+
+#[test]
+fn test_struct_with_vec_of_structs() {
+let inner_a = Inner {
+a: 12,
+b: 13,
+c: 14,
+d: 15,
+};
+
+let inner_b = Inner {
+a: 99,
+b: 100,
+c: 101,
+d: 102,
+};
+
+let inner_c = Inner {
+a: 255,
+b: 256,
+c: 257,
+d: 0,
+};
+
+let a = StructWithVecOfStructs {
+a: 42,
+b: inner_a.clone(),
+c: vec![inner_a.clone(), inner_b.clone(), inner_c.clone()],
+};
+
+let b = StructWithVecOfStructs {
+c: vec![],
+..a.clone()
+};
+
+let c = StructWithVecOfStructs {
+a: 800,
+..a.clone()
+};
+
+let d = StructWithVecOfStructs {
+b: inner_c.clone(),
+..a.clone()
+};
+
+let e = StructWithVecOfStructs {
+c: vec![inner_a.clone(), inner_b.clone()],
+..a.clone()
+};
+
+let f = StructWithVecOfStructs {
+c: vec![inner_a.clone()],
+..a.clone()
+};
+
+let variants = vec![
+a.clone(),
+b.clone(),
+c.clone(),
+d.clone(),
+e.clone(),
+f.clone(),
+];
+
+test_routine(a, variants.clone());
+test_routine(b, variants.clone());
+test_routine(c, variants.clone());
+test_routine(d, variants.clone());
+test_routine(e, variants.clone());
+test_routine(f, variants);
+}
+
 #[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
 pub struct Inner {
 pub a: u64,
@@ -231,80 +310,6 @@ pub struct Inner
 pub d: u64,
 }

-/*
-impl TreeHash for Inner {
-fn tree_hash_type() -> TreeHashType {
-TreeHashType::Container
-}
-
-fn tree_hash_packed_encoding(&self) -> Vec<u8> {
-unreachable!("Struct should never be packed.")
-}
-
-fn tree_hash_packing_factor() -> usize {
-unreachable!("Struct should never be packed.")
-}
-
-fn tree_hash_root(&self) -> Vec<u8> {
-let mut leaves = Vec::with_capacity(4 * HASHSIZE);
-
-leaves.append(&mut self.a.tree_hash_root());
-leaves.append(&mut self.b.tree_hash_root());
-leaves.append(&mut self.c.tree_hash_root());
-leaves.append(&mut self.d.tree_hash_root());
-
-efficient_merkleize(&leaves)[0..32].to_vec()
-}
-}
-
-impl CachedTreeHashSubTree<Inner> for Inner {
-fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
-let tree = TreeHashCache::from_leaves_and_subtrees(
-self,
-vec![
-self.a.new_tree_hash_cache()?,
-self.b.new_tree_hash_cache()?,
-self.c.new_tree_hash_cache()?,
-self.d.new_tree_hash_cache()?,
-],
-)?;
-
-Ok(tree)
-}
-
-fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
-let mut lengths = vec![];
-
-lengths.push(BTreeOverlay::new(&self.a, 0)?.num_nodes());
-lengths.push(BTreeOverlay::new(&self.b, 0)?.num_nodes());
-lengths.push(BTreeOverlay::new(&self.c, 0)?.num_nodes());
-lengths.push(BTreeOverlay::new(&self.d, 0)?.num_nodes());
-
-BTreeOverlay::from_lengths(chunk_offset, 4, lengths)
-}
-
-fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
-let overlay = BTreeOverlay::new(self, cache.chunk_index)?;
-
-// Skip the chunk index to the first leaf node of this struct.
-cache.chunk_index = overlay.first_leaf_node();
-// Skip the overlay index to the first leaf node of this struct.
-cache.overlay_index += 1;
-
-// Recurse into the struct items, updating their caches.
-self.a.update_tree_hash_cache(cache)?;
-self.b.update_tree_hash_cache(cache)?;
-self.c.update_tree_hash_cache(cache)?;
-self.d.update_tree_hash_cache(cache)?;
-
-// Iterate through the internal nodes, updating them if their children have changed.
-cache.update_internal_nodes(&overlay)?;
-
-Ok(())
-}
-}
-*/
-
 fn generic_test(index: usize) {
 let inner = Inner {
 a: 1,
@@ -77,7 +77,7 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
 let mut lengths = vec![];

 #(
-lengths.push(tree_hash::BTreeOverlay::new(&self.#idents_b, 0, depth)?.num_nodes());
+lengths.push(tree_hash::BTreeOverlay::new(&self.#idents_b, 0, depth)?.num_chunks());
 )*

 tree_hash::BTreeOverlay::from_lengths(chunk_offset, #num_items, depth, lengths)
@@ -97,7 +97,10 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
 )*

 // Iterate through the internal nodes, updating them if their children have changed.
+dbg!("START");
+dbg!(overlay.offset);
 cache.update_internal_nodes(&overlay)?;
+dbg!("END");

 Ok(())
 }