Update depth variable
parent ec43a4085c
commit a84a063c25
@@ -20,7 +20,7 @@ impl CachedTreeHasher {
         T: CachedTreeHashSubTree<T>,
     {
         Ok(Self {
-            cache: TreeHashCache::new(item)?,
+            cache: TreeHashCache::new(item, 0)?,
         })
     }
 
@@ -73,9 +73,13 @@ pub trait CachedTreeHash<T>: CachedTreeHashSubTree<T> + Sized {
 }
 
 pub trait CachedTreeHashSubTree<Item>: TreeHash {
-    fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error>;
+    fn tree_hash_cache_overlay(
+        &self,
+        chunk_offset: usize,
+        depth: usize,
+    ) -> Result<BTreeOverlay, Error>;
 
-    fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error>;
+    fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error>;
 
     fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error>;
 }
@@ -172,21 +176,22 @@ impl Into<Vec<u8>> for TreeHashCache {
 }
 
 impl TreeHashCache {
-    pub fn new<T>(item: &T) -> Result<Self, Error>
+    pub fn new<T>(item: &T, depth: usize) -> Result<Self, Error>
     where
         T: CachedTreeHashSubTree<T>,
     {
-        item.new_tree_hash_cache()
+        item.new_tree_hash_cache(depth)
    }
 
     pub fn from_leaves_and_subtrees<T>(
         item: &T,
         leaves_and_subtrees: Vec<Self>,
+        depth: usize,
     ) -> Result<Self, Error>
     where
         T: CachedTreeHashSubTree<T>,
     {
-        let overlay = BTreeOverlay::new(item, 0)?;
+        let overlay = BTreeOverlay::new(item, 0, depth)?;
 
         // Note how many leaves were provided. If is not a power-of-two, we'll need to pad it out
         // later.
@@ -204,7 +209,10 @@ impl TreeHashCache {
         // Allocate enough bytes to store all the leaves.
         let mut leaves = Vec::with_capacity(overlay.num_leaf_nodes() * HASHSIZE);
         let mut overlays = Vec::with_capacity(leaves_and_subtrees.len());
 
+        if T::tree_hash_type() == TreeHashType::List {
+            overlays.push(overlay);
+        }
 
         // Iterate through all of the leaves/subtrees, adding their root as a leaf node and then
         // concatenating their merkle trees.
@@ -238,16 +246,21 @@
     pub fn from_bytes(
         bytes: Vec<u8>,
         initial_modified_state: bool,
-        overlay: BTreeOverlay,
+        overlay: Option<BTreeOverlay>,
     ) -> Result<Self, Error> {
         if bytes.len() % BYTES_PER_CHUNK > 0 {
             return Err(Error::BytesAreNotEvenChunks(bytes.len()));
         }
 
+        let overlays = match overlay {
+            Some(overlay) => vec![overlay],
+            None => vec![],
+        };
+
         Ok(Self {
             chunk_modified: vec![initial_modified_state; bytes.len() / BYTES_PER_CHUNK],
             cache: bytes,
-            overlays: vec![overlay],
+            overlays,
             chunk_index: 0,
             overlay_index: 0,
         })
@@ -317,6 +330,17 @@ impl TreeHashCache {
         ))
     }
 
+    pub fn remove_proceeding_child_overlays(&mut self, overlay_index: usize, depth: usize) {
+        let end = self
+            .overlays
+            .iter()
+            .skip(overlay_index)
+            .position(|o| o.depth <= depth)
+            .unwrap_or_else(|| self.overlays.len());
+
+        self.overlays.splice(overlay_index..end, vec![]);
+    }
+
     pub fn update_internal_nodes(&mut self, overlay: &BTreeOverlay) -> Result<(), Error> {
         for (parent, children) in overlay.internal_parents_and_children().into_iter().rev() {
             if self.either_modified(children)? {
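The new remove_proceeding_child_overlays helper above discards the overlays recorded at and after overlay_index that sit deeper in the tree than the given depth, stopping at the first overlay that does not. Below is a standalone sketch of that intended behaviour using a stand-in ToyOverlay type (not a type from this crate); unlike the chain above, it maps the position found after skip back to an absolute index before splicing.

#[derive(Debug, PartialEq)]
struct ToyOverlay {
    depth: usize,
}

// Remove the overlays at and after `overlay_index` that are deeper than `depth`,
// stopping at the first overlay that is not (a sibling or an ancestor).
fn remove_child_overlays(overlays: &mut Vec<ToyOverlay>, overlay_index: usize, depth: usize) {
    let end = overlays
        .iter()
        .skip(overlay_index)
        .position(|o| o.depth <= depth)
        .map(|relative| overlay_index + relative)
        .unwrap_or_else(|| overlays.len());

    overlays.splice(overlay_index..end, vec![]);
}

fn main() {
    // A parent at depth 1, two of its children at depth 2, then a depth-1 sibling.
    let mut overlays = vec![
        ToyOverlay { depth: 1 },
        ToyOverlay { depth: 2 },
        ToyOverlay { depth: 2 },
        ToyOverlay { depth: 1 },
    ];

    // Prune the child overlays recorded from index 1 onwards; the depth-1 sibling survives.
    remove_child_overlays(&mut overlays, 1, 1);

    assert_eq!(
        overlays,
        vec![ToyOverlay { depth: 1 }, ToyOverlay { depth: 1 }]
    );
}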
@@ -3,21 +3,23 @@ use super::*;
 #[derive(Debug, PartialEq, Clone)]
 pub struct BTreeOverlay {
     pub offset: usize,
+    pub depth: usize,
     pub num_items: usize,
     pub lengths: Vec<usize>,
 }
 
 impl BTreeOverlay {
-    pub fn new<T>(item: &T, initial_offset: usize) -> Result<Self, Error>
+    pub fn new<T>(item: &T, initial_offset: usize, depth: usize) -> Result<Self, Error>
     where
         T: CachedTreeHashSubTree<T>,
     {
-        item.tree_hash_cache_overlay(initial_offset)
+        item.tree_hash_cache_overlay(initial_offset, depth)
     }
 
     pub fn from_lengths(
         offset: usize,
         num_items: usize,
+        depth: usize,
         lengths: Vec<usize>,
     ) -> Result<Self, Error> {
         if lengths.is_empty() {
@@ -26,6 +28,7 @@ impl BTreeOverlay {
         Ok(Self {
             offset,
             num_items,
+            depth,
             lengths,
         })
     }
@@ -166,7 +169,7 @@ mod test {
     use super::*;
 
     fn get_tree_a(n: usize) -> BTreeOverlay {
-        BTreeOverlay::from_lengths(0, n, vec![1; n]).unwrap()
+        BTreeOverlay::from_lengths(0, n, 0, vec![1; n]).unwrap()
     }
 
     #[test]
@@ -204,7 +207,7 @@ mod test {
         let tree = get_tree_a(2);
         assert_eq!(tree.chunk_range(), 0..3);
 
-        let tree = BTreeOverlay::from_lengths(11, 4, vec![1, 1]).unwrap();
+        let tree = BTreeOverlay::from_lengths(11, 4, 0, vec![1, 1]).unwrap();
         assert_eq!(tree.chunk_range(), 11..14);
     }
 
@@ -3,28 +3,29 @@ use super::*;
 mod vec;
 
 impl CachedTreeHashSubTree<u64> for u64 {
-    fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
+    fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error> {
         Ok(TreeHashCache::from_bytes(
             merkleize(self.to_le_bytes().to_vec()),
             false,
-            self.tree_hash_cache_overlay(0)?,
+            // self.tree_hash_cache_overlay(0, depth)?,
+            None,
         )?)
     }
 
-    fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
-        BTreeOverlay::from_lengths(chunk_offset, 1, vec![1])
+    fn tree_hash_cache_overlay(
+        &self,
+        chunk_offset: usize,
+        depth: usize,
+    ) -> Result<BTreeOverlay, Error> {
+        BTreeOverlay::from_lengths(chunk_offset, 1, depth, vec![1])
     }
 
     fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
         let leaf = merkleize(self.to_le_bytes().to_vec());
         cache.maybe_update_chunk(cache.chunk_index, &leaf)?;
 
         dbg!(cache.overlay_index);
 
         cache.chunk_index += 1;
         cache.overlay_index += 1;
 
         dbg!(cache.overlay_index);
         // cache.overlay_index += 1;
 
         Ok(())
     }
@@ -4,22 +4,22 @@ impl<T> CachedTreeHashSubTree<Vec<T>> for Vec<T>
 where
     T: CachedTreeHashSubTree<T> + TreeHash,
 {
-    fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
-        let overlay = self.tree_hash_cache_overlay(0)?;
+    fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error> {
+        let overlay = self.tree_hash_cache_overlay(0, depth)?;
 
         let mut cache = match T::tree_hash_type() {
             TreeHashType::Basic => TreeHashCache::from_bytes(
                 merkleize(get_packed_leaves(self)?),
                 false,
-                overlay.clone(),
+                Some(overlay.clone()),
             ),
             TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
                 let subtrees = self
                     .iter()
-                    .map(|item| TreeHashCache::new(item))
+                    .map(|item| TreeHashCache::new(item, depth + 1))
                     .collect::<Result<Vec<TreeHashCache>, _>>()?;
 
-                TreeHashCache::from_leaves_and_subtrees(self, subtrees)
+                TreeHashCache::from_leaves_and_subtrees(self, subtrees, depth)
             }
         }?;
 
@@ -30,7 +30,11 @@ where
         Ok(cache)
     }
 
-    fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
+    fn tree_hash_cache_overlay(
+        &self,
+        chunk_offset: usize,
+        depth: usize,
+    ) -> Result<BTreeOverlay, Error> {
         let lengths = match T::tree_hash_type() {
             TreeHashType::Basic => {
                 // Ceil division.
@@ -44,7 +48,7 @@ where
                 let mut lengths = vec![];
 
                 for item in self {
-                    lengths.push(BTreeOverlay::new(item, 0)?.num_nodes())
+                    lengths.push(BTreeOverlay::new(item, 0, depth)?.num_nodes())
                 }
 
                 // Disallow zero-length as an empty list still has one all-padding node.
@@ -56,17 +60,12 @@ where
             }
         };
 
-        BTreeOverlay::from_lengths(chunk_offset, self.len(), lengths)
+        BTreeOverlay::from_lengths(chunk_offset, self.len(), depth, lengths)
     }
 
     fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
-        let new_overlay = BTreeOverlay::new(self, cache.chunk_index)?;
         let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
-
-        dbg!(cache.overlay_index);
-
-        // dbg!(&new_overlay);
-        // dbg!(&old_overlay);
+        let new_overlay = BTreeOverlay::new(self, cache.chunk_index, old_overlay.depth)?;
 
         // If the merkle tree required to represent the new list is of a different size to the one
         // required for the previous list, then update our cache.
@@ -109,11 +108,7 @@ where
                 }
             }
             TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
-                let mut local_overlay_index = cache.overlay_index;
-
                 for i in 0..new_overlay.num_leaf_nodes() {
-                    cache.overlay_index = local_overlay_index;
-
                     // Adjust `i` so it is a leaf node for each of the overlays.
                     let old_i = i + old_overlay.num_internal_nodes();
                     let new_i = i + new_overlay.num_internal_nodes();
@@ -127,8 +122,27 @@ where
                             cache.chunk_index = new.start;
 
                             self[i].update_tree_hash_cache(cache)?;
-
-                            local_overlay_index += 1;
                         }
+                        // The item did not exist in the previous list but does exist in this list.
+                        //
+                        // Viz., the list has been lengthened.
+                        (None, Some(new)) => {
+                            let (bytes, mut bools, overlays) =
+                                TreeHashCache::new(&self[i], new_overlay.depth + 1)?
+                                    .into_components();
+
+                            // Record the number of overlays, this will be used later in the fn.
+                            let num_overlays = overlays.len();
+
+                            // Flag the root node of the new tree as dirty.
+                            bools[0] = true;
+
+                            cache.splice(new.start..new.start + 1, bytes, bools);
+                            cache
+                                .overlays
+                                .splice(cache.overlay_index..cache.overlay_index, overlays);
+
+                            cache.overlay_index += num_overlays;
+                        }
                         // The item existed in the previous list but does not exist in this list.
                         //
@@ -144,37 +158,27 @@
                             // with a single padding node.
                             cache.splice(old, vec![0; HASHSIZE], vec![true]);
 
-                            cache.overlays.remove(cache.overlay_index);
+                            // cache.overlays.remove(cache.overlay_index);
                         }
 
-                        local_overlay_index += 1;
                     }
-                        // The item did not exist in the previous list but does exist in this list.
-                        //
-                        // Viz., the list has been lengthened.
-                        (None, Some(new)) => {
-                            let bytes: Vec<u8> = TreeHashCache::new(&self[i])?.into();
-                            let bools = vec![true; bytes.len() / HASHSIZE];
-
-                            cache.splice(new.start..new.start + 1, bytes, bools);
-
-                            cache.overlays.insert(
-                                std::cmp::min(cache.overlay_index, cache.overlays.len()),
-                                BTreeOverlay::new(&self[i], 0)?,
-                            );
-
-                            local_overlay_index += 1;
+                            // local_overlay_index += 1;
-                        }
                         // The item didn't exist in the old list and doesn't exist in the new list,
                         // nothing to do.
                         (None, None) => {}
                     }
                 }
 
+                // Clean out any excess overlays that may or may not be remaining if the list was
+                // shortened.
+                cache.remove_proceeding_child_overlays(cache.overlay_index, new_overlay.depth);
             }
         }
 
         cache.update_internal_nodes(&new_overlay)?;
 
         dbg!(&new_overlay);
 
         // Mix in length.
         let root_node = new_overlay.root();
         if cache.changed(root_node)? {
@@ -191,8 +195,6 @@ where
 
         cache.chunk_index = new_overlay.next_node();
 
-        dbg!(&cache.overlay_index);
-
         Ok(())
     }
 }
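In the Vec implementation above, every subtree cache is built with TreeHashCache::new(item, depth + 1), so each nested container records how far below its parent it sits, while the list's own overlay is built at the parent's depth. A minimal, self-contained sketch of that depth-threading pattern follows; Node and record_depths are stand-ins for illustration, not items from this crate.

#[derive(Debug)]
struct Node {
    children: Vec<Node>,
}

// Record the depth of every node, root first, passing `depth + 1` to each child,
// mirroring how subtree caches are built one level deeper than their parent.
fn record_depths(node: &Node, depth: usize, out: &mut Vec<usize>) {
    out.push(depth);
    for child in &node.children {
        record_depths(child, depth + 1, out);
    }
}

fn main() {
    let tree = Node {
        children: vec![Node { children: vec![] }, Node { children: vec![] }],
    };

    let mut depths = vec![];
    record_depths(&tree, 0, &mut depths);

    // The root is at depth 0 and both children are at depth 1.
    assert_eq!(depths, vec![0, 1, 1]);
}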
@@ -217,13 +217,13 @@ fn test_list_of_struct_with_vec() {
         vec![a.clone(), c.clone()],
         // vec![a.clone(), b.clone(), c.clone(), d.clone()],
         // vec![b.clone(), a.clone(), c.clone(), d.clone()],
-        vec![],
+        // vec![],
     ];
 
     test_routine(original, modified);
 }
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
 pub struct Inner {
     pub a: u64,
     pub b: u64,
@@ -231,6 +231,7 @@ pub struct Inner {
     pub d: u64,
 }
 
+/*
 impl TreeHash for Inner {
     fn tree_hash_type() -> TreeHashType {
         TreeHashType::Container
@@ -296,14 +297,13 @@ impl CachedTreeHashSubTree<Inner> for Inner {
         self.c.update_tree_hash_cache(cache)?;
         self.d.update_tree_hash_cache(cache)?;
 
-        dbg!(cache.overlay_index);
-
         // Iterate through the internal nodes, updating them if their children have changed.
         cache.update_internal_nodes(&overlay)?;
 
         Ok(())
     }
 }
+*/
 
 fn generic_test(index: usize) {
     let inner = Inner {
@@ -313,7 +313,7 @@ fn generic_test(index: usize) {
         d: 4,
     };
 
-    let mut cache = TreeHashCache::new(&inner).unwrap();
+    let mut cache = TreeHashCache::new(&inner, 0).unwrap();
 
     let changed_inner = match index {
         0 => Inner {
@@ -378,7 +378,7 @@ fn inner_builds() {
         d: 4,
     };
 
-    let cache: Vec<u8> = TreeHashCache::new(&inner).unwrap().into();
+    let cache: Vec<u8> = TreeHashCache::new(&inner, 0).unwrap().into();
 
     assert_eq!(expected, cache);
 }
@@ -59,46 +59,43 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
 
     let output = quote! {
         impl tree_hash::CachedTreeHashSubTree<#name> for #name {
-            fn new_tree_hash_cache(&self) -> Result<tree_hash::TreeHashCache, tree_hash::Error> {
+            fn new_tree_hash_cache(&self, depth: usize) -> Result<tree_hash::TreeHashCache, tree_hash::Error> {
                 let tree = tree_hash::TreeHashCache::from_leaves_and_subtrees(
                     self,
                     vec![
                         #(
-                            self.#idents_a.new_tree_hash_cache()?,
+                            self.#idents_a.new_tree_hash_cache(depth)?,
                         )*
                     ],
+                    depth
                 )?;
 
                 Ok(tree)
             }
 
-            fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<tree_hash::BTreeOverlay, tree_hash::Error> {
+            fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> Result<tree_hash::BTreeOverlay, tree_hash::Error> {
                 let mut lengths = vec![];
 
                 #(
-                    lengths.push(tree_hash::BTreeOverlay::new(&self.#idents_b, 0)?.num_nodes());
+                    lengths.push(tree_hash::BTreeOverlay::new(&self.#idents_b, 0, depth)?.num_nodes());
                 )*
 
-                tree_hash::BTreeOverlay::from_lengths(chunk_offset, #num_items, lengths)
+                tree_hash::BTreeOverlay::from_lengths(chunk_offset, #num_items, depth, lengths)
             }
 
             fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
-                let overlay = BTreeOverlay::new(self, cache.chunk_index)?;
 
                 println!("start derive - cache.overlay_index: {}", cache.overlay_index);
+                let overlay = BTreeOverlay::new(self, cache.chunk_index, 0)?;
 
                 // Skip the chunk index to the first leaf node of this struct.
                 cache.chunk_index = overlay.first_leaf_node();
                 // Skip the overlay index to the first leaf node of this struct.
-                cache.overlay_index += 1;
+                // cache.overlay_index += 1;
 
                 // Recurse into the struct items, updating their caches.
                 #(
                     self.#idents_c.update_tree_hash_cache(cache)?;
                 )*
 
                 println!("end derive - cache.overlay_index: {}", cache.overlay_index);
 
                 // Iterate through the internal nodes, updating them if their children have changed.
                 cache.update_internal_nodes(&overlay)?;
 