Remove "old" item requirement from treehash
This commit is contained in:
parent
2ee3b05bd3
commit
4aeadfa60f
@ -4,6 +4,9 @@ version = "0.1.0"
|
|||||||
authors = ["Paul Hauner <paul@paulhauner.com>"]
|
authors = ["Paul Hauner <paul@paulhauner.com>"]
|
||||||
edition = "2018"
|
edition = "2018"
|
||||||
|
|
||||||
|
[dev-dependencies]
|
||||||
|
tree_hash_derive = { path = "../tree_hash_derive" }
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
ethereum-types = "0.5"
|
ethereum-types = "0.5"
|
||||||
hashing = { path = "../hashing" }
|
hashing = { path = "../hashing" }
|
||||||
|
@ -17,10 +17,13 @@ pub enum Error {
|
|||||||
UnableToObtainSlices,
|
UnableToObtainSlices,
|
||||||
UnableToGrowMerkleTree,
|
UnableToGrowMerkleTree,
|
||||||
UnableToShrinkMerkleTree,
|
UnableToShrinkMerkleTree,
|
||||||
|
TreeCannotHaveZeroNodes,
|
||||||
ShouldNeverBePacked(TreeHashType),
|
ShouldNeverBePacked(TreeHashType),
|
||||||
BytesAreNotEvenChunks(usize),
|
BytesAreNotEvenChunks(usize),
|
||||||
NoModifiedFieldForChunk(usize),
|
NoModifiedFieldForChunk(usize),
|
||||||
NoBytesForChunk(usize),
|
NoBytesForChunk(usize),
|
||||||
|
NoOverlayForIndex(usize),
|
||||||
|
NotLeafNode(usize),
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait CachedTreeHash<T>: CachedTreeHashSubTree<T> + Sized {
|
pub trait CachedTreeHash<T>: CachedTreeHashSubTree<T> + Sized {
|
||||||
@ -36,12 +39,7 @@ pub trait CachedTreeHashSubTree<Item>: TreeHash {
|
|||||||
|
|
||||||
fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error>;
|
fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error>;
|
||||||
|
|
||||||
fn update_tree_hash_cache(
|
fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error>;
|
||||||
&self,
|
|
||||||
other: &Item,
|
|
||||||
cache: &mut TreeHashCache,
|
|
||||||
chunk: usize,
|
|
||||||
) -> Result<usize, Error>;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn children(parent: usize) -> (usize, usize) {
|
fn children(parent: usize) -> (usize, usize) {
|
||||||
@ -123,6 +121,10 @@ fn num_bytes(num_leaves: usize) -> usize {
|
|||||||
pub struct TreeHashCache {
|
pub struct TreeHashCache {
|
||||||
cache: Vec<u8>,
|
cache: Vec<u8>,
|
||||||
chunk_modified: Vec<bool>,
|
chunk_modified: Vec<bool>,
|
||||||
|
overlays: Vec<BTreeOverlay>,
|
||||||
|
|
||||||
|
pub chunk_index: usize,
|
||||||
|
pub overlay_index: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Into<Vec<u8>> for TreeHashCache {
|
impl Into<Vec<u8>> for TreeHashCache {
|
||||||
@ -139,10 +141,17 @@ impl TreeHashCache {
|
|||||||
item.new_tree_hash_cache()
|
item.new_tree_hash_cache()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn from_elems(cache: Vec<u8>, chunk_modified: Vec<bool>) -> Self {
|
pub fn from_elems(
|
||||||
|
cache: Vec<u8>,
|
||||||
|
chunk_modified: Vec<bool>,
|
||||||
|
overlays: Vec<BTreeOverlay>,
|
||||||
|
) -> Self {
|
||||||
Self {
|
Self {
|
||||||
cache,
|
cache,
|
||||||
chunk_modified,
|
chunk_modified,
|
||||||
|
overlays,
|
||||||
|
chunk_index: 0,
|
||||||
|
overlay_index: 0,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -153,7 +162,7 @@ impl TreeHashCache {
|
|||||||
where
|
where
|
||||||
T: CachedTreeHashSubTree<T>,
|
T: CachedTreeHashSubTree<T>,
|
||||||
{
|
{
|
||||||
let offset_handler = BTreeOverlay::new(item, 0)?;
|
let overlay = BTreeOverlay::new(item, 0)?;
|
||||||
|
|
||||||
// Note how many leaves were provided. If is not a power-of-two, we'll need to pad it out
|
// Note how many leaves were provided. If is not a power-of-two, we'll need to pad it out
|
||||||
// later.
|
// later.
|
||||||
@ -161,7 +170,7 @@ impl TreeHashCache {
|
|||||||
|
|
||||||
// Allocate enough bytes to store the internal nodes and the leaves and subtrees, then fill
|
// Allocate enough bytes to store the internal nodes and the leaves and subtrees, then fill
|
||||||
// all the to-be-built internal nodes with zeros and append the leaves and subtrees.
|
// all the to-be-built internal nodes with zeros and append the leaves and subtrees.
|
||||||
let internal_node_bytes = offset_handler.num_internal_nodes * BYTES_PER_CHUNK;
|
let internal_node_bytes = overlay.num_internal_nodes() * BYTES_PER_CHUNK;
|
||||||
let leaves_and_subtrees_bytes = leaves_and_subtrees
|
let leaves_and_subtrees_bytes = leaves_and_subtrees
|
||||||
.iter()
|
.iter()
|
||||||
.fold(0, |acc, t| acc + t.bytes_len());
|
.fold(0, |acc, t| acc + t.bytes_len());
|
||||||
@ -169,13 +178,19 @@ impl TreeHashCache {
|
|||||||
cache.resize(internal_node_bytes, 0);
|
cache.resize(internal_node_bytes, 0);
|
||||||
|
|
||||||
// Allocate enough bytes to store all the leaves.
|
// Allocate enough bytes to store all the leaves.
|
||||||
let mut leaves = Vec::with_capacity(offset_handler.num_leaf_nodes * HASHSIZE);
|
let mut leaves = Vec::with_capacity(overlay.num_leaf_nodes() * HASHSIZE);
|
||||||
|
let mut overlays = Vec::with_capacity(leaves_and_subtrees.len());
|
||||||
|
overlays.push(overlay);
|
||||||
|
|
||||||
// Iterate through all of the leaves/subtrees, adding their root as a leaf node and then
|
// Iterate through all of the leaves/subtrees, adding their root as a leaf node and then
|
||||||
// concatenating their merkle trees.
|
// concatenating their merkle trees.
|
||||||
for t in leaves_and_subtrees {
|
for t in leaves_and_subtrees {
|
||||||
leaves.append(&mut t.root().ok_or_else(|| Error::NoBytesForRoot)?.to_vec());
|
leaves.append(&mut t.root().ok_or_else(|| Error::NoBytesForRoot)?.to_vec());
|
||||||
cache.append(&mut t.into_merkle_tree());
|
|
||||||
|
let (mut bytes, _bools, mut t_overlays) = t.into_components();
|
||||||
|
|
||||||
|
cache.append(&mut bytes);
|
||||||
|
overlays.append(&mut t_overlays);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Pad the leaves to an even power-of-two, using zeros.
|
// Pad the leaves to an even power-of-two, using zeros.
|
||||||
@ -190,10 +205,17 @@ impl TreeHashCache {
|
|||||||
Ok(Self {
|
Ok(Self {
|
||||||
chunk_modified: vec![false; cache.len() / BYTES_PER_CHUNK],
|
chunk_modified: vec![false; cache.len() / BYTES_PER_CHUNK],
|
||||||
cache,
|
cache,
|
||||||
|
overlays,
|
||||||
|
chunk_index: 0,
|
||||||
|
overlay_index: 0,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn from_bytes(bytes: Vec<u8>, initial_modified_state: bool) -> Result<Self, Error> {
|
pub fn from_bytes(
|
||||||
|
bytes: Vec<u8>,
|
||||||
|
initial_modified_state: bool,
|
||||||
|
overlay: BTreeOverlay,
|
||||||
|
) -> Result<Self, Error> {
|
||||||
if bytes.len() % BYTES_PER_CHUNK > 0 {
|
if bytes.len() % BYTES_PER_CHUNK > 0 {
|
||||||
return Err(Error::BytesAreNotEvenChunks(bytes.len()));
|
return Err(Error::BytesAreNotEvenChunks(bytes.len()));
|
||||||
}
|
}
|
||||||
@ -201,9 +223,84 @@ impl TreeHashCache {
|
|||||||
Ok(Self {
|
Ok(Self {
|
||||||
chunk_modified: vec![initial_modified_state; bytes.len() / BYTES_PER_CHUNK],
|
chunk_modified: vec![initial_modified_state; bytes.len() / BYTES_PER_CHUNK],
|
||||||
cache: bytes,
|
cache: bytes,
|
||||||
|
overlays: vec![overlay],
|
||||||
|
chunk_index: 0,
|
||||||
|
overlay_index: 0,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_overlay(
|
||||||
|
&self,
|
||||||
|
overlay_index: usize,
|
||||||
|
chunk_index: usize,
|
||||||
|
) -> Result<BTreeOverlay, Error> {
|
||||||
|
let mut overlay = self
|
||||||
|
.overlays
|
||||||
|
.get(overlay_index)
|
||||||
|
.ok_or_else(|| Error::NoOverlayForIndex(overlay_index))?
|
||||||
|
.clone();
|
||||||
|
|
||||||
|
overlay.offset = chunk_index;
|
||||||
|
|
||||||
|
Ok(overlay)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn replace_overlay(
|
||||||
|
&mut self,
|
||||||
|
overlay_index: usize,
|
||||||
|
new_overlay: BTreeOverlay,
|
||||||
|
) -> Result<BTreeOverlay, Error> {
|
||||||
|
let old_overlay = self
|
||||||
|
.overlays
|
||||||
|
.get(overlay_index)
|
||||||
|
.ok_or_else(|| Error::NoOverlayForIndex(overlay_index))?;
|
||||||
|
|
||||||
|
// Get slices of the exsiting tree from the cache.
|
||||||
|
let (old_bytes, old_flags) = self
|
||||||
|
.slices(old_overlay.chunk_range())
|
||||||
|
.ok_or_else(|| Error::UnableToObtainSlices)?;
|
||||||
|
|
||||||
|
let (new_bytes, new_bools) = if new_overlay.num_leaf_nodes() > old_overlay.num_leaf_nodes()
|
||||||
|
{
|
||||||
|
resize::grow_merkle_cache(
|
||||||
|
old_bytes,
|
||||||
|
old_flags,
|
||||||
|
old_overlay.height(),
|
||||||
|
new_overlay.height(),
|
||||||
|
)
|
||||||
|
.ok_or_else(|| Error::UnableToGrowMerkleTree)?
|
||||||
|
} else {
|
||||||
|
resize::shrink_merkle_cache(
|
||||||
|
old_bytes,
|
||||||
|
old_flags,
|
||||||
|
old_overlay.height(),
|
||||||
|
new_overlay.height(),
|
||||||
|
new_overlay.total_chunks(),
|
||||||
|
)
|
||||||
|
.ok_or_else(|| Error::UnableToShrinkMerkleTree)?
|
||||||
|
};
|
||||||
|
|
||||||
|
// Splice the newly created `TreeHashCache` over the existing elements.
|
||||||
|
self.splice(old_overlay.chunk_range(), new_bytes, new_bools);
|
||||||
|
|
||||||
|
Ok(std::mem::replace(
|
||||||
|
&mut self.overlays[overlay_index],
|
||||||
|
new_overlay,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update_internal_nodes(&mut self, overlay: &BTreeOverlay) -> Result<(), Error> {
|
||||||
|
for (parent, children) in overlay.internal_parents_and_children().into_iter().rev() {
|
||||||
|
dbg!(parent);
|
||||||
|
dbg!(&children);
|
||||||
|
if self.either_modified(children)? {
|
||||||
|
self.modify_chunk(parent, &self.hash_children(children)?)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
pub fn bytes_len(&self) -> usize {
|
pub fn bytes_len(&self) -> usize {
|
||||||
self.cache.len()
|
self.cache.len()
|
||||||
}
|
}
|
||||||
@ -212,9 +309,7 @@ impl TreeHashCache {
|
|||||||
self.cache.get(0..HASHSIZE)
|
self.cache.get(0..HASHSIZE)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn splice(&mut self, chunk_range: Range<usize>, replace_with: Self) {
|
pub fn splice(&mut self, chunk_range: Range<usize>, bytes: Vec<u8>, bools: Vec<bool>) {
|
||||||
let (bytes, bools) = replace_with.into_components();
|
|
||||||
|
|
||||||
// Update the `chunk_modified` vec, marking all spliced-in nodes as changed.
|
// Update the `chunk_modified` vec, marking all spliced-in nodes as changed.
|
||||||
self.chunk_modified.splice(chunk_range.clone(), bools);
|
self.chunk_modified.splice(chunk_range.clone(), bools);
|
||||||
self.cache
|
self.cache
|
||||||
@ -278,14 +373,14 @@ impl TreeHashCache {
|
|||||||
.ok_or_else(|| Error::NoModifiedFieldForChunk(chunk))
|
.ok_or_else(|| Error::NoModifiedFieldForChunk(chunk))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn either_modified(&self, children: (&usize, &usize)) -> Result<bool, Error> {
|
pub fn either_modified(&self, children: (usize, usize)) -> Result<bool, Error> {
|
||||||
Ok(self.changed(*children.0)? | self.changed(*children.1)?)
|
Ok(self.changed(children.0)? | self.changed(children.1)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn hash_children(&self, children: (&usize, &usize)) -> Result<Vec<u8>, Error> {
|
pub fn hash_children(&self, children: (usize, usize)) -> Result<Vec<u8>, Error> {
|
||||||
let mut child_bytes = Vec::with_capacity(BYTES_PER_CHUNK * 2);
|
let mut child_bytes = Vec::with_capacity(BYTES_PER_CHUNK * 2);
|
||||||
child_bytes.append(&mut self.get_chunk(*children.0)?.to_vec());
|
child_bytes.append(&mut self.get_chunk(children.0)?.to_vec());
|
||||||
child_bytes.append(&mut self.get_chunk(*children.1)?.to_vec());
|
child_bytes.append(&mut self.get_chunk(children.1)?.to_vec());
|
||||||
|
|
||||||
Ok(hash(&child_bytes))
|
Ok(hash(&child_bytes))
|
||||||
}
|
}
|
||||||
@ -299,11 +394,7 @@ impl TreeHashCache {
|
|||||||
Ok(hash(&bytes))
|
Ok(hash(&bytes))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn into_merkle_tree(self) -> Vec<u8> {
|
pub fn into_components(self) -> (Vec<u8>, Vec<bool>, Vec<BTreeOverlay>) {
|
||||||
self.cache
|
(self.cache, self.chunk_modified, self.overlays)
|
||||||
}
|
|
||||||
|
|
||||||
pub fn into_components(self) -> (Vec<u8>, Vec<bool>) {
|
|
||||||
(self.cache, self.chunk_modified)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,12 +1,9 @@
|
|||||||
use super::*;
|
use super::*;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug, PartialEq, Clone)]
|
||||||
pub struct BTreeOverlay {
|
pub struct BTreeOverlay {
|
||||||
pub num_internal_nodes: usize,
|
pub offset: usize,
|
||||||
pub num_leaf_nodes: usize,
|
lengths: Vec<usize>,
|
||||||
pub first_node: usize,
|
|
||||||
pub next_node: usize,
|
|
||||||
offsets: Vec<usize>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl BTreeOverlay {
|
impl BTreeOverlay {
|
||||||
@ -17,84 +14,87 @@ impl BTreeOverlay {
|
|||||||
item.tree_hash_cache_overlay(initial_offset)
|
item.tree_hash_cache_overlay(initial_offset)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn from_lengths(offset: usize, mut lengths: Vec<usize>) -> Result<Self, Error> {
|
pub fn from_lengths(offset: usize, lengths: Vec<usize>) -> Result<Self, Error> {
|
||||||
// Extend it to the next power-of-two, if it is not already.
|
if lengths.is_empty() {
|
||||||
let num_leaf_nodes = if lengths.len().is_power_of_two() {
|
Err(Error::TreeCannotHaveZeroNodes)
|
||||||
lengths.len()
|
|
||||||
} else {
|
} else {
|
||||||
let num_leaf_nodes = lengths.len().next_power_of_two();
|
Ok(Self { offset, lengths })
|
||||||
lengths.resize(num_leaf_nodes, 1);
|
}
|
||||||
num_leaf_nodes
|
|
||||||
};
|
|
||||||
|
|
||||||
let num_nodes = num_nodes(num_leaf_nodes);
|
|
||||||
let num_internal_nodes = num_nodes - num_leaf_nodes;
|
|
||||||
|
|
||||||
let mut offsets = Vec::with_capacity(num_nodes);
|
|
||||||
offsets.append(&mut (offset..offset + num_internal_nodes).collect());
|
|
||||||
|
|
||||||
let mut next_node = num_internal_nodes + offset;
|
|
||||||
for i in 0..num_leaf_nodes {
|
|
||||||
offsets.push(next_node);
|
|
||||||
next_node += lengths[i];
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Self {
|
pub fn num_leaf_nodes(&self) -> usize {
|
||||||
num_internal_nodes,
|
self.lengths.len().next_power_of_two()
|
||||||
num_leaf_nodes,
|
}
|
||||||
offsets,
|
|
||||||
first_node: offset,
|
fn num_padding_leaves(&self) -> usize {
|
||||||
next_node,
|
self.num_leaf_nodes() - self.lengths.len()
|
||||||
})
|
}
|
||||||
|
|
||||||
|
pub fn num_nodes(&self) -> usize {
|
||||||
|
2 * self.num_leaf_nodes() - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn num_internal_nodes(&self) -> usize {
|
||||||
|
self.num_leaf_nodes() - 1
|
||||||
|
}
|
||||||
|
|
||||||
|
fn first_node(&self) -> usize {
|
||||||
|
self.offset
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn root(&self) -> usize {
|
pub fn root(&self) -> usize {
|
||||||
self.first_node
|
self.first_node()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn next_node(&self) -> usize {
|
||||||
|
self.first_node() + self.lengths.iter().sum::<usize>()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn height(&self) -> usize {
|
pub fn height(&self) -> usize {
|
||||||
self.num_leaf_nodes.trailing_zeros() as usize
|
self.num_leaf_nodes().trailing_zeros() as usize
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn chunk_range(&self) -> Range<usize> {
|
pub fn chunk_range(&self) -> Range<usize> {
|
||||||
self.first_node..self.next_node
|
self.first_node()..self.next_node()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn total_chunks(&self) -> usize {
|
pub fn total_chunks(&self) -> usize {
|
||||||
self.next_node - self.first_node
|
self.next_node() - self.first_node()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn total_nodes(&self) -> usize {
|
pub fn first_leaf_node(&self) -> usize {
|
||||||
self.num_internal_nodes + self.num_leaf_nodes
|
self.offset + self.num_internal_nodes()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn first_leaf_node(&self) -> Result<usize, Error> {
|
pub fn get_leaf_node(&self, i: usize) -> Result<Option<Range<usize>>, Error> {
|
||||||
self.offsets
|
if i >= self.num_leaf_nodes() {
|
||||||
.get(self.num_internal_nodes)
|
return Err(Error::NotLeafNode(i));
|
||||||
.cloned()
|
} else if i >= self.num_leaf_nodes() - self.num_padding_leaves() {
|
||||||
.ok_or_else(|| Error::NoFirstNode)
|
Ok(None)
|
||||||
|
} else {
|
||||||
|
let first_node = self.offset + self.lengths.iter().take(i).sum::<usize>();
|
||||||
|
let last_node = first_node + self.lengths[i];
|
||||||
|
Ok(Some(first_node..last_node))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns an iterator visiting each internal node, providing the left and right child chunks
|
/// Returns an iterator visiting each internal node, providing the left and right child chunks
|
||||||
/// for the node.
|
/// for the node.
|
||||||
pub fn iter_internal_nodes<'a>(
|
pub fn internal_parents_and_children(&self) -> Vec<(usize, (usize, usize))> {
|
||||||
&'a self,
|
(0..self.num_internal_nodes())
|
||||||
) -> impl DoubleEndedIterator<Item = (&'a usize, (&'a usize, &'a usize))> {
|
.into_iter()
|
||||||
let internal_nodes = &self.offsets[0..self.num_internal_nodes];
|
.map(|parent| {
|
||||||
|
let children = children(parent);
|
||||||
internal_nodes.iter().enumerate().map(move |(i, parent)| {
|
|
||||||
let children = children(i);
|
|
||||||
(
|
(
|
||||||
parent,
|
parent + self.offset,
|
||||||
(&self.offsets[children.0], &self.offsets[children.1]),
|
(children.0 + self.offset, children.1 + self.offset),
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
|
.collect()
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns an iterator visiting each leaf node, providing the chunk for that node.
|
// Returns a `Vec` of chunk indices for each internal node of the tree.
|
||||||
pub fn iter_leaf_nodes<'a>(&'a self) -> impl DoubleEndedIterator<Item = &'a usize> {
|
pub fn internal_node_chunks(&self) -> Vec<usize> {
|
||||||
let leaf_nodes = &self.offsets[self.num_internal_nodes..];
|
(self.offset..self.offset + self.num_internal_nodes()).collect()
|
||||||
|
|
||||||
leaf_nodes.iter()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -8,6 +8,7 @@ impl CachedTreeHashSubTree<u64> for u64 {
|
|||||||
Ok(TreeHashCache::from_bytes(
|
Ok(TreeHashCache::from_bytes(
|
||||||
merkleize(self.to_le_bytes().to_vec()),
|
merkleize(self.to_le_bytes().to_vec()),
|
||||||
false,
|
false,
|
||||||
|
self.tree_hash_cache_overlay(0)?,
|
||||||
)?)
|
)?)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -15,17 +16,13 @@ impl CachedTreeHashSubTree<u64> for u64 {
|
|||||||
BTreeOverlay::from_lengths(chunk_offset, vec![1])
|
BTreeOverlay::from_lengths(chunk_offset, vec![1])
|
||||||
}
|
}
|
||||||
|
|
||||||
fn update_tree_hash_cache(
|
fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
|
||||||
&self,
|
|
||||||
other: &Self,
|
|
||||||
cache: &mut TreeHashCache,
|
|
||||||
chunk: usize,
|
|
||||||
) -> Result<usize, Error> {
|
|
||||||
if self != other {
|
|
||||||
let leaf = merkleize(self.to_le_bytes().to_vec());
|
let leaf = merkleize(self.to_le_bytes().to_vec());
|
||||||
cache.modify_chunk(chunk, &leaf)?;
|
cache.maybe_update_chunk(cache.chunk_index, &leaf)?;
|
||||||
}
|
|
||||||
|
|
||||||
Ok(chunk + 1)
|
cache.chunk_index += 1;
|
||||||
|
cache.overlay_index += 1;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -5,10 +5,14 @@ where
|
|||||||
T: CachedTreeHashSubTree<T> + TreeHash,
|
T: CachedTreeHashSubTree<T> + TreeHash,
|
||||||
{
|
{
|
||||||
fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
|
fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
|
||||||
match T::tree_hash_type() {
|
let overlay = self.tree_hash_cache_overlay(0)?;
|
||||||
TreeHashType::Basic => {
|
|
||||||
TreeHashCache::from_bytes(merkleize(get_packed_leaves(self)?), false)
|
let mut cache = match T::tree_hash_type() {
|
||||||
}
|
TreeHashType::Basic => TreeHashCache::from_bytes(
|
||||||
|
merkleize(get_packed_leaves(self)?),
|
||||||
|
false,
|
||||||
|
overlay.clone(),
|
||||||
|
),
|
||||||
TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
|
TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
|
||||||
let subtrees = self
|
let subtrees = self
|
||||||
.iter()
|
.iter()
|
||||||
@ -17,17 +21,29 @@ where
|
|||||||
|
|
||||||
TreeHashCache::from_leaves_and_subtrees(self, subtrees)
|
TreeHashCache::from_leaves_and_subtrees(self, subtrees)
|
||||||
}
|
}
|
||||||
}
|
}?;
|
||||||
|
|
||||||
|
// Mix in the length of the list.
|
||||||
|
let root_node = overlay.root();
|
||||||
|
cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?;
|
||||||
|
|
||||||
|
Ok(cache)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
||||||
let lengths = match T::tree_hash_type() {
|
let lengths = match T::tree_hash_type() {
|
||||||
TreeHashType::Basic => vec![1; self.len() / T::tree_hash_packing_factor()],
|
TreeHashType::Basic => {
|
||||||
|
// Ceil division.
|
||||||
|
let num_leaves = (self.len() + T::tree_hash_packing_factor() - 1)
|
||||||
|
/ T::tree_hash_packing_factor();
|
||||||
|
|
||||||
|
vec![1; num_leaves]
|
||||||
|
}
|
||||||
TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
|
TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
|
||||||
let mut lengths = vec![];
|
let mut lengths = vec![];
|
||||||
|
|
||||||
for item in self {
|
for item in self {
|
||||||
lengths.push(BTreeOverlay::new(item, 0)?.total_nodes())
|
lengths.push(BTreeOverlay::new(item, 0)?.num_nodes())
|
||||||
}
|
}
|
||||||
|
|
||||||
lengths
|
lengths
|
||||||
@ -37,120 +53,93 @@ where
|
|||||||
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn update_tree_hash_cache(
|
fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
|
||||||
&self,
|
let new_overlay = BTreeOverlay::new(self, cache.chunk_index)?;
|
||||||
other: &Vec<T>,
|
let old_overlay = cache
|
||||||
cache: &mut TreeHashCache,
|
.get_overlay(cache.overlay_index, cache.chunk_index)?
|
||||||
chunk: usize,
|
.clone();
|
||||||
) -> Result<usize, Error> {
|
|
||||||
let offset_handler = BTreeOverlay::new(self, chunk)?;
|
|
||||||
let old_offset_handler = BTreeOverlay::new(other, chunk)?;
|
|
||||||
|
|
||||||
if offset_handler.num_leaf_nodes != old_offset_handler.num_leaf_nodes {
|
// If the merkle tree required to represent the new list is of a different size to the one
|
||||||
let old_offset_handler = BTreeOverlay::new(other, chunk)?;
|
// required for the previous list, then update our cache.
|
||||||
|
//
|
||||||
// Get slices of the exsiting tree from the cache.
|
// This grows/shrinks the bytes to accomodate the new tree, preserving as much of the tree
|
||||||
let (old_bytes, old_flags) = cache
|
// as possible.
|
||||||
.slices(old_offset_handler.chunk_range())
|
if new_overlay.num_leaf_nodes() != old_overlay.num_leaf_nodes() {
|
||||||
.ok_or_else(|| Error::UnableToObtainSlices)?;
|
cache.replace_overlay(cache.overlay_index, new_overlay.clone())?;
|
||||||
|
|
||||||
let (new_bytes, new_flags) =
|
|
||||||
if offset_handler.num_leaf_nodes > old_offset_handler.num_leaf_nodes {
|
|
||||||
grow_merkle_cache(
|
|
||||||
old_bytes,
|
|
||||||
old_flags,
|
|
||||||
old_offset_handler.height(),
|
|
||||||
offset_handler.height(),
|
|
||||||
)
|
|
||||||
.ok_or_else(|| Error::UnableToGrowMerkleTree)?
|
|
||||||
} else {
|
|
||||||
shrink_merkle_cache(
|
|
||||||
old_bytes,
|
|
||||||
old_flags,
|
|
||||||
old_offset_handler.height(),
|
|
||||||
offset_handler.height(),
|
|
||||||
offset_handler.total_chunks(),
|
|
||||||
)
|
|
||||||
.ok_or_else(|| Error::UnableToShrinkMerkleTree)?
|
|
||||||
};
|
|
||||||
|
|
||||||
// Create a `TreeHashCache` from the raw elements.
|
|
||||||
let modified_cache = TreeHashCache::from_elems(new_bytes, new_flags);
|
|
||||||
|
|
||||||
// Splice the newly created `TreeHashCache` over the existing elements.
|
|
||||||
cache.splice(old_offset_handler.chunk_range(), modified_cache);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
match T::tree_hash_type() {
|
match T::tree_hash_type() {
|
||||||
TreeHashType::Basic => {
|
TreeHashType::Basic => {
|
||||||
let leaves = get_packed_leaves(self)?;
|
let mut buf = vec![0; HASHSIZE];
|
||||||
|
let item_bytes = HASHSIZE / T::tree_hash_packing_factor();
|
||||||
|
|
||||||
for (i, chunk) in offset_handler.iter_leaf_nodes().enumerate() {
|
// Iterate through each of the leaf nodes.
|
||||||
if let Some(latest) = leaves.get(i * HASHSIZE..(i + 1) * HASHSIZE) {
|
for i in 0..new_overlay.num_leaf_nodes() {
|
||||||
cache.maybe_update_chunk(*chunk, latest)?;
|
// Iterate through the number of items that may be packing into the leaf node.
|
||||||
|
for j in 0..T::tree_hash_packing_factor() {
|
||||||
|
// Create a mut slice that can either be filled with a serialized item or
|
||||||
|
// padding.
|
||||||
|
let buf_slice = &mut buf[j * item_bytes..(j + 1) * item_bytes];
|
||||||
|
|
||||||
|
// Attempt to get the item for this portion of the chunk. If it exists,
|
||||||
|
// update `buf` with it's serialized bytes. If it doesn't exist, update
|
||||||
|
// `buf` with padding.
|
||||||
|
match self.get(i * T::tree_hash_packing_factor() + j) {
|
||||||
|
Some(item) => {
|
||||||
|
buf_slice.copy_from_slice(&item.tree_hash_packed_encoding());
|
||||||
|
}
|
||||||
|
None => buf_slice.copy_from_slice(&vec![0; item_bytes]),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let first_leaf_chunk = offset_handler.first_leaf_node()?;
|
|
||||||
|
|
||||||
cache.splice(
|
// Update the chunk if the generated `buf` is not the same as the cache.
|
||||||
first_leaf_chunk..offset_handler.next_node,
|
let chunk = new_overlay.first_leaf_node() + i;
|
||||||
TreeHashCache::from_bytes(leaves, true)?,
|
cache.maybe_update_chunk(chunk, &buf)?;
|
||||||
);
|
}
|
||||||
}
|
}
|
||||||
TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
|
TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
|
||||||
let mut i = offset_handler.num_leaf_nodes;
|
for i in (0..new_overlay.num_leaf_nodes()).rev() {
|
||||||
for &start_chunk in offset_handler.iter_leaf_nodes().rev() {
|
match (old_overlay.get_leaf_node(i)?, new_overlay.get_leaf_node(i)?) {
|
||||||
i -= 1;
|
// The item existed in the previous list and exists in the current list.
|
||||||
match (other.get(i), self.get(i)) {
|
(Some(_old), Some(new)) => {
|
||||||
// The item existed in the previous list and exsits in the current list.
|
cache.chunk_index = new.start;
|
||||||
(Some(old), Some(new)) => {
|
self[i].update_tree_hash_cache(cache)?;
|
||||||
new.update_tree_hash_cache(old, cache, start_chunk)?;
|
|
||||||
}
|
}
|
||||||
// The item existed in the previous list but does not exist in this list.
|
// The item existed in the previous list but does not exist in this list.
|
||||||
//
|
//
|
||||||
// I.e., the list has been shortened.
|
// Viz., the list has been shortened.
|
||||||
(Some(old), None) => {
|
(Some(old), None) => {
|
||||||
// Splice out the entire tree of the removed node, replacing it with a
|
// Splice out the entire tree of the removed node, replacing it with a
|
||||||
// single padding node.
|
// single padding node.
|
||||||
let end_chunk = BTreeOverlay::new(old, start_chunk)?.next_node;
|
cache.splice(old, vec![0; HASHSIZE], vec![true]);
|
||||||
|
|
||||||
cache.splice(
|
|
||||||
start_chunk..end_chunk,
|
|
||||||
TreeHashCache::from_bytes(vec![0; HASHSIZE], true)?,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
// The item existed in the previous list but does exist in this list.
|
// The item did not exist in the previous list but does exist in this list.
|
||||||
//
|
//
|
||||||
// I.e., the list has been lengthened.
|
// Viz., the list has been lengthened.
|
||||||
(None, Some(new)) => {
|
(None, Some(new)) => {
|
||||||
let bytes: Vec<u8> = TreeHashCache::new(new)?.into();
|
let bytes: Vec<u8> = TreeHashCache::new(&self[i])?.into();
|
||||||
|
let bools = vec![true; bytes.len() / HASHSIZE];
|
||||||
|
|
||||||
cache.splice(
|
cache.splice(new.start..new.start + 1, bytes, bools);
|
||||||
start_chunk..start_chunk + 1,
|
|
||||||
TreeHashCache::from_bytes(bytes, true)?,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
// The item didn't exist in the old list and doesn't exist in the new list,
|
// The item didn't exist in the old list and doesn't exist in the new list,
|
||||||
// nothing to do.
|
// nothing to do.
|
||||||
(None, None) => {}
|
(None, None) => {}
|
||||||
};
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
|
cache.update_internal_nodes(&new_overlay)?;
|
||||||
if cache.either_modified(children)? {
|
|
||||||
cache.modify_chunk(parent, &cache.hash_children(children)?)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the root node or the length has changed, mix in the length of the list.
|
// Always update the root node as we don't have a reliable check to know if the list len
|
||||||
let root_node = offset_handler.root();
|
// has changed.
|
||||||
if cache.changed(root_node)? | (self.len() != other.len()) {
|
let root_node = new_overlay.root();
|
||||||
cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?;
|
cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?;
|
||||||
}
|
|
||||||
|
|
||||||
Ok(offset_handler.next_node)
|
cache.chunk_index = new_overlay.next_node();
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,9 +1,275 @@
|
|||||||
use hashing::hash;
|
use int_to_bytes::int_to_bytes32;
|
||||||
use int_to_bytes::{int_to_bytes32, int_to_bytes8};
|
|
||||||
use tree_hash::cached_tree_hash::*;
|
use tree_hash::cached_tree_hash::*;
|
||||||
use tree_hash::standard_tree_hash::*;
|
use tree_hash::standard_tree_hash::*;
|
||||||
use tree_hash::*;
|
use tree_hash::*;
|
||||||
|
use tree_hash_derive::{CachedTreeHashSubTree, TreeHash};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
|
||||||
|
pub struct Nested {
|
||||||
|
pub a: u64,
|
||||||
|
pub b: Inner,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, TreeHash, CachedTreeHashSubTree)]
|
||||||
|
pub struct Thing {
|
||||||
|
pub a: u64,
|
||||||
|
pub b: Inner,
|
||||||
|
pub c: Vec<u64>,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn test_routine<T>(original: T, modified: Vec<T>)
|
||||||
|
where
|
||||||
|
T: CachedTreeHashSubTree<T>,
|
||||||
|
{
|
||||||
|
let mut cache = original.new_tree_hash_cache().unwrap();
|
||||||
|
|
||||||
|
let standard_root = original.tree_hash_root();
|
||||||
|
let cached_root = cache.root().unwrap().to_vec();
|
||||||
|
assert_eq!(standard_root, cached_root, "Initial cache build failed.");
|
||||||
|
|
||||||
|
for (i, modified) in modified.iter().enumerate() {
|
||||||
|
// Test after a modification
|
||||||
|
modified.update_tree_hash_cache(&mut cache).unwrap();
|
||||||
|
let standard_root = modified.tree_hash_root();
|
||||||
|
let cached_root = cache.root().unwrap().to_vec();
|
||||||
|
assert_eq!(standard_root, cached_root, "Modification {} failed.", i);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_nested() {
|
||||||
|
let original = Nested {
|
||||||
|
a: 42,
|
||||||
|
b: Inner {
|
||||||
|
a: 12,
|
||||||
|
b: 13,
|
||||||
|
c: 14,
|
||||||
|
d: 15,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
let modified = vec![Nested {
|
||||||
|
a: 99,
|
||||||
|
..original.clone()
|
||||||
|
}];
|
||||||
|
|
||||||
|
test_routine(original, modified);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_inner() {
|
||||||
|
let original = Inner {
|
||||||
|
a: 12,
|
||||||
|
b: 13,
|
||||||
|
c: 14,
|
||||||
|
d: 15,
|
||||||
|
};
|
||||||
|
|
||||||
|
let modified = vec![Inner {
|
||||||
|
a: 99,
|
||||||
|
..original.clone()
|
||||||
|
}];
|
||||||
|
|
||||||
|
test_routine(original, modified);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Cached-hash round trip for a struct containing a leaf, a nested
/// container, and a `Vec` field.
#[test]
fn test_thing() {
    let original = Thing {
        a: 42,
        b: Inner {
            a: 12,
            b: 13,
            c: 14,
            d: 15,
        },
        c: vec![1, 2, 3, 4, 5],
    };

    // Change only the leaf field.
    let mut updated = original.clone();
    updated.a = 99;

    test_routine(original, vec![updated]);
}
/// Cached-hash round trip for a bare `Vec`, with one element changed.
#[test]
fn test_vec() {
    let original = vec![1, 2, 3, 4, 5];

    // Same list with the final element replaced.
    let mut updated = original.clone();
    updated[4] = 42;

    test_routine(original, vec![updated]);
}
/// Basic four-field test struct; every field is a `u64` leaf, so its
/// hand-written `TreeHash` impl below merkleizes four leaf roots.
#[derive(Clone, Debug)]
pub struct Inner {
    pub a: u64,
    pub b: u64,
    pub c: u64,
    pub d: u64,
}
impl TreeHash for Inner {
|
||||||
|
fn tree_hash_type() -> TreeHashType {
|
||||||
|
TreeHashType::Container
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
|
||||||
|
unreachable!("Struct should never be packed.")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tree_hash_packing_factor() -> usize {
|
||||||
|
unreachable!("Struct should never be packed.")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tree_hash_root(&self) -> Vec<u8> {
|
||||||
|
let mut leaves = Vec::with_capacity(4 * HASHSIZE);
|
||||||
|
|
||||||
|
leaves.append(&mut self.a.tree_hash_root());
|
||||||
|
leaves.append(&mut self.b.tree_hash_root());
|
||||||
|
leaves.append(&mut self.c.tree_hash_root());
|
||||||
|
leaves.append(&mut self.d.tree_hash_root());
|
||||||
|
|
||||||
|
efficient_merkleize(&leaves)[0..32].to_vec()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CachedTreeHashSubTree<Inner> for Inner {
|
||||||
|
fn new_tree_hash_cache(&self) -> Result<TreeHashCache, Error> {
|
||||||
|
let tree = TreeHashCache::from_leaves_and_subtrees(
|
||||||
|
self,
|
||||||
|
vec![
|
||||||
|
self.a.new_tree_hash_cache()?,
|
||||||
|
self.b.new_tree_hash_cache()?,
|
||||||
|
self.c.new_tree_hash_cache()?,
|
||||||
|
self.d.new_tree_hash_cache()?,
|
||||||
|
],
|
||||||
|
)?;
|
||||||
|
|
||||||
|
Ok(tree)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
||||||
|
let mut lengths = vec![];
|
||||||
|
|
||||||
|
lengths.push(BTreeOverlay::new(&self.a, 0)?.num_nodes());
|
||||||
|
lengths.push(BTreeOverlay::new(&self.b, 0)?.num_nodes());
|
||||||
|
lengths.push(BTreeOverlay::new(&self.c, 0)?.num_nodes());
|
||||||
|
lengths.push(BTreeOverlay::new(&self.d, 0)?.num_nodes());
|
||||||
|
|
||||||
|
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
|
||||||
|
let overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
|
||||||
|
dbg!(&overlay);
|
||||||
|
|
||||||
|
// Skip the chunk index to the first leaf node of this struct.
|
||||||
|
cache.chunk_index = overlay.first_leaf_node();
|
||||||
|
// Skip the overlay index to the first leaf node of this struct.
|
||||||
|
cache.overlay_index += 1;
|
||||||
|
|
||||||
|
// Recurse into the struct items, updating their caches.
|
||||||
|
self.a.update_tree_hash_cache(cache)?;
|
||||||
|
self.b.update_tree_hash_cache(cache)?;
|
||||||
|
self.c.update_tree_hash_cache(cache)?;
|
||||||
|
self.d.update_tree_hash_cache(cache)?;
|
||||||
|
|
||||||
|
// Iterate through the internal nodes, updating them if their children have changed.
|
||||||
|
cache.update_internal_nodes(&overlay)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn generic_test(index: usize) {
|
||||||
|
let inner = Inner {
|
||||||
|
a: 1,
|
||||||
|
b: 2,
|
||||||
|
c: 3,
|
||||||
|
d: 4,
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut cache = TreeHashCache::new(&inner).unwrap();
|
||||||
|
|
||||||
|
let changed_inner = match index {
|
||||||
|
0 => Inner {
|
||||||
|
a: 42,
|
||||||
|
..inner.clone()
|
||||||
|
},
|
||||||
|
1 => Inner {
|
||||||
|
b: 42,
|
||||||
|
..inner.clone()
|
||||||
|
},
|
||||||
|
2 => Inner {
|
||||||
|
c: 42,
|
||||||
|
..inner.clone()
|
||||||
|
},
|
||||||
|
3 => Inner {
|
||||||
|
d: 42,
|
||||||
|
..inner.clone()
|
||||||
|
},
|
||||||
|
_ => panic!("bad index"),
|
||||||
|
};
|
||||||
|
|
||||||
|
changed_inner.update_tree_hash_cache(&mut cache).unwrap();
|
||||||
|
|
||||||
|
let data1 = int_to_bytes32(1);
|
||||||
|
let data2 = int_to_bytes32(2);
|
||||||
|
let data3 = int_to_bytes32(3);
|
||||||
|
let data4 = int_to_bytes32(4);
|
||||||
|
|
||||||
|
let mut data = vec![data1, data2, data3, data4];
|
||||||
|
|
||||||
|
data[index] = int_to_bytes32(42);
|
||||||
|
|
||||||
|
let expected = merkleize(join(data));
|
||||||
|
|
||||||
|
let cache_bytes: Vec<u8> = cache.into();
|
||||||
|
|
||||||
|
assert_eq!(expected, cache_bytes);
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Runs the generic cache-update check once per field of `Inner`.
#[test]
fn cached_hash_on_inner() {
    for index in 0..4 {
        generic_test(index);
    }
}
/// Checks that a freshly built cache for `Inner` contains exactly the
/// merkleization of its four serialized leaves.
#[test]
fn inner_builds() {
    // Expected bytes: merkleize the four leaves directly.
    let leaves = join(vec![
        int_to_bytes32(1),
        int_to_bytes32(2),
        int_to_bytes32(3),
        int_to_bytes32(4),
    ]);
    let expected = merkleize(leaves);

    let inner = Inner {
        a: 1,
        b: 2,
        c: 3,
        d: 4,
    };

    let cache: Vec<u8> = TreeHashCache::new(&inner).unwrap().into();

    assert_eq!(expected, cache);
}
/// Flattens a list of byte vectors into one contiguous byte vector,
/// preserving order.
fn join(many: Vec<Vec<u8>>) -> Vec<u8> {
    // `many` is owned, so its elements can be moved straight into the
    // result; the previous implementation cloned each `Vec<u8>` (and took
    // a needless `&mut`) before copying it.
    many.into_iter().flatten().collect()
}
/*
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct InternalCache {
|
pub struct InternalCache {
|
||||||
pub a: u64,
|
pub a: u64,
|
||||||
@ -101,8 +367,8 @@ impl CachedTreeHashSubTree<InternalCache> for InternalCache {
|
|||||||
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
||||||
let mut lengths = vec![];
|
let mut lengths = vec![];
|
||||||
|
|
||||||
lengths.push(BTreeOverlay::new(&self.a, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.a, 0)?.num_nodes());
|
||||||
lengths.push(BTreeOverlay::new(&self.b, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.b, 0)?.num_nodes());
|
||||||
|
|
||||||
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
||||||
}
|
}
|
||||||
@ -187,10 +453,10 @@ impl CachedTreeHashSubTree<Inner> for Inner {
|
|||||||
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
||||||
let mut lengths = vec![];
|
let mut lengths = vec![];
|
||||||
|
|
||||||
lengths.push(BTreeOverlay::new(&self.a, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.a, 0)?.num_nodes());
|
||||||
lengths.push(BTreeOverlay::new(&self.b, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.b, 0)?.num_nodes());
|
||||||
lengths.push(BTreeOverlay::new(&self.c, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.c, 0)?.num_nodes());
|
||||||
lengths.push(BTreeOverlay::new(&self.d, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.d, 0)?.num_nodes());
|
||||||
|
|
||||||
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
||||||
}
|
}
|
||||||
@ -270,9 +536,9 @@ impl CachedTreeHashSubTree<Outer> for Outer {
|
|||||||
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
fn tree_hash_cache_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
|
||||||
let mut lengths = vec![];
|
let mut lengths = vec![];
|
||||||
|
|
||||||
lengths.push(BTreeOverlay::new(&self.a, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.a, 0)?.num_nodes());
|
||||||
lengths.push(BTreeOverlay::new(&self.b, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.b, 0)?.num_nodes());
|
||||||
lengths.push(BTreeOverlay::new(&self.c, 0)?.total_nodes());
|
lengths.push(BTreeOverlay::new(&self.c, 0)?.num_nodes());
|
||||||
|
|
||||||
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
BTreeOverlay::from_lengths(chunk_offset, lengths)
|
||||||
}
|
}
|
||||||
@ -1078,3 +1344,4 @@ fn merkleize_4_leaves() {
|
|||||||
assert_eq!(chunk, &expected[..], "failed at {}", i);
|
assert_eq!(chunk, &expected[..], "failed at {}", i);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
*/
|
||||||
|
@ -54,7 +54,6 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
|
|||||||
let idents_a = get_hashable_named_field_idents(&struct_data);
|
let idents_a = get_hashable_named_field_idents(&struct_data);
|
||||||
let idents_b = idents_a.clone();
|
let idents_b = idents_a.clone();
|
||||||
let idents_c = idents_a.clone();
|
let idents_c = idents_a.clone();
|
||||||
let idents_d = idents_a.clone();
|
|
||||||
|
|
||||||
let output = quote! {
|
let output = quote! {
|
||||||
impl tree_hash::CachedTreeHashSubTree<#name> for #name {
|
impl tree_hash::CachedTreeHashSubTree<#name> for #name {
|
||||||
@ -75,35 +74,29 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
|
|||||||
let mut lengths = vec![];
|
let mut lengths = vec![];
|
||||||
|
|
||||||
#(
|
#(
|
||||||
lengths.push(tree_hash::BTreeOverlay::new(&self.#idents_b, 0)?.total_nodes());
|
lengths.push(tree_hash::BTreeOverlay::new(&self.#idents_b, 0)?.num_nodes());
|
||||||
)*
|
)*
|
||||||
|
|
||||||
tree_hash::BTreeOverlay::from_lengths(chunk_offset, lengths)
|
tree_hash::BTreeOverlay::from_lengths(chunk_offset, lengths)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn update_tree_hash_cache(
|
fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
|
||||||
&self,
|
let overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
|
||||||
other: &Self,
|
|
||||||
cache: &mut tree_hash::TreeHashCache,
|
|
||||||
chunk: usize,
|
|
||||||
) -> Result<usize, tree_hash::Error> {
|
|
||||||
let offset_handler = tree_hash::BTreeOverlay::new(self, chunk)?;
|
|
||||||
|
|
||||||
// Skip past the internal nodes and update any changed leaf nodes.
|
// Skip the chunk index to the first leaf node of this struct.
|
||||||
{
|
cache.chunk_index = overlay.first_leaf_node();
|
||||||
let chunk = offset_handler.first_leaf_node()?;
|
// Skip the overlay index to the first leaf node of this struct.
|
||||||
|
cache.overlay_index += 1;
|
||||||
|
|
||||||
|
// Recurse into the struct items, updating their caches.
|
||||||
#(
|
#(
|
||||||
let chunk = self.#idents_c.update_tree_hash_cache(&other.#idents_d, cache, chunk)?;
|
self.#idents_c.update_tree_hash_cache(cache)?;
|
||||||
)*
|
)*
|
||||||
}
|
|
||||||
|
|
||||||
for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
|
// Iterate through the internal nodes, updating them if their children have changed.
|
||||||
if cache.either_modified(children)? {
|
cache.update_internal_nodes(&overlay)?;
|
||||||
cache.modify_chunk(parent, &cache.hash_children(children)?)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(offset_handler.next_node)
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
Loading…
Reference in New Issue
Block a user