Implement failing cache hash test

parent f21409fee1
commit 49639c40ee
@@ -1,4 +1,5 @@
 use hashing::hash;
+use std::iter::Iterator;

 mod impls;
 mod tests;

@@ -16,6 +17,8 @@ pub trait CachedTreeHash {
     /// prefixes.
     fn num_bytes(&self) -> usize;

+    fn num_child_nodes(&self) -> usize;
+
     fn cached_hash_tree_root(
         &self,
         other: &Self::Item,

@@ -81,15 +84,24 @@ impl TreeHashCache {
         self.chunk_modified.get(chunk).cloned()
     }

-    pub fn children_modified(&self, parent_chunk: usize) -> Option<bool> {
-        let children = children(parent_chunk);
-
-        Some(self.changed(children.0)? | self.changed(children.1)?)
+    pub fn either_modified(&self, children: (&usize, &usize)) -> Option<bool> {
+        dbg!(&self.chunk_modified.len());
+        dbg!(&self.cache.len() / BYTES_PER_CHUNK);
+        Some(self.changed(*children.0)? | self.changed(*children.1)?)
     }

-    pub fn hash_children(&self, parent_chunk: usize) -> Option<Vec<u8>> {
+    /*
+    pub fn children_modified(&self, parent_chunk: usize, child_offsets: &[usize]) -> Option<bool> {
+        let children = children(parent_chunk);
+
+        let a = *child_offsets.get(children.0)?;
+        let b = *child_offsets.get(children.1)?;
+
+        Some(self.changed(a)? | self.changed(b)?)
+    }
+    */
+
+    pub fn hash_children(&self, children: (&usize, &usize)) -> Option<Vec<u8>> {
        let start = children.0 * BYTES_PER_CHUNK;
        let end = start + BYTES_PER_CHUNK * 2;

@@ -97,6 +109,30 @@ impl TreeHashCache {
     }
 }

+/*
+pub struct LocalCache {
+    offsets: Vec<usize>,
+}
+
+impl LocalCache {
+
+}
+
+pub struct OffsetBTree {
+    offsets: Vec<usize>,
+}
+
+impl From<Vec<usize>> for OffsetBTree {
+    fn from(offsets: Vec<usize>) -> Self {
+        Self { offsets }
+    }
+}
+
+impl OffsetBTree {
+    fn
+}
+*/
+
 fn children(parent: usize) -> (usize, usize) {
     ((2 * parent + 1), (2 * parent + 2))
 }

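For readers skimming the diff, the cache these methods operate on is a flat byte buffer with one 32-byte chunk per tree node plus a parallel vector of dirty flags; a parent at index i finds its children at 2i + 1 and 2i + 2. Below is a minimal, self-contained sketch of that pattern. The field names (`cache`, `chunk_modified`) and the `changed` helper mirror their use elsewhere in this diff; everything else is illustrative only, not the crate's actual implementation.

const BYTES_PER_CHUNK: usize = 32;

// Child indices of a node in a flat, root-first binary tree (same scheme as `children` above).
fn children(parent: usize) -> (usize, usize) {
    ((2 * parent + 1), (2 * parent + 2))
}

struct ChunkCache {
    cache: Vec<u8>,            // one 32-byte chunk per tree node, root first
    chunk_modified: Vec<bool>, // parallel per-chunk dirty flags
}

impl ChunkCache {
    // Whether a chunk has been touched since the cache was built.
    fn changed(&self, chunk: usize) -> Option<bool> {
        self.chunk_modified.get(chunk).cloned()
    }

    // True if either child chunk is dirty, i.e. the parent must be rehashed.
    fn either_modified(&self, children: (usize, usize)) -> Option<bool> {
        Some(self.changed(children.0)? | self.changed(children.1)?)
    }

    // The concatenated bytes of two adjacent child chunks; hashing this 64-byte slice
    // yields the parent chunk (the real `hash_children` presumably does exactly that).
    fn child_bytes(&self, children: (usize, usize)) -> Option<Vec<u8>> {
        let start = children.0 * BYTES_PER_CHUNK;
        let end = start + BYTES_PER_CHUNK * 2;
        self.cache.get(start..end).map(|bytes| bytes.to_vec())
    }
}
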
@@ -105,6 +141,71 @@ fn num_nodes(num_leaves: usize) -> usize {
     2 * num_leaves - 1
 }

+pub struct OffsetHandler {
+    num_internal_nodes: usize,
+    num_leaf_nodes: usize,
+    next_node: usize,
+    offsets: Vec<usize>,
+}
+
+impl OffsetHandler {
+    fn from_lengths(offset: usize, mut lengths: Vec<usize>) -> Self {
+        // Extend it to the next power-of-two, if it is not already.
+        let num_leaf_nodes = if lengths.len().is_power_of_two() {
+            lengths.len()
+        } else {
+            let num_leaf_nodes = lengths.len().next_power_of_two();
+            lengths.resize(num_leaf_nodes, 1);
+            num_leaf_nodes
+        };
+
+        let num_nodes = num_nodes(num_leaf_nodes);
+        let num_internal_nodes = num_nodes - num_leaf_nodes;
+
+        let mut offsets = Vec::with_capacity(num_nodes);
+        offsets.append(&mut (offset..offset + num_internal_nodes).collect());
+
+        let mut next_node = num_internal_nodes + offset;
+        for i in 0..num_leaf_nodes {
+            offsets.push(next_node);
+            next_node += lengths[i];
+        }
+
+        Self {
+            num_internal_nodes,
+            num_leaf_nodes,
+            offsets,
+            next_node,
+        }
+    }
+
+    pub fn total_nodes(&self) -> usize {
+        self.num_internal_nodes + self.num_leaf_nodes
+    }
+
+    pub fn first_leaf_node(&self) -> Option<usize> {
+        self.offsets.get(self.num_internal_nodes).cloned()
+    }
+
+    pub fn next_node(&self) -> usize {
+        self.next_node
+    }
+
+    pub fn iter_internal_nodes<'a>(
+        &'a self,
+    ) -> impl DoubleEndedIterator<Item = (&'a usize, (&'a usize, &'a usize))> {
+        let internal_nodes = &self.offsets[0..self.num_internal_nodes];
+
+        internal_nodes.iter().enumerate().map(move |(i, parent)| {
+            let children = children(i);
+            (
+                parent,
+                (&self.offsets[children.0], &self.offsets[children.1]),
+            )
+        })
+    }
+}
+
 /// Split `values` into a power-of-two number of identical-length chunks (padding with `0`) and
 /// merkleize them, returning the entire merkle tree.
 ///

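To make the new OffsetHandler concrete, here is a small sanity check one could add alongside it. The expected values are derived only from the `from_lengths` code shown above (a parent rooted at chunk 0 with four single-chunk children), not from any other part of the codebase.

// Four single-chunk children rooted at chunk 0:
//   num_leaf_nodes = 4, num_nodes = 7, num_internal_nodes = 3,
//   offsets = [0, 1, 2, 3, 4, 5, 6] (three internal nodes, then the four leaves),
//   next_node = 7 (the first chunk after this subtree).
#[test]
fn offset_handler_four_single_chunk_leaves() {
    let handler = OffsetHandler::from_lengths(0, vec![1, 1, 1, 1]);

    assert_eq!(handler.total_nodes(), 7);
    assert_eq!(handler.first_leaf_node(), Some(3));
    assert_eq!(handler.next_node(), 7);
}
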
@@ -12,6 +12,10 @@ impl CachedTreeHash for u64 {
         8
     }

+    fn num_child_nodes(&self) -> usize {
+        0
+    }
+
     fn cached_hash_tree_root(
         &self,
         other: &Self,

@@ -27,6 +31,7 @@ impl CachedTreeHash for u64 {
     }
 }

+/*
 impl<T> CachedTreeHash for Vec<T>
 where
     T: CachedTreeHash + Encodable,

@@ -96,3 +101,4 @@ where
         Some(chunk + num_nodes)
     }
 }
+*/

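The u64 implementation above only gains `num_child_nodes`; its `cached_hash_tree_root` body lies outside these hunks. For orientation, a hedged sketch of what a single-chunk leaf update typically looks like follows. The function name and byte packing are assumptions; only `modify_chunk` mirrors the method used elsewhere in this diff.

// Hypothetical sketch: update a u64 leaf's 32-byte chunk if its value changed, then
// return the next chunk index so the caller can keep walking the tree.
fn u64_leaf_root(new: u64, old: u64, cache: &mut TreeHashCache, chunk: usize) -> Option<usize> {
    if new != old {
        // SSZ packs uints little-endian into a zero-padded 32-byte chunk.
        let mut bytes = vec![0u8; 32];
        bytes[0..8].copy_from_slice(&new.to_le_bytes());
        cache.modify_chunk(chunk, &bytes)?;
    }

    // A leaf occupies exactly one chunk.
    Some(chunk + 1)
}
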
@@ -13,58 +13,87 @@ impl CachedTreeHash for Inner {
     type Item = Self;

     fn build_cache_bytes(&self) -> Vec<u8> {
+        let cache_a = self.a.build_cache_bytes();
+        let cache_b = self.b.build_cache_bytes();
+        let cache_c = self.c.build_cache_bytes();
+        let cache_d = self.d.build_cache_bytes();
+
         let mut leaves = vec![];
+        leaves.extend_from_slice(&cache_a[0..32].to_vec());
+        leaves.extend_from_slice(&cache_b[0..32].to_vec());
+        leaves.extend_from_slice(&cache_c[0..32].to_vec());
+        leaves.extend_from_slice(&cache_d[0..32].to_vec());

-        leaves.append(&mut self.a.build_cache_bytes());
-        leaves.append(&mut self.b.build_cache_bytes());
-        leaves.append(&mut self.c.build_cache_bytes());
-        leaves.append(&mut self.d.build_cache_bytes());
+        let mut merkle = merkleize(leaves);

-        merkleize(leaves)
+        let num_leaves = 4;
+        let num_nodes = num_nodes(num_leaves);
+        let num_internal_nodes = num_nodes - num_leaves;
+
+        let mut next_hash = num_internal_nodes * HASHSIZE;
+        merkle.splice(next_hash..next_hash + HASHSIZE, cache_a);
+        next_hash += HASHSIZE;
+        merkle.splice(next_hash..next_hash + HASHSIZE, cache_b);
+        next_hash += HASHSIZE;
+        merkle.splice(next_hash..next_hash + HASHSIZE, cache_c);
+        next_hash += HASHSIZE;
+        merkle.splice(next_hash..next_hash + HASHSIZE, cache_d);
+
+        merkle
     }

     fn num_bytes(&self) -> usize {
         let mut bytes = 0;

         bytes += self.a.num_bytes();
         bytes += self.b.num_bytes();
         bytes += self.c.num_bytes();
         bytes += self.d.num_bytes();

         bytes
     }

+    fn num_child_nodes(&self) -> usize {
+        let mut children = 0;
+        let leaves = 4;
+
+        children += self.a.num_child_nodes();
+        children += self.b.num_child_nodes();
+        children += self.c.num_child_nodes();
+        children += self.d.num_child_nodes();
+
+        num_nodes(leaves) + children - 1
+    }
+
     fn cached_hash_tree_root(
         &self,
         other: &Self,
         cache: &mut TreeHashCache,
         chunk: usize,
     ) -> Option<usize> {
-        let mut num_leaves: usize = 0;
-        num_leaves += num_unsanitized_leaves(self.a.num_bytes());
-        num_leaves += num_unsanitized_leaves(self.b.num_bytes());
-        num_leaves += num_unsanitized_leaves(self.c.num_bytes());
-        num_leaves += num_unsanitized_leaves(self.d.num_bytes());
-
-        let num_nodes = num_nodes(num_leaves);
-        let num_internal_nodes = num_nodes - num_leaves;
+        let mut offsets = vec![];
+        offsets.push(self.a.num_child_nodes() + 1);
+        offsets.push(self.b.num_child_nodes() + 1);
+        offsets.push(self.c.num_child_nodes() + 1);
+        offsets.push(self.d.num_child_nodes() + 1);
+        let offset_handler = OffsetHandler::from_lengths(chunk, offsets);

         // Skip past the internal nodes and update any changed leaf nodes.
         {
-            let chunk = chunk + num_internal_nodes;
+            let chunk = offset_handler.first_leaf_node()?;
             let chunk = self.a.cached_hash_tree_root(&other.a, cache, chunk)?;
             let chunk = self.b.cached_hash_tree_root(&other.b, cache, chunk)?;
             let chunk = self.c.cached_hash_tree_root(&other.c, cache, chunk)?;
             let _chunk = self.d.cached_hash_tree_root(&other.d, cache, chunk)?;
         }

         // Iterate backwards through the internal nodes, rehashing any node where its children
         // have changed.
-        for chunk in (chunk..chunk + num_internal_nodes).into_iter().rev() {
-            if cache.children_modified(chunk)? {
-                cache.modify_chunk(chunk, &cache.hash_children(chunk)?)?;
+        for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
+            if cache.either_modified(children)? {
+                cache.modify_chunk(parent, &cache.hash_children(children)?)?;
             }
         }

-        Some(chunk + num_nodes)
+        Some(offset_handler.next_node())
     }
 }

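`num_unsanitized_leaves` is called above but not defined anywhere in this diff. A plausible reading, stated here as an assumption only, is the number of 32-byte leaves needed for a field's serialized bytes before the leaf count is padded to a power of two:

// Assumed helper (not shown in this commit): ceil(num_bytes / HASHSIZE).
fn num_unsanitized_leaves(num_bytes: usize) -> usize {
    (num_bytes + HASHSIZE - 1) / HASHSIZE
}

Under that reading each u64 field contributes one leaf, so `Inner` above has four unsanitized leaves.
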
@@ -79,53 +108,79 @@ impl CachedTreeHash for Outer {
     type Item = Self;

     fn build_cache_bytes(&self) -> Vec<u8> {
+        let cache_a = self.a.build_cache_bytes();
+        let cache_b = self.b.build_cache_bytes();
+        let cache_c = self.c.build_cache_bytes();
+
         let mut leaves = vec![];
+        leaves.extend_from_slice(&cache_a[0..32].to_vec());
+        leaves.extend_from_slice(&cache_b[0..32].to_vec());
+        leaves.extend_from_slice(&cache_c[0..32].to_vec());

-        leaves.append(&mut self.a.build_cache_bytes());
-        leaves.append(&mut self.b.build_cache_bytes());
-        leaves.append(&mut self.c.build_cache_bytes());
+        let mut merkle = merkleize(leaves);

-        merkleize(leaves)
+        let num_leaves = 4;
+        let num_nodes = num_nodes(num_leaves);
+        let num_internal_nodes = num_nodes - num_leaves;
+
+        let mut next_hash = num_internal_nodes * HASHSIZE;
+        merkle.splice(next_hash..next_hash + HASHSIZE, cache_a);
+        next_hash += (self.a.num_child_nodes() + 1) * HASHSIZE;
+        merkle.splice(next_hash..next_hash + HASHSIZE, cache_b);
+        next_hash += (self.b.num_child_nodes() + 1) * HASHSIZE;
+        merkle.splice(next_hash..next_hash + HASHSIZE, cache_c);
+
+        merkle
     }

     fn num_bytes(&self) -> usize {
         let mut bytes = 0;
         bytes += self.a.num_bytes();
         bytes += self.b.num_bytes();
         bytes += self.c.num_bytes();
         bytes
     }

+    fn num_child_nodes(&self) -> usize {
+        let mut children = 0;
+        let leaves = 3;
+
+        children += self.a.num_child_nodes();
+        children += self.b.num_child_nodes();
+        children += self.c.num_child_nodes();
+
+        num_nodes(leaves) + children - 1
+    }
+
     fn cached_hash_tree_root(
         &self,
         other: &Self,
         cache: &mut TreeHashCache,
         chunk: usize,
     ) -> Option<usize> {
-        let mut num_leaves: usize = 0;
-        num_leaves += num_unsanitized_leaves(self.a.num_bytes());
-        num_leaves += num_unsanitized_leaves(self.b.num_bytes());
-        num_leaves += num_unsanitized_leaves(self.c.num_bytes());
-
-        let num_nodes = num_nodes(num_leaves);
-        let num_internal_nodes = num_nodes - num_leaves;
+        let mut offsets = vec![];
+        offsets.push(self.a.num_child_nodes() + 1);
+        offsets.push(self.b.num_child_nodes() + 1);
+        offsets.push(self.c.num_child_nodes() + 1);
+        let offset_handler = OffsetHandler::from_lengths(chunk, offsets);

         // Skip past the internal nodes and update any changed leaf nodes.
         {
-            let chunk = chunk + num_internal_nodes;
+            let chunk = offset_handler.first_leaf_node()?;
             let chunk = self.a.cached_hash_tree_root(&other.a, cache, chunk)?;
             let chunk = self.b.cached_hash_tree_root(&other.b, cache, chunk)?;
             let _chunk = self.c.cached_hash_tree_root(&other.c, cache, chunk)?;
         }

         // Iterate backwards through the internal nodes, rehashing any node where its children
         // have changed.
-        for chunk in (chunk..chunk + num_internal_nodes).into_iter().rev() {
-            if cache.children_modified(chunk)? {
-                cache.modify_chunk(chunk, &cache.hash_children(chunk)?)?;
+        for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
+            if cache.either_modified(children)? {
+                dbg!(parent);
+                dbg!(children);
+                cache.modify_chunk(parent, &cache.hash_children(children)?)?;
             }
         }

-        Some(chunk + num_nodes)
+        Some(offset_handler.next_node())
     }
 }

@@ -163,15 +218,30 @@ fn partial_modification_to_outer() {

     // Perform a differential hash
     let mut cache_struct = TreeHashCache::from_bytes(original_cache.clone()).unwrap();
-    modified_outer.cached_hash_tree_root(&original_outer, &mut cache_struct, 0);
+
+    modified_outer
+        .cached_hash_tree_root(&original_outer, &mut cache_struct, 0)
+        .unwrap();

     let modified_cache: Vec<u8> = cache_struct.into();

     // Generate reference data.
-    let mut data = vec![];
-    data.append(&mut int_to_bytes32(0));
-    data.append(&mut inner.build_cache_bytes());
-    data.append(&mut int_to_bytes32(42));
-    let merkle = merkleize(data);
+    let inner_bytes = inner.build_cache_bytes();
-    data.append(&mut int_to_bytes32(5));
+
+    let leaves = vec![
+        int_to_bytes32(0),
+        inner_bytes[0..32].to_vec(),
+        int_to_bytes32(5),
+        vec![0; 32], // padding
+    ];
+    let mut merkle = merkleize(join(leaves));
+    merkle.splice(4 * 32..5 * 32, inner_bytes);
+
+    assert_eq!(merkle.len() / HASHSIZE, 13);
+    assert_eq!(modified_cache.len() / HASHSIZE, 13);

     assert_eq!(merkle, modified_cache);
 }

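The reference data above is built with a `join` helper that does not appear in this diff; presumably it just flattens the per-leaf byte vectors into one contiguous buffer before handing them to `merkleize`. A sketch under that assumption:

// Assumed helper: concatenate the 32-byte leaves into a single byte vector.
fn join(leaves: Vec<Vec<u8>>) -> Vec<u8> {
    leaves.into_iter().flatten().collect()
}
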
@@ -197,13 +267,33 @@ fn outer_builds() {
     // Generate reference data.
-    let mut data = vec![];
-    data.append(&mut int_to_bytes32(0));
-    data.append(&mut inner.build_cache_bytes());
+    let inner_bytes = inner.build_cache_bytes();
-    data.append(&mut int_to_bytes32(5));
-    let merkle = merkleize(data);

-    assert_eq!(merkle, cache);
+    let leaves = vec![
+        int_to_bytes32(0),
+        inner_bytes[0..32].to_vec(),
+        int_to_bytes32(5),
+        vec![0; 32], // padding
+    ];
+    let mut merkle = merkleize(join(leaves));
+    merkle.splice(4 * 32..5 * 32, inner_bytes);
+
+    assert_eq!(merkle.len() / HASHSIZE, 13);
+    assert_eq!(cache.len() / HASHSIZE, 13);
+
+    for (i, chunk) in cache.chunks(HASHSIZE).enumerate() {
+        assert_eq!(
+            merkle[i * HASHSIZE..(i + 1) * HASHSIZE],
+            *chunk,
+            "failed on {}",
+            i
+        );
+    }
+    // assert_eq!(merkle, cache);
 }

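Both tests assert that a four-leaf tree occupies seven 32-byte chunks before the inner subtree (itself seven chunks) is spliced over its leaf, giving the 13 chunks checked above. For reference, here is a hedged sketch of a `merkleize` consistent with the doc comment earlier in this diff and with those chunk counts, laid out root first. Only the `hash` import is taken from the source; the rest is illustrative, not the crate's actual implementation.

fn merkleize_sketch(values: Vec<u8>) -> Vec<u8> {
    // Pad the input up to a power-of-two number of 32-byte leaf chunks.
    let num_values_chunks = (values.len() + HASHSIZE - 1) / HASHSIZE;
    let leaf_chunks = std::cmp::max(1, num_values_chunks).next_power_of_two();
    let num_chunks = 2 * leaf_chunks - 1;

    // Flat buffer, one 32-byte chunk per node, root at chunk 0, leaves at the end.
    let mut tree = vec![0u8; num_chunks * HASHSIZE];
    let leaf_start = (leaf_chunks - 1) * HASHSIZE;
    tree[leaf_start..leaf_start + values.len()].copy_from_slice(&values);

    // Hash each pair of children into its parent, from the last parent up to the root.
    for parent in (0..leaf_chunks - 1).rev() {
        let start = (2 * parent + 1) * HASHSIZE;
        let digest = hash(&tree[start..start + 2 * HASHSIZE]);
        tree[parent * HASHSIZE..(parent + 1) * HASHSIZE].copy_from_slice(&digest);
    }

    tree
}
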
+/*
 #[test]
 fn partial_modification_u64_vec() {
     let n: u64 = 50;

@@ -272,6 +362,7 @@ fn vec_of_u64_builds() {

     assert_eq!(expected, cache);
 }
+*/

 #[test]
 fn merkleize_odd() {