Create cached_tree_hash crate.

parent 827e1c62d9
commit b213a5ade4
@@ -9,6 +9,7 @@ members = [
    "eth2/types",
    "eth2/utils/bls",
    "eth2/utils/boolean-bitfield",
    "eth2/utils/cached_tree_hash",
    "eth2/utils/hashing",
    "eth2/utils/honey-badger-split",
    "eth2/utils/merkle_proof",
@@ -50,7 +50,7 @@ where
    }

    fn tree_hash_root(&self) -> Vec<u8> {
        tree_hash::standard_tree_hash::vec_tree_hash_root(self)
        tree_hash::impls::vec_tree_hash_root(self)
    }
}
eth2/utils/cached_tree_hash/Cargo.toml (new file, 14 lines)
@@ -0,0 +1,14 @@
[package]
name = "cached_tree_hash"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"

[dev-dependencies]
tree_hash_derive = { path = "../tree_hash_derive" }

[dependencies]
tree_hash = { path = "../tree_hash" }
ethereum-types = "0.5"
hashing = { path = "../hashing" }
int_to_bytes = { path = "../int_to_bytes" }
eth2/utils/cached_tree_hash/README.md (new file, 76 lines)
@@ -0,0 +1,76 @@
# Tree hashing

Provides both cached and non-cached tree hashing methods.

## Standard Tree Hash

```rust
use tree_hash::TreeHash;
use tree_hash_derive::TreeHash;

#[derive(TreeHash)]
struct Foo {
    a: u64,
    b: Vec<u64>,
}

fn main() {
    let foo = Foo {
        a: 42,
        b: vec![1, 2, 3]
    };

    println!("root: {:?}", foo.tree_hash_root());
}
```

## Cached Tree Hash

```rust
use cached_tree_hash::CachedTreeHasher;
use tree_hash::TreeHash;
use tree_hash_derive::{TreeHash, CachedTreeHash};

#[derive(TreeHash, CachedTreeHash)]
struct Foo {
    a: u64,
    b: Vec<u64>,
}

#[derive(TreeHash, CachedTreeHash)]
struct Bar {
    a: Vec<Foo>,
    b: u64,
}

fn main() {
    let bar = Bar {
        a: vec![
            Foo {
                a: 42,
                b: vec![1, 2, 3]
            }
        ],
        b: 42
    };

    let modified_bar = Bar {
        a: vec![
            Foo {
                a: 100,
                b: vec![1, 2, 3, 4, 5, 6]
            },
            Foo {
                a: 42,
                b: vec![]
            }
        ],
        b: 99
    };

    let mut hasher = CachedTreeHasher::new(&bar).unwrap();
    hasher.update(&modified_bar).unwrap();

    // Assert that the cached tree hash matches a standard tree hash.
    assert_eq!(hasher.tree_hash_root().unwrap(), modified_bar.tree_hash_root());
}
```
@@ -168,6 +168,10 @@ impl BTreeOverlay {
    }
}

fn children(parent: usize) -> (usize, usize) {
    ((2 * parent + 1), (2 * parent + 2))
}

#[cfg(test)]
mod test {
    use super::*;
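Not part of the diff, but a tiny worked example of the `children` helper added above: it uses the usual implicit binary-tree indexing, where node `i` has children at `2i + 1` and `2i + 2`.

```rust
fn children(parent: usize) -> (usize, usize) {
    ((2 * parent + 1), (2 * parent + 2))
}

fn main() {
    // The root (index 0) has children 1 and 2; node 2 has children 5 and 6.
    assert_eq!(children(0), (1, 2));
    assert_eq!(children(2), (5, 6));
}
```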
eth2/utils/cached_tree_hash/src/errors.rs (new file, 18 lines)
@@ -0,0 +1,18 @@
use tree_hash::TreeHashType;

#[derive(Debug, PartialEq, Clone)]
pub enum Error {
    ShouldNotProduceBTreeOverlay,
    NoFirstNode,
    NoBytesForRoot,
    UnableToObtainSlices,
    UnableToGrowMerkleTree,
    UnableToShrinkMerkleTree,
    TreeCannotHaveZeroNodes,
    ShouldNeverBePacked(TreeHashType),
    BytesAreNotEvenChunks(usize),
    NoModifiedFieldForChunk(usize),
    NoBytesForChunk(usize),
    NoOverlayForIndex(usize),
    NotLeafNode(usize),
}
@@ -1,4 +1,5 @@
use super::*;
use crate::merkleize::merkleize;

mod vec;

@@ -17,8 +18,8 @@ impl CachedTreeHash<u64> for u64 {

    fn tree_hash_cache_overlay(
        &self,
        chunk_offset: usize,
        depth: usize,
        _chunk_offset: usize,
        _depth: usize,
    ) -> Result<BTreeOverlay, Error> {
        panic!("Basic should not produce overlay");
        // BTreeOverlay::from_lengths(chunk_offset, 1, depth, vec![1])
@@ -50,8 +51,8 @@ impl CachedTreeHash<usize> for usize {

    fn tree_hash_cache_overlay(
        &self,
        chunk_offset: usize,
        depth: usize,
        _chunk_offset: usize,
        _depth: usize,
    ) -> Result<BTreeOverlay, Error> {
        panic!("Basic should not produce overlay");
        // BTreeOverlay::from_lengths(chunk_offset, 1, depth, vec![1])
@@ -1,4 +1,5 @@
use super::*;
use crate::merkleize::{merkleize, num_sanitized_leaves, sanitise_bytes};

impl<T> CachedTreeHash<Vec<T>> for Vec<T>
where
eth2/utils/cached_tree_hash/src/lib.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
use hashing::hash;
use std::ops::Range;
use tree_hash::{TreeHash, TreeHashType, BYTES_PER_CHUNK, HASHSIZE};

mod btree_overlay;
mod errors;
mod impls;
pub mod merkleize;
mod resize;
mod tree_hash_cache;

pub use btree_overlay::BTreeOverlay;
pub use errors::Error;
pub use tree_hash_cache::TreeHashCache;

pub trait CachedTreeHash<Item>: TreeHash {
    fn tree_hash_cache_overlay(
        &self,
        chunk_offset: usize,
        depth: usize,
    ) -> Result<BTreeOverlay, Error>;

    fn num_tree_hash_cache_chunks(&self) -> usize;

    fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error>;

    fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error>;
}

#[derive(Debug, PartialEq)]
pub struct CachedTreeHasher {
    cache: TreeHashCache,
}

impl CachedTreeHasher {
    pub fn new<T>(item: &T) -> Result<Self, Error>
    where
        T: CachedTreeHash<T>,
    {
        Ok(Self {
            cache: TreeHashCache::new(item, 0)?,
        })
    }

    pub fn update<T>(&mut self, item: &T) -> Result<(), Error>
    where
        T: CachedTreeHash<T>,
    {
        // Reset the per-hash counters.
        self.cache.chunk_index = 0;
        self.cache.overlay_index = 0;

        // Reset the "modified" flags for the cache.
        self.cache.reset_modifications();

        // Update the cache with the (maybe) changed object.
        item.update_tree_hash_cache(&mut self.cache)?;

        Ok(())
    }

    pub fn tree_hash_root(&self) -> Result<Vec<u8>, Error> {
        // Return the root of the cache -- the merkle root.
        Ok(self.cache.root()?.to_vec())
    }
}
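As an aside (not part of the commit), the intended flow of the API above is: build one `CachedTreeHasher` from the initial object, then reuse it, calling `update` after each mutation so only the cache chunks marked as modified need re-hashing. A minimal usage sketch against the `lib.rs` shown above; the `State` struct is made up for illustration.

```rust
use cached_tree_hash::CachedTreeHasher;
use tree_hash::TreeHash;
use tree_hash_derive::{CachedTreeHash, TreeHash};

// Hypothetical example type; any type deriving both traits would do.
#[derive(Clone, TreeHash, CachedTreeHash)]
struct State {
    slot: u64,
    balances: Vec<u64>,
}

fn main() {
    let mut state = State {
        slot: 0,
        balances: vec![32; 8],
    };

    // Build the cache once from the initial object.
    let mut hasher = CachedTreeHasher::new(&state).unwrap();

    for _ in 0..3 {
        // Mutate the object, then refresh the cache with the new version.
        state.slot += 1;
        hasher.update(&state).unwrap();

        // The cached root should always agree with a from-scratch tree hash.
        assert_eq!(hasher.tree_hash_root().unwrap(), state.tree_hash_root());
    }
}
```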
eth2/utils/cached_tree_hash/src/merkleize.rs (new file, 78 lines)
@@ -0,0 +1,78 @@
use hashing::hash;
use tree_hash::{BYTES_PER_CHUNK, HASHSIZE, MERKLE_HASH_CHUNK};

/// Split `values` into power-of-two, identical-length chunks (padding with `0`) and merkleize
/// them, returning the entire merkle tree.
///
/// The root hash is `merkleize(values)[0..BYTES_PER_CHUNK]`.
pub fn merkleize(values: Vec<u8>) -> Vec<u8> {
    let values = sanitise_bytes(values);

    let leaves = values.len() / HASHSIZE;

    if leaves == 0 {
        panic!("No full leaves");
    }

    if !leaves.is_power_of_two() {
        panic!("leaves is not power of two");
    }

    let mut o: Vec<u8> = vec![0; (num_nodes(leaves) - leaves) * HASHSIZE];
    o.append(&mut values.to_vec());

    let mut i = o.len();
    let mut j = o.len() - values.len();

    while i >= MERKLE_HASH_CHUNK {
        i -= MERKLE_HASH_CHUNK;
        let hash = hash(&o[i..i + MERKLE_HASH_CHUNK]);

        j -= HASHSIZE;
        o[j..j + HASHSIZE].copy_from_slice(&hash);
    }

    o
}

pub fn sanitise_bytes(mut bytes: Vec<u8>) -> Vec<u8> {
    let present_leaves = num_unsanitized_leaves(bytes.len());
    let required_leaves = present_leaves.next_power_of_two();

    if (present_leaves != required_leaves) | last_leaf_needs_padding(bytes.len()) {
        bytes.resize(num_bytes(required_leaves), 0);
    }

    bytes
}

pub fn pad_for_leaf_count(num_leaves: usize, bytes: &mut Vec<u8>) {
    let required_leaves = num_leaves.next_power_of_two();

    bytes.resize(
        bytes.len() + (required_leaves - num_leaves) * BYTES_PER_CHUNK,
        0,
    );
}

fn last_leaf_needs_padding(num_bytes: usize) -> bool {
    num_bytes % HASHSIZE != 0
}

/// Rounds up
fn num_unsanitized_leaves(num_bytes: usize) -> usize {
    (num_bytes + HASHSIZE - 1) / HASHSIZE
}

fn num_bytes(num_leaves: usize) -> usize {
    num_leaves * HASHSIZE
}

fn num_nodes(num_leaves: usize) -> usize {
    2 * num_leaves - 1
}

pub fn num_sanitized_leaves(num_bytes: usize) -> usize {
    let leaves = (num_bytes + HASHSIZE - 1) / HASHSIZE;
    leaves.next_power_of_two()
}
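A small aside (not part of this commit) to make the doc comment above concrete: for exactly two 32-byte leaves the returned vector is `[root, leaf_0, leaf_1]`, so the first `BYTES_PER_CHUNK` bytes are simply the hash of the two concatenated leaves. A sketch, assuming the `cached_tree_hash`, `tree_hash` and `hashing` crates as defined in this diff.

```rust
use cached_tree_hash::merkleize::merkleize;
use hashing::hash;
use tree_hash::BYTES_PER_CHUNK;

fn main() {
    // Two 32-byte leaves: 64 bytes in total, already chunk-aligned.
    let leaves: Vec<u8> = vec![1_u8; 32].into_iter().chain(vec![2_u8; 32]).collect();

    let tree = merkleize(leaves.clone());

    // The tree is [root | leaf_0 | leaf_1] = 96 bytes.
    assert_eq!(tree.len(), 3 * BYTES_PER_CHUNK);

    // The root is the hash of the two concatenated leaf chunks.
    assert_eq!(&tree[0..BYTES_PER_CHUNK], &hash(&leaves)[..]);
}
```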
@@ -1,4 +1,6 @@
use super::*;
use crate::merkleize::{merkleize, pad_for_leaf_count};
use int_to_bytes::int_to_bytes32;

#[derive(Debug, PartialEq, Clone)]
pub struct TreeHashCache {
@@ -328,3 +330,7 @@ impl TreeHashCache {
        (self.cache, self.chunk_modified, self.overlays)
    }
}

fn node_range_to_byte_range(node_range: &Range<usize>) -> Range<usize> {
    node_range.start * HASHSIZE..node_range.end * HASHSIZE
}
eth2/utils/cached_tree_hash/tests/tests.rs (new file, 437 lines)
@@ -0,0 +1,437 @@
use cached_tree_hash::{merkleize::merkleize, *};
use int_to_bytes::int_to_bytes32;
use tree_hash_derive::{CachedTreeHash, TreeHash};

#[derive(Clone, Debug, TreeHash, CachedTreeHash)]
pub struct NestedStruct {
    pub a: u64,
    pub b: Inner,
}

fn test_routine<T>(original: T, modified: Vec<T>)
where
    T: CachedTreeHash<T>,
{
    let mut hasher = CachedTreeHasher::new(&original).unwrap();

    let standard_root = original.tree_hash_root();
    let cached_root = hasher.tree_hash_root().unwrap();
    assert_eq!(standard_root, cached_root, "Initial cache build failed.");

    for (i, modified) in modified.iter().enumerate() {
        println!("-- Start of modification {} --", i);
        // Test after a modification
        hasher
            .update(modified)
            .expect(&format!("Modification {}", i));
        let standard_root = modified.tree_hash_root();
        let cached_root = hasher
            .tree_hash_root()
            .expect(&format!("Modification {}", i));
        assert_eq!(
            standard_root, cached_root,
            "Modification {} failed. \n Cache: {:?}",
            i, hasher
        );
    }
}

#[test]
fn test_nested_struct() {
    let original = NestedStruct {
        a: 42,
        b: Inner {
            a: 12,
            b: 13,
            c: 14,
            d: 15,
        },
    };
    let modified = vec![NestedStruct {
        a: 99,
        ..original.clone()
    }];

    test_routine(original, modified);
}

#[test]
fn test_inner() {
    let original = Inner {
        a: 12,
        b: 13,
        c: 14,
        d: 15,
    };

    let modified = vec![Inner {
        a: 99,
        ..original.clone()
    }];

    test_routine(original, modified);
}

#[test]
fn test_vec() {
    let original: Vec<u64> = vec![1, 2, 3, 4, 5];

    let modified: Vec<Vec<u64>> = vec![
        vec![1, 2, 3, 4, 42],
        vec![1, 2, 3, 4],
        vec![],
        vec![42; 2_usize.pow(4)],
        vec![],
        vec![],
        vec![1, 2, 3, 4, 42],
        vec![1, 2, 3],
        vec![1],
    ];

    test_routine(original, modified);
}

#[test]
fn test_nested_list_of_u64() {
    let original: Vec<Vec<u64>> = vec![vec![42]];

    let modified = vec![
        vec![vec![1]],
        vec![vec![1], vec![2]],
        vec![vec![1], vec![3], vec![4]],
        vec![],
        vec![vec![1], vec![3], vec![4]],
        vec![],
        vec![vec![1, 2], vec![3], vec![4, 5, 6, 7, 8]],
        vec![],
        vec![vec![1], vec![2], vec![3]],
        vec![vec![1, 2, 3, 4, 5, 6], vec![1, 2, 3, 4, 5, 6, 7]],
        vec![vec![], vec![], vec![]],
        vec![vec![0, 0, 0], vec![0], vec![0]],
    ];

    test_routine(original, modified);
}

#[derive(Clone, Debug, TreeHash, CachedTreeHash)]
pub struct StructWithVec {
    pub a: u64,
    pub b: Inner,
    pub c: Vec<u64>,
}

#[test]
fn test_struct_with_vec() {
    let original = StructWithVec {
        a: 42,
        b: Inner {
            a: 12,
            b: 13,
            c: 14,
            d: 15,
        },
        c: vec![1, 2, 3, 4, 5],
    };

    let modified = vec![
        StructWithVec {
            a: 99,
            ..original.clone()
        },
        StructWithVec {
            a: 100,
            ..original.clone()
        },
        StructWithVec {
            c: vec![1, 2, 3, 4, 5],
            ..original.clone()
        },
        StructWithVec {
            c: vec![1, 3, 4, 5, 6],
            ..original.clone()
        },
        StructWithVec {
            c: vec![1, 3, 4, 5, 6, 7, 8, 9],
            ..original.clone()
        },
        StructWithVec {
            c: vec![1, 3, 4, 5],
            ..original.clone()
        },
        StructWithVec {
            b: Inner {
                a: u64::max_value(),
                b: u64::max_value(),
                c: u64::max_value(),
                d: u64::max_value(),
            },
            c: vec![],
            ..original.clone()
        },
        StructWithVec {
            b: Inner {
                a: 0,
                b: 1,
                c: 2,
                d: 3,
            },
            ..original.clone()
        },
    ];

    test_routine(original, modified);
}

#[test]
fn test_vec_of_struct_with_vec() {
    let a = StructWithVec {
        a: 42,
        b: Inner {
            a: 12,
            b: 13,
            c: 14,
            d: 15,
        },
        c: vec![1, 2, 3, 4, 5],
    };
    let b = StructWithVec {
        c: vec![],
        ..a.clone()
    };
    let c = StructWithVec {
        b: Inner {
            a: 99,
            b: 100,
            c: 101,
            d: 102,
        },
        ..a.clone()
    };
    let d = StructWithVec { a: 0, ..a.clone() };

    // let original: Vec<StructWithVec> = vec![a.clone(), c.clone()];
    let original: Vec<StructWithVec> = vec![a.clone()];

    let modified = vec![
        vec![a.clone(), c.clone()],
        vec![a.clone(), b.clone(), c.clone(), d.clone()],
        vec![b.clone(), a.clone(), c.clone(), d.clone()],
        vec![],
        vec![a.clone()],
        vec![a.clone(), b.clone(), c.clone(), d.clone()],
    ];

    test_routine(original, modified);
}

#[derive(Clone, Debug, TreeHash, CachedTreeHash)]
pub struct StructWithVecOfStructs {
    pub a: u64,
    pub b: Inner,
    pub c: Vec<Inner>,
}

fn get_struct_with_vec_of_structs() -> Vec<StructWithVecOfStructs> {
    let inner_a = Inner {
        a: 12,
        b: 13,
        c: 14,
        d: 15,
    };

    let inner_b = Inner {
        a: 99,
        b: 100,
        c: 101,
        d: 102,
    };

    let inner_c = Inner {
        a: 255,
        b: 256,
        c: 257,
        d: 0,
    };

    let a = StructWithVecOfStructs {
        a: 42,
        b: inner_a.clone(),
        c: vec![inner_a.clone(), inner_b.clone(), inner_c.clone()],
    };

    let b = StructWithVecOfStructs {
        c: vec![],
        ..a.clone()
    };

    let c = StructWithVecOfStructs {
        a: 800,
        ..a.clone()
    };

    let d = StructWithVecOfStructs {
        b: inner_c.clone(),
        ..a.clone()
    };

    let e = StructWithVecOfStructs {
        c: vec![inner_a.clone(), inner_b.clone()],
        ..a.clone()
    };

    let f = StructWithVecOfStructs {
        c: vec![inner_a.clone()],
        ..a.clone()
    };

    vec![a, b, c, d, e, f]
}

#[test]
fn test_struct_with_vec_of_structs() {
    let variants = get_struct_with_vec_of_structs();

    test_routine(variants[0].clone(), variants.clone());
    test_routine(variants[1].clone(), variants.clone());
    test_routine(variants[2].clone(), variants.clone());
    test_routine(variants[3].clone(), variants.clone());
    test_routine(variants[4].clone(), variants.clone());
    test_routine(variants[5].clone(), variants.clone());
}

#[derive(Clone, Debug, TreeHash, CachedTreeHash)]
pub struct StructWithVecOfStructWithVecOfStructs {
    pub a: Vec<StructWithVecOfStructs>,
    pub b: u64,
}

#[test]
fn test_struct_with_vec_of_struct_with_vec_of_structs() {
    let structs = get_struct_with_vec_of_structs();

    let variants = vec![
        StructWithVecOfStructWithVecOfStructs {
            a: structs[..].to_vec(),
            b: 99,
        },
        StructWithVecOfStructWithVecOfStructs { a: vec![], b: 99 },
        StructWithVecOfStructWithVecOfStructs {
            a: structs[0..2].to_vec(),
            b: 99,
        },
        StructWithVecOfStructWithVecOfStructs {
            a: structs[0..2].to_vec(),
            b: 100,
        },
        StructWithVecOfStructWithVecOfStructs {
            a: structs[0..1].to_vec(),
            b: 100,
        },
        StructWithVecOfStructWithVecOfStructs {
            a: structs[0..4].to_vec(),
            b: 100,
        },
        StructWithVecOfStructWithVecOfStructs {
            a: structs[0..5].to_vec(),
            b: 8,
        },
    ];

    for v in &variants {
        test_routine(v.clone(), variants.clone());
    }
}

#[derive(Clone, Debug, TreeHash, CachedTreeHash)]
pub struct Inner {
    pub a: u64,
    pub b: u64,
    pub c: u64,
    pub d: u64,
}

fn generic_test(index: usize) {
    let inner = Inner {
        a: 1,
        b: 2,
        c: 3,
        d: 4,
    };

    let mut cache = TreeHashCache::new(&inner, 0).unwrap();

    let changed_inner = match index {
        0 => Inner {
            a: 42,
            ..inner.clone()
        },
        1 => Inner {
            b: 42,
            ..inner.clone()
        },
        2 => Inner {
            c: 42,
            ..inner.clone()
        },
        3 => Inner {
            d: 42,
            ..inner.clone()
        },
        _ => panic!("bad index"),
    };

    changed_inner.update_tree_hash_cache(&mut cache).unwrap();

    let data1 = int_to_bytes32(1);
    let data2 = int_to_bytes32(2);
    let data3 = int_to_bytes32(3);
    let data4 = int_to_bytes32(4);

    let mut data = vec![data1, data2, data3, data4];

    data[index] = int_to_bytes32(42);

    let expected = merkleize(join(data));

    let cache_bytes: Vec<u8> = cache.into();

    assert_eq!(expected, cache_bytes);
}

#[test]
fn cached_hash_on_inner() {
    generic_test(0);
    generic_test(1);
    generic_test(2);
    generic_test(3);
}

#[test]
fn inner_builds() {
    let data1 = int_to_bytes32(1);
    let data2 = int_to_bytes32(2);
    let data3 = int_to_bytes32(3);
    let data4 = int_to_bytes32(4);

    let data = join(vec![data1, data2, data3, data4]);
    let expected = merkleize(data);

    let inner = Inner {
        a: 1,
        b: 2,
        c: 3,
        d: 4,
    };

    let cache: Vec<u8> = TreeHashCache::new(&inner, 0).unwrap().into();

    assert_eq!(expected, cache);
}

fn join(many: Vec<Vec<u8>>) -> Vec<u8> {
    let mut all = vec![];
    for one in many {
        all.extend_from_slice(&mut one.clone())
    }
    all
}
@@ -1,156 +0,0 @@
use super::*;
use hashing::hash;
use int_to_bytes::int_to_bytes32;
use std::ops::Range;

pub mod btree_overlay;
pub mod impls;
pub mod resize;
pub mod tree_hash_cache;

pub use btree_overlay::BTreeOverlay;
pub use tree_hash_cache::TreeHashCache;

#[derive(Debug, PartialEq)]
pub struct CachedTreeHasher {
    cache: TreeHashCache,
}

impl CachedTreeHasher {
    pub fn new<T>(item: &T) -> Result<Self, Error>
    where
        T: CachedTreeHash<T>,
    {
        Ok(Self {
            cache: TreeHashCache::new(item, 0)?,
        })
    }

    pub fn update<T>(&mut self, item: &T) -> Result<(), Error>
    where
        T: CachedTreeHash<T>,
    {
        // Reset the per-hash counters.
        self.cache.chunk_index = 0;
        self.cache.overlay_index = 0;

        // Reset the "modified" flags for the cache.
        self.cache.reset_modifications();

        // Update the cache with the (maybe) changed object.
        item.update_tree_hash_cache(&mut self.cache)?;

        Ok(())
    }

    pub fn tree_hash_root(&self) -> Result<Vec<u8>, Error> {
        // Return the root of the cache -- the merkle root.
        Ok(self.cache.root()?.to_vec())
    }
}

#[derive(Debug, PartialEq, Clone)]
pub enum Error {
    ShouldNotProduceBTreeOverlay,
    NoFirstNode,
    NoBytesForRoot,
    UnableToObtainSlices,
    UnableToGrowMerkleTree,
    UnableToShrinkMerkleTree,
    TreeCannotHaveZeroNodes,
    ShouldNeverBePacked(TreeHashType),
    BytesAreNotEvenChunks(usize),
    NoModifiedFieldForChunk(usize),
    NoBytesForChunk(usize),
    NoOverlayForIndex(usize),
    NotLeafNode(usize),
}

pub trait CachedTreeHash<Item>: TreeHash {
    fn tree_hash_cache_overlay(
        &self,
        chunk_offset: usize,
        depth: usize,
    ) -> Result<BTreeOverlay, Error>;

    fn num_tree_hash_cache_chunks(&self) -> usize;

    fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error>;

    fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error>;
}

fn children(parent: usize) -> (usize, usize) {
    ((2 * parent + 1), (2 * parent + 2))
}

fn node_range_to_byte_range(node_range: &Range<usize>) -> Range<usize> {
    node_range.start * HASHSIZE..node_range.end * HASHSIZE
}

/// Split `values` into a power-of-two, identical-length chunks (padding with `0`) and merkleize
/// them, returning the entire merkle tree.
///
/// The root hash is `merkleize(values)[0..BYTES_PER_CHUNK]`.
pub fn merkleize(values: Vec<u8>) -> Vec<u8> {
    let values = sanitise_bytes(values);

    let leaves = values.len() / HASHSIZE;

    if leaves == 0 {
        panic!("No full leaves");
    }

    if !leaves.is_power_of_two() {
        panic!("leaves is not power of two");
    }

    let mut o: Vec<u8> = vec![0; (num_nodes(leaves) - leaves) * HASHSIZE];
    o.append(&mut values.to_vec());

    let mut i = o.len();
    let mut j = o.len() - values.len();

    while i >= MERKLE_HASH_CHUNCK {
        i -= MERKLE_HASH_CHUNCK;
        let hash = hash(&o[i..i + MERKLE_HASH_CHUNCK]);

        j -= HASHSIZE;
        o[j..j + HASHSIZE].copy_from_slice(&hash);
    }

    o
}

pub fn sanitise_bytes(mut bytes: Vec<u8>) -> Vec<u8> {
    let present_leaves = num_unsanitized_leaves(bytes.len());
    let required_leaves = present_leaves.next_power_of_two();

    if (present_leaves != required_leaves) | last_leaf_needs_padding(bytes.len()) {
        bytes.resize(num_bytes(required_leaves), 0);
    }

    bytes
}

fn pad_for_leaf_count(num_leaves: usize, bytes: &mut Vec<u8>) {
    let required_leaves = num_leaves.next_power_of_two();

    bytes.resize(
        bytes.len() + (required_leaves - num_leaves) * BYTES_PER_CHUNK,
        0,
    );
}

fn last_leaf_needs_padding(num_bytes: usize) -> bool {
    num_bytes % HASHSIZE != 0
}

/// Rounds up
fn num_unsanitized_leaves(num_bytes: usize) -> usize {
    (num_bytes + HASHSIZE - 1) / HASHSIZE
}

fn num_bytes(num_leaves: usize) -> usize {
    num_leaves * HASHSIZE
}
@@ -1,5 +1,6 @@
use super::*;
use ethereum_types::H256;
use int_to_bytes::int_to_bytes32;

macro_rules! impl_for_bitsize {
    ($type: ident, $bit_size: expr) => {
@@ -1,14 +1,10 @@
pub mod cached_tree_hash;
pub mod signed_root;
pub mod standard_tree_hash;
use hashing::hash;

pub mod impls;

pub const BYTES_PER_CHUNK: usize = 32;
pub const HASHSIZE: usize = 32;
pub const MERKLE_HASH_CHUNCK: usize = 2 * BYTES_PER_CHUNK;

pub use cached_tree_hash::{BTreeOverlay, CachedTreeHash, Error, TreeHashCache};
pub use signed_root::SignedRoot;
pub use standard_tree_hash::{merkle_root, TreeHash};
pub const MERKLE_HASH_CHUNK: usize = 2 * BYTES_PER_CHUNK;

#[derive(Debug, PartialEq, Clone)]
pub enum TreeHashType {
@@ -18,6 +14,78 @@ pub enum TreeHashType {
    Container,
}

pub trait TreeHash {
    fn tree_hash_type() -> TreeHashType;

    fn tree_hash_packed_encoding(&self) -> Vec<u8>;

    fn tree_hash_packing_factor() -> usize;

    fn tree_hash_root(&self) -> Vec<u8>;
}

pub trait SignedRoot: TreeHash {
    fn signed_root(&self) -> Vec<u8>;
}

pub fn merkle_root(bytes: &[u8]) -> Vec<u8> {
    // TODO: replace this with a more memory efficient method.
    efficient_merkleize(&bytes)[0..32].to_vec()
}

pub fn efficient_merkleize(bytes: &[u8]) -> Vec<u8> {
    // If the bytes are just one chunk (or less than one chunk) just return them.
    if bytes.len() <= HASHSIZE {
        let mut o = bytes.to_vec();
        o.resize(HASHSIZE, 0);
        return o;
    }

    let leaves = num_sanitized_leaves(bytes.len());
    let nodes = num_nodes(leaves);
    let internal_nodes = nodes - leaves;

    let num_bytes = std::cmp::max(internal_nodes, 1) * HASHSIZE + bytes.len();

    let mut o: Vec<u8> = vec![0; internal_nodes * HASHSIZE];

    o.append(&mut bytes.to_vec());

    assert_eq!(o.len(), num_bytes);

    let empty_chunk_hash = hash(&[0; MERKLE_HASH_CHUNK]);

    let mut i = nodes * HASHSIZE;
    let mut j = internal_nodes * HASHSIZE;

    while i >= MERKLE_HASH_CHUNK {
        i -= MERKLE_HASH_CHUNK;

        j -= HASHSIZE;
        let hash = match o.get(i..i + MERKLE_HASH_CHUNK) {
            // All bytes are available, hash as usual.
            Some(slice) => hash(slice),
            // Unable to get all the bytes.
            None => {
                match o.get(i..) {
                    // Able to get some of the bytes, pad them out.
                    Some(slice) => {
                        let mut bytes = slice.to_vec();
                        bytes.resize(MERKLE_HASH_CHUNK, 0);
                        hash(&bytes)
                    }
                    // Unable to get any bytes, use the empty-chunk hash.
                    None => empty_chunk_hash.clone(),
                }
            }
        };

        o[j..j + HASHSIZE].copy_from_slice(&hash);
    }

    o
}

fn num_sanitized_leaves(num_bytes: usize) -> usize {
    let leaves = (num_bytes + HASHSIZE - 1) / HASHSIZE;
    leaves.next_power_of_two()
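As a side note (not part of the commit), the `merkle_root` helper above has two easy-to-check cases: an input of one chunk or less is returned zero-padded to `HASHSIZE`, and an exactly-two-chunk input yields the hash of the concatenation. A sketch, assuming the `tree_hash` and `hashing` crates as defined in this diff.

```rust
use hashing::hash;
use tree_hash::{merkle_root, HASHSIZE};

fn main() {
    // One chunk or less: the bytes are returned zero-padded to HASHSIZE.
    let mut small = b"small".to_vec();
    small.resize(HASHSIZE, 0);
    assert_eq!(merkle_root(b"small"), small);

    // Exactly two chunks: the root is the hash of the 64 concatenated bytes.
    let two_chunks = vec![7_u8; 2 * HASHSIZE];
    assert_eq!(merkle_root(&two_chunks), hash(&two_chunks));
}
```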
@@ -1,5 +0,0 @@
use crate::TreeHash;

pub trait SignedRoot: TreeHash {
    fn signed_root(&self) -> Vec<u8>;
}
@@ -1,75 +0,0 @@
use super::*;
use hashing::hash;
use int_to_bytes::int_to_bytes32;

pub use impls::vec_tree_hash_root;

mod impls;

pub trait TreeHash {
    fn tree_hash_type() -> TreeHashType;

    fn tree_hash_packed_encoding(&self) -> Vec<u8>;

    fn tree_hash_packing_factor() -> usize;

    fn tree_hash_root(&self) -> Vec<u8>;
}

pub fn merkle_root(bytes: &[u8]) -> Vec<u8> {
    // TODO: replace this with a more memory efficient method.
    efficient_merkleize(&bytes)[0..32].to_vec()
}

pub fn efficient_merkleize(bytes: &[u8]) -> Vec<u8> {
    // If the bytes are just one chunk (or less than one chunk) just return them.
    if bytes.len() <= HASHSIZE {
        let mut o = bytes.to_vec();
        o.resize(HASHSIZE, 0);
        return o;
    }

    let leaves = num_sanitized_leaves(bytes.len());
    let nodes = num_nodes(leaves);
    let internal_nodes = nodes - leaves;

    let num_bytes = std::cmp::max(internal_nodes, 1) * HASHSIZE + bytes.len();

    let mut o: Vec<u8> = vec![0; internal_nodes * HASHSIZE];

    o.append(&mut bytes.to_vec());

    assert_eq!(o.len(), num_bytes);

    let empty_chunk_hash = hash(&[0; MERKLE_HASH_CHUNCK]);

    let mut i = nodes * HASHSIZE;
    let mut j = internal_nodes * HASHSIZE;

    while i >= MERKLE_HASH_CHUNCK {
        i -= MERKLE_HASH_CHUNCK;

        j -= HASHSIZE;
        let hash = match o.get(i..i + MERKLE_HASH_CHUNCK) {
            // All bytes are available, hash as ususal.
            Some(slice) => hash(slice),
            // Unable to get all the bytes.
            None => {
                match o.get(i..) {
                    // Able to get some of the bytes, pad them out.
                    Some(slice) => {
                        let mut bytes = slice.to_vec();
                        bytes.resize(MERKLE_HASH_CHUNCK, 0);
                        hash(&bytes)
                    }
                    // Unable to get any bytes, use the empty-chunk hash.
                    None => empty_chunk_hash.clone(),
                }
            }
        };

        o[j..j + HASHSIZE].copy_from_slice(&hash);
    }

    o
}
File diff suppressed because it is too large
@@ -10,6 +10,7 @@ proc-macro = true

[dev-dependencies]
tree_hash = { path = "../tree_hash" }
cached_tree_hash = { path = "../cached_tree_hash" }

[dependencies]
syn = "0.15"
@@ -58,9 +58,9 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
    let num_items = idents_a.len();

    let output = quote! {
        impl tree_hash::CachedTreeHash<#name> for #name {
            fn new_tree_hash_cache(&self, depth: usize) -> Result<tree_hash::TreeHashCache, tree_hash::Error> {
                let tree = tree_hash::TreeHashCache::from_leaves_and_subtrees(
        impl cached_tree_hash::CachedTreeHash<#name> for #name {
            fn new_tree_hash_cache(&self, depth: usize) -> Result<cached_tree_hash::TreeHashCache, cached_tree_hash::Error> {
                let tree = cached_tree_hash::TreeHashCache::from_leaves_and_subtrees(
                    self,
                    vec![
                        #(
@@ -74,23 +74,23 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
            }

            fn num_tree_hash_cache_chunks(&self) -> usize {
                tree_hash::BTreeOverlay::new(self, 0, 0)
                cached_tree_hash::BTreeOverlay::new(self, 0, 0)
                    .and_then(|o| Ok(o.num_chunks()))
                    .unwrap_or_else(|_| 1)
            }

            fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> Result<tree_hash::BTreeOverlay, tree_hash::Error> {
            fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> Result<cached_tree_hash::BTreeOverlay, cached_tree_hash::Error> {
                let mut lengths = vec![];

                #(
                    lengths.push(self.#idents_b.num_tree_hash_cache_chunks());
                )*

                tree_hash::BTreeOverlay::from_lengths(chunk_offset, #num_items, depth, lengths)
                cached_tree_hash::BTreeOverlay::from_lengths(chunk_offset, #num_items, depth, lengths)
            }

            fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
                let overlay = BTreeOverlay::new(self, cache.chunk_index, 0)?;
            fn update_tree_hash_cache(&self, cache: &mut cached_tree_hash::TreeHashCache) -> Result<(), cached_tree_hash::Error> {
                let overlay = cached_tree_hash::BTreeOverlay::new(self, cache.chunk_index, 0)?;

                // Skip the chunk index to the first leaf node of this struct.
                cache.chunk_index = overlay.first_leaf_node();
@@ -1,4 +1,5 @@
use tree_hash::{CachedTreeHash, SignedRoot, TreeHash};
use cached_tree_hash::{CachedTreeHash, CachedTreeHasher};
use tree_hash::{SignedRoot, TreeHash};
use tree_hash_derive::{CachedTreeHash, SignedRoot, TreeHash};

#[derive(Clone, Debug, TreeHash, CachedTreeHash)]
@@ -13,18 +14,17 @@ fn test_standard_and_cached<T>(original: &T, modified: &T)
where
    T: CachedTreeHash<T>,
{
    let mut cache = original.new_tree_hash_cache().unwrap();
    // let mut cache = original.new_tree_hash_cache().unwrap();
    let mut hasher = CachedTreeHasher::new(original).unwrap();

    let standard_root = original.tree_hash_root();
    let cached_root = cache.root().unwrap().to_vec();
    let cached_root = hasher.tree_hash_root().unwrap();
    assert_eq!(standard_root, cached_root);

    // Test after a modification
    modified
        .update_tree_hash_cache(&original, &mut cache, 0)
        .unwrap();
    hasher.update(modified).unwrap();
    let standard_root = modified.tree_hash_root();
    let cached_root = cache.root().unwrap().to_vec();
    let cached_root = hasher.tree_hash_root().unwrap();
    assert_eq!(standard_root, cached_root);
}