Further tidy tree_hash crate.
parent 461f3d2382
commit f1d8224d89
@@ -1,5 +1,7 @@
 use super::*;
+use crate::merkleize::merkle_root;
 use ethereum_types::H256;
+use hashing::hash;
 use int_to_bytes::int_to_bytes32;
 
 macro_rules! impl_for_bitsize {
@@ -1,6 +1,5 @@
-use hashing::hash;
-
 pub mod impls;
+pub mod merkleize;
 
 pub const BYTES_PER_CHUNK: usize = 32;
 pub const HASHSIZE: usize = 32;
@@ -28,73 +27,6 @@ pub trait SignedRoot: TreeHash {
     fn signed_root(&self) -> Vec<u8>;
 }
 
-pub fn merkle_root(bytes: &[u8]) -> Vec<u8> {
-    // TODO: replace this with a more memory efficient method.
-    efficient_merkleize(&bytes)[0..32].to_vec()
-}
-
-pub fn efficient_merkleize(bytes: &[u8]) -> Vec<u8> {
-    // If the bytes are just one chunk (or less than one chunk) just return them.
-    if bytes.len() <= HASHSIZE {
-        let mut o = bytes.to_vec();
-        o.resize(HASHSIZE, 0);
-        return o;
-    }
-
-    let leaves = num_sanitized_leaves(bytes.len());
-    let nodes = num_nodes(leaves);
-    let internal_nodes = nodes - leaves;
-
-    let num_bytes = std::cmp::max(internal_nodes, 1) * HASHSIZE + bytes.len();
-
-    let mut o: Vec<u8> = vec![0; internal_nodes * HASHSIZE];
-
-    o.append(&mut bytes.to_vec());
-
-    assert_eq!(o.len(), num_bytes);
-
-    let empty_chunk_hash = hash(&[0; MERKLE_HASH_CHUNK]);
-
-    let mut i = nodes * HASHSIZE;
-    let mut j = internal_nodes * HASHSIZE;
-
-    while i >= MERKLE_HASH_CHUNK {
-        i -= MERKLE_HASH_CHUNK;
-
-        j -= HASHSIZE;
-        let hash = match o.get(i..i + MERKLE_HASH_CHUNK) {
-            // All bytes are available, hash as usual.
-            Some(slice) => hash(slice),
-            // Unable to get all the bytes.
-            None => {
-                match o.get(i..) {
-                    // Able to get some of the bytes, pad them out.
-                    Some(slice) => {
-                        let mut bytes = slice.to_vec();
-                        bytes.resize(MERKLE_HASH_CHUNK, 0);
-                        hash(&bytes)
-                    }
-                    // Unable to get any bytes, use the empty-chunk hash.
-                    None => empty_chunk_hash.clone(),
-                }
-            }
-        };
-
-        o[j..j + HASHSIZE].copy_from_slice(&hash);
-    }
-
-    o
-}
-
-fn num_sanitized_leaves(num_bytes: usize) -> usize {
-    let leaves = (num_bytes + HASHSIZE - 1) / HASHSIZE;
-    leaves.next_power_of_two()
-}
-
-fn num_nodes(num_leaves: usize) -> usize {
-    2 * num_leaves - 1
-}
-
 #[macro_export]
 macro_rules! tree_hash_ssz_encoding_as_vector {
     ($type: ident) => {
@@ -112,7 +44,7 @@ macro_rules! tree_hash_ssz_encoding_as_vector {
            }
 
            fn tree_hash_root(&self) -> Vec<u8> {
-                tree_hash::merkle_root(&ssz::ssz_encode(self))
+                tree_hash::merkleize::merkle_root(&ssz::ssz_encode(self))
            }
        }
    };
eth2/utils/tree_hash/src/merkleize.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
+use super::*;
+use hashing::hash;
+
+pub fn merkle_root(bytes: &[u8]) -> Vec<u8> {
+    // TODO: replace this with a more memory efficient method.
+    efficient_merkleize(&bytes)[0..32].to_vec()
+}
+
+pub fn efficient_merkleize(bytes: &[u8]) -> Vec<u8> {
+    // If the bytes are just one chunk (or less than one chunk) just return them.
+    if bytes.len() <= HASHSIZE {
+        let mut o = bytes.to_vec();
+        o.resize(HASHSIZE, 0);
+        return o;
+    }
+
+    let leaves = num_sanitized_leaves(bytes.len());
+    let nodes = num_nodes(leaves);
+    let internal_nodes = nodes - leaves;
+
+    let num_bytes = std::cmp::max(internal_nodes, 1) * HASHSIZE + bytes.len();
+
+    let mut o: Vec<u8> = vec![0; internal_nodes * HASHSIZE];
+
+    o.append(&mut bytes.to_vec());
+
+    assert_eq!(o.len(), num_bytes);
+
+    let empty_chunk_hash = hash(&[0; MERKLE_HASH_CHUNK]);
+
+    let mut i = nodes * HASHSIZE;
+    let mut j = internal_nodes * HASHSIZE;
+
+    while i >= MERKLE_HASH_CHUNK {
+        i -= MERKLE_HASH_CHUNK;
+
+        j -= HASHSIZE;
+        let hash = match o.get(i..i + MERKLE_HASH_CHUNK) {
+            // All bytes are available, hash as usual.
+            Some(slice) => hash(slice),
+            // Unable to get all the bytes.
+            None => {
+                match o.get(i..) {
+                    // Able to get some of the bytes, pad them out.
+                    Some(slice) => {
+                        let mut bytes = slice.to_vec();
+                        bytes.resize(MERKLE_HASH_CHUNK, 0);
+                        hash(&bytes)
+                    }
+                    // Unable to get any bytes, use the empty-chunk hash.
+                    None => empty_chunk_hash.clone(),
+                }
+            }
+        };
+
+        o[j..j + HASHSIZE].copy_from_slice(&hash);
+    }
+
+    o
+}
+
+fn num_sanitized_leaves(num_bytes: usize) -> usize {
+    let leaves = (num_bytes + HASHSIZE - 1) / HASHSIZE;
+    leaves.next_power_of_two()
+}
+
+fn num_nodes(num_leaves: usize) -> usize {
+    2 * num_leaves - 1
+}
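
For orientation, a minimal sketch of calling the relocated function through its new path, as the derive macros and tests below now do. The input bytes are illustrative; the only behaviour assumed is what the file above shows (merkle_root merkleizes the input in 32-byte chunks and returns the 32-byte root as a Vec<u8>):

    use tree_hash::merkleize::merkle_root;

    fn main() {
        // Two illustrative 32-byte leaves, concatenated as raw bytes.
        let leaves: Vec<u8> = vec![42u8; 64];

        // Hashes the chunks pairwise up to a single 32-byte root.
        let root = merkle_root(&leaves);
        assert_eq!(root.len(), 32);
    }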
@@ -149,7 +149,7 @@ pub fn tree_hash_derive(input: TokenStream) -> TokenStream {
                    leaves.append(&mut self.#idents.tree_hash_root());
                )*
 
-                tree_hash::merkle_root(&leaves)
+                tree_hash::merkleize::merkle_root(&leaves)
            }
        }
    };
@@ -179,7 +179,7 @@ pub fn tree_hash_signed_root_derive(input: TokenStream) -> TokenStream {
                    leaves.append(&mut self.#idents.tree_hash_root());
                )*
 
-                tree_hash::merkle_root(&leaves)
+                tree_hash::merkleize::merkle_root(&leaves)
            }
        }
    };
@@ -1,5 +1,5 @@
 use cached_tree_hash::{CachedTreeHash, CachedTreeHasher};
-use tree_hash::{SignedRoot, TreeHash};
+use tree_hash::{merkleize::merkle_root, SignedRoot, TreeHash};
 use tree_hash_derive::{CachedTreeHash, SignedRoot, TreeHash};
 
 #[derive(Clone, Debug, TreeHash, CachedTreeHash)]
@@ -120,7 +120,7 @@ impl CryptoKitties {
        leaves.append(&mut self.best_kitty.tree_hash_root());
        leaves.append(&mut self.worst_kitty.tree_hash_root());
        leaves.append(&mut self.kitties.tree_hash_root());
-        tree_hash::merkle_root(&leaves)
+        merkle_root(&leaves)
    }
 }
 
@@ -158,14 +158,14 @@ impl Casper {
        let mut list = Vec::new();
        list.append(&mut self.friendly.tree_hash_root());
        list.append(&mut self.friends.tree_hash_root());
-        tree_hash::merkle_root(&list)
+        merkle_root(&list)
    }
 
    fn expected_tree_hash(&self) -> Vec<u8> {
        let mut list = Vec::new();
        list.append(&mut self.friendly.tree_hash_root());
        list.append(&mut self.dead.tree_hash_root());
-        tree_hash::merkle_root(&list)
+        merkle_root(&list)
    }
 }
 
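
The tests above pair each derived tree_hash_root with a hand-rolled expected value. A minimal sketch of that same pattern, with a hypothetical struct and fields, assuming u64 implements TreeHash via the impl_for_bitsize macro shown earlier:

    use tree_hash::merkleize::merkle_root;
    use tree_hash::TreeHash;
    use tree_hash_derive::TreeHash;

    #[derive(TreeHash)]
    struct Pair {
        a: u64,
        b: u64,
    }

    fn expected_root(p: &Pair) -> Vec<u8> {
        // Mirror what the derive generates: concatenate each field's
        // tree-hash root, then merkleize the concatenated bytes.
        let mut leaves = vec![];
        leaves.append(&mut p.a.tree_hash_root());
        leaves.append(&mut p.b.tree_hash_root());
        merkle_root(&leaves)
    }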