//! lighthouse/eth2/utils/tree_hash/src/lib.rs
//!
//! SSZ tree-hashing (merkleization) primitives: the `TreeHash` trait, a
//! flat-buffer merkleization routine, and helper macros that derive
//! `TreeHash` from a type's SSZ encoding.
use hashing::hash;
pub mod impls;
2019-04-15 01:14:30 +00:00
pub const BYTES_PER_CHUNK: usize = 32;
pub const HASHSIZE: usize = 32;
2019-04-25 23:55:03 +00:00
pub const MERKLE_HASH_CHUNK: usize = 2 * BYTES_PER_CHUNK;
2019-04-15 23:34:23 +00:00
2019-04-15 01:14:30 +00:00
/// The SSZ tree-hash classification of a type; determines how a `TreeHash`
/// implementation merkleizes the value's serialized form.
#[derive(Debug, PartialEq, Clone)]
pub enum TreeHashType {
    /// A fixed-size primitive; multiple basic values may be packed together
    /// into a single chunk (see `tree_hash_packing_factor`).
    Basic,
    /// A fixed-length collection; never packed.
    Vector,
    /// A variable-length collection; never packed.
    List,
    /// A composite object with fields.
    Container,
}
/// Provides the SSZ tree-hash ("hash tree root") of a type.
pub trait TreeHash {
    /// Returns which merkleization strategy applies to `Self`
    /// (`Basic`, `Vector`, `List` or `Container`).
    fn tree_hash_type() -> TreeHashType;
    /// Returns the serialized bytes used when packing this value into a
    /// chunk alongside other values (meaningful for `Basic` types only;
    /// the helper macros below make it unreachable for vectors/lists).
    fn tree_hash_packed_encoding(&self) -> Vec<u8>;
    /// Returns how many values of `Self` fit in one chunk (meaningful for
    /// `Basic` types only).
    fn tree_hash_packing_factor() -> usize;
    /// Returns the 32-byte tree-hash root of `self`.
    fn tree_hash_root(&self) -> Vec<u8>;
}
/// A `TreeHash` type that additionally defines a "signed root".
pub trait SignedRoot: TreeHash {
    /// Returns the signed root of `self`.
    // NOTE(review): presumably the tree-hash root computed with the
    // signature field(s) omitted — confirm against implementors/derives.
    fn signed_root(&self) -> Vec<u8>;
}
pub fn merkle_root(bytes: &[u8]) -> Vec<u8> {
// TODO: replace this with a more memory efficient method.
efficient_merkleize(&bytes)[0..32].to_vec()
}
/// Merkleizes `bytes` and returns the entire tree as a flat vector, with the
/// root occupying the first `HASHSIZE` bytes.
///
/// The input is treated as a sequence of 32-byte leaf chunks, conceptually
/// zero-padded up to the next power of two; internal node hashes are stored
/// before the leaf bytes in the returned vector.
pub fn efficient_merkleize(bytes: &[u8]) -> Vec<u8> {
    // If the bytes are just one chunk (or less than one chunk) just return them.
    if bytes.len() <= HASHSIZE {
        let mut o = bytes.to_vec();
        o.resize(HASHSIZE, 0);
        return o;
    }

    let leaves = num_sanitized_leaves(bytes.len());
    let nodes = num_nodes(leaves);
    let internal_nodes = nodes - leaves;

    let num_bytes = std::cmp::max(internal_nodes, 1) * HASHSIZE + bytes.len();

    // Tree layout: [internal node hashes][leaf bytes]. The internal region
    // starts zeroed and is filled in from the bottom of the tree upward.
    // (`extend_from_slice` copies `bytes` directly, avoiding the temporary
    // `Vec` the previous `append(&mut bytes.to_vec())` allocated.)
    let mut o: Vec<u8> = vec![0; internal_nodes * HASHSIZE];
    o.extend_from_slice(bytes);

    assert_eq!(o.len(), num_bytes);

    // Hash of an all-zero chunk pair, reused for leaves past the input's end.
    let empty_chunk_hash = hash(&[0; MERKLE_HASH_CHUNK]);

    // `i` walks backwards over child pairs, `j` over their parent slots.
    let mut i = nodes * HASHSIZE;
    let mut j = internal_nodes * HASHSIZE;

    while i >= MERKLE_HASH_CHUNK {
        i -= MERKLE_HASH_CHUNK;
        j -= HASHSIZE;

        // Named `digest` (not `hash`) so it does not shadow the `hash` fn.
        let digest = match o.get(i..i + MERKLE_HASH_CHUNK) {
            // All bytes are available, hash as usual.
            Some(slice) => hash(slice),
            // Unable to get all the bytes.
            None => {
                match o.get(i..) {
                    // Able to get some of the bytes, pad them out.
                    Some(slice) => {
                        let mut bytes = slice.to_vec();
                        bytes.resize(MERKLE_HASH_CHUNK, 0);
                        hash(&bytes)
                    }
                    // Unable to get any bytes, use the empty-chunk hash.
                    None => empty_chunk_hash.clone(),
                }
            }
        };

        o[j..j + HASHSIZE].copy_from_slice(&digest);
    }

    o
}
2019-04-15 01:14:30 +00:00
fn num_sanitized_leaves(num_bytes: usize) -> usize {
let leaves = (num_bytes + HASHSIZE - 1) / HASHSIZE;
leaves.next_power_of_two()
}
/// Returns the total node count (leaves + internal nodes) of a perfectly
/// balanced binary tree with `num_leaves` leaves.
///
/// `num_leaves` must be >= 1 (guaranteed by `num_sanitized_leaves`, which
/// never returns 0); otherwise the subtraction would underflow.
fn num_nodes(num_leaves: usize) -> usize {
    debug_assert!(num_leaves > 0, "a tree must have at least one leaf");
    2 * num_leaves - 1
}
/// Implements `tree_hash::TreeHash` for `$type` by treating its SSZ encoding
/// as a fixed-length "vector": the encoded bytes are merkleized directly
/// (no length mix-in is performed by `merkle_root`).
#[macro_export]
macro_rules! tree_hash_ssz_encoding_as_vector {
    ($type: ident) => {
        impl tree_hash::TreeHash for $type {
            fn tree_hash_type() -> tree_hash::TreeHashType {
                tree_hash::TreeHashType::Vector
            }

            fn tree_hash_packed_encoding(&self) -> Vec<u8> {
                unreachable!("Vector should never be packed.")
            }

            fn tree_hash_packing_factor() -> usize {
                unreachable!("Vector should never be packed.")
            }

            fn tree_hash_root(&self) -> Vec<u8> {
                // Merkleize the raw SSZ encoding of the value.
                tree_hash::merkle_root(&ssz::ssz_encode(self))
            }
        }
    };
}
/// Implements `tree_hash::TreeHash` for `$type` by treating its SSZ encoding
/// as a variable-length "list": the root is delegated to the `TreeHash`
/// impl of the encoded `Vec<u8>`.
// NOTE(review): presumably that `Vec<u8>` impl (in `impls`) mixes the length
// into the root, as SSZ list hashing requires — confirm against `impls`.
#[macro_export]
macro_rules! tree_hash_ssz_encoding_as_list {
    ($type: ident) => {
        impl tree_hash::TreeHash for $type {
            fn tree_hash_type() -> tree_hash::TreeHashType {
                tree_hash::TreeHashType::List
            }

            fn tree_hash_packed_encoding(&self) -> Vec<u8> {
                unreachable!("List should never be packed.")
            }

            fn tree_hash_packing_factor() -> usize {
                unreachable!("List should never be packed.")
            }

            fn tree_hash_root(&self) -> Vec<u8> {
                ssz::ssz_encode(self).tree_hash_root()
            }
        }
    };
}