// lighthouse/eth2/utils/tree_hash/src/lib.rs

use hashing::hash;
use int_to_bytes::int_to_bytes32;
use std::ops::Range;

mod btree_overlay;
mod cached_tree_hash;
mod impls;
mod resize;

pub use btree_overlay::BTreeOverlay;
pub use cached_tree_hash::TreeHashCache;

/// The number of bytes in a single SSZ chunk (leaf).
pub const BYTES_PER_CHUNK: usize = 32;
/// The number of bytes produced by the hash function.
pub const HASHSIZE: usize = 32;
/// The number of bytes hashed together to produce one parent node (two child chunks).
pub const MERKLE_HASH_CHUNCK: usize = 2 * BYTES_PER_CHUNK;

#[derive(Debug, PartialEq, Clone)]
pub enum Error {
    ShouldNotProduceBTreeOverlay,
    NoFirstNode,
    NoBytesForRoot,
    UnableToObtainSlices,
    UnableToGrowMerkleTree,
    UnableToShrinkMerkleTree,
    ShouldNeverBePacked(ItemType),
    BytesAreNotEvenChunks(usize),
    NoModifiedFieldForChunk(usize),
    NoBytesForChunk(usize),
}

#[derive(Debug, PartialEq, Clone)]
pub enum ItemType {
    Basic,
    List,
    Composite,
}

pub trait CachedTreeHash<T>: CachedTreeHashSubTree<T> + Sized {
    fn update_internal_tree_hash_cache(self, old: T) -> Result<(Self, Self), Error>;

    fn cached_tree_hash_root(&self) -> Option<Vec<u8>>;

    fn clone_without_tree_hash_cache(&self) -> Self;
}

pub trait CachedTreeHashSubTree<Item> {
    fn item_type() -> ItemType;

    fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error>;

    fn packed_encoding(&self) -> Result<Vec<u8>, Error>;

    fn packing_factor() -> usize;

    fn new_cache(&self) -> Result<TreeHashCache, Error>;

    fn update_cache(
        &self,
        other: &Item,
        cache: &mut TreeHashCache,
        chunk: usize,
    ) -> Result<usize, Error>;
}

/// Returns the indices of the children of `parent`, assuming the tree is stored as a flat
/// array where the children of node `n` live at `2n + 1` and `2n + 2`.
fn children(parent: usize) -> (usize, usize) {
    ((2 * parent + 1), (2 * parent + 2))
}

/// Returns the total number of nodes in a full binary tree with `num_leaves` leaves.
fn num_nodes(num_leaves: usize) -> usize {
    2 * num_leaves - 1
}

/// Converts a range of node indices into the corresponding range of byte indices.
fn node_range_to_byte_range(node_range: &Range<usize>) -> Range<usize> {
    node_range.start * HASHSIZE..node_range.end * HASHSIZE
}

/// Splits `values` into a power-of-two number of identical-length chunks (padding with `0`)
/// and merkleizes them, returning the entire merkle tree.
///
/// The root hash is `merkleize(values)[0..BYTES_PER_CHUNK]`.
pub fn merkleize(values: Vec<u8>) -> Vec<u8> {
    let values = sanitise_bytes(values);

    let leaves = values.len() / HASHSIZE;

    if leaves == 0 {
        panic!("No full leaves");
    }

    if !leaves.is_power_of_two() {
        panic!("leaves is not power of two");
    }

    // Reserve space for the internal nodes at the front of the buffer, then append the leaves.
    let mut o: Vec<u8> = vec![0; (num_nodes(leaves) - leaves) * HASHSIZE];
    o.append(&mut values.to_vec());

    let mut i = o.len();
    let mut j = o.len() - values.len();

    // Hash pairs of child chunks from the back of the buffer forwards, writing each parent
    // hash into the internal-node region at the front.
    while i >= MERKLE_HASH_CHUNCK {
        i -= MERKLE_HASH_CHUNCK;
        let hash = hash(&o[i..i + MERKLE_HASH_CHUNCK]);

        j -= HASHSIZE;
        o[j..j + HASHSIZE].copy_from_slice(&hash);
    }

    o
}
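
// Illustrative only (not part of the original file): a minimal sketch of the layout produced by
// `merkleize`, assuming `hashing::hash` (imported above) takes a byte slice and returns a
// 32-byte `Vec<u8>`. For exactly two chunks, the root occupies the first `BYTES_PER_CHUNK`
// bytes of the returned tree and equals the hash of the two chunks concatenated.
#[cfg(test)]
mod merkleize_example {
    use super::*;

    #[test]
    fn two_chunk_root_is_hash_of_concatenation() {
        let mut values = vec![1_u8; BYTES_PER_CHUNK];
        values.extend_from_slice(&[2_u8; BYTES_PER_CHUNK]);

        let tree = merkleize(values.clone());

        // The root hash lives in the first chunk of the returned tree.
        assert_eq!(&tree[0..BYTES_PER_CHUNK], &hash(&values)[..]);
    }
}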

/// Pads `bytes` with zeros so that it contains a power-of-two number of full, `HASHSIZE`-byte
/// leaves.
pub fn sanitise_bytes(mut bytes: Vec<u8>) -> Vec<u8> {
    let present_leaves = num_unsanitized_leaves(bytes.len());
    let required_leaves = present_leaves.next_power_of_two();

    if (present_leaves != required_leaves) | last_leaf_needs_padding(bytes.len()) {
        bytes.resize(num_bytes(required_leaves), 0);
    }

    bytes
}
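
// Illustrative only (not part of the original file): a small sketch of the padding behaviour of
// `sanitise_bytes`. 33 bytes spill into a second leaf, so the vector is padded to two full
// chunks; 96 bytes occupy three leaves, so it is padded out to the next power of two (four).
#[cfg(test)]
mod sanitise_bytes_example {
    use super::*;

    #[test]
    fn pads_to_a_power_of_two_of_full_leaves() {
        assert_eq!(sanitise_bytes(vec![1; 33]).len(), 2 * HASHSIZE);
        assert_eq!(sanitise_bytes(vec![1; 96]).len(), 4 * HASHSIZE);
    }
}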

/// Appends zero-filled chunks to `bytes` so that its leaf count grows from `num_leaves` to the
/// next power of two.
fn pad_for_leaf_count(num_leaves: usize, bytes: &mut Vec<u8>) {
    let required_leaves = num_leaves.next_power_of_two();

    bytes.resize(
        bytes.len() + (required_leaves - num_leaves) * BYTES_PER_CHUNK,
        0,
    );
}

/// Returns `true` if the final leaf is only partially filled (i.e., `num_bytes` is not a
/// multiple of `HASHSIZE`).
fn last_leaf_needs_padding(num_bytes: usize) -> bool {
    num_bytes % HASHSIZE != 0
}

/// Returns the number of leaves needed to hold `num_bytes` bytes, rounding up.
fn num_unsanitized_leaves(num_bytes: usize) -> usize {
    (num_bytes + HASHSIZE - 1) / HASHSIZE
}

/// Returns the number of leaves needed to hold `num_bytes` bytes, rounded up to the next power
/// of two.
fn num_sanitized_leaves(num_bytes: usize) -> usize {
    let leaves = (num_bytes + HASHSIZE - 1) / HASHSIZE;
    leaves.next_power_of_two()
}

/// Returns the number of bytes required to store `num_leaves` full leaves.
fn num_bytes(num_leaves: usize) -> usize {
    num_leaves * HASHSIZE
}