// lighthouse/eth2/utils/cached_tree_hash/src/tree_hash_cache.rs

use super::*;
use crate::merkleize::{merkleize, pad_for_leaf_count};
use int_to_bytes::int_to_bytes32;
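
/// Stores the tree-hash cache for an object: the full Merkle tree as contiguous chunk bytes
/// (`cache`), a per-chunk modification flag (`chunk_modified`), and the `BTreeOverlay`s that
/// describe how each (sub-)tree is laid out within those chunks.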
#[derive(Debug, PartialEq, Clone)]
pub struct TreeHashCache {
pub cache: Vec<u8>,
pub chunk_modified: Vec<bool>,
pub overlays: Vec<BTreeOverlay>,
pub chunk_index: usize,
pub overlay_index: usize,
}
impl From<TreeHashCache> for Vec<u8> {
    fn from(cache: TreeHashCache) -> Vec<u8> {
        cache.cache
    }
}
impl TreeHashCache {
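    /// Build a new cache for `item`, delegating to its `CachedTreeHash::new_tree_hash_cache`
    /// implementation.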
pub fn new<T>(item: &T, depth: usize) -> Result<Self, Error>
where
T: CachedTreeHash<T>,
{
item.new_tree_hash_cache(depth)
}
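
    /// Build a cache for `item` from the already-built caches of its fields
    /// (`leaves_and_subtrees`): their roots become the leaves of a new tree whose internal
    /// nodes are produced by `merkleize`, and their caches are appended as subtrees.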
pub fn from_leaves_and_subtrees<T>(
item: &T,
leaves_and_subtrees: Vec<Self>,
depth: usize,
) -> Result<Self, Error>
where
T: CachedTreeHash<T>,
{
let overlay = BTreeOverlay::new(item, 0, depth);
        // Note how many leaves were provided. If it is not a power-of-two, we'll need to pad it
        // out later.
let num_provided_leaf_nodes = leaves_and_subtrees.len();
// Allocate enough bytes to store the internal nodes and the leaves and subtrees, then fill
// all the to-be-built internal nodes with zeros and append the leaves and subtrees.
let internal_node_bytes = overlay.num_internal_nodes() * BYTES_PER_CHUNK;
let leaves_and_subtrees_bytes = leaves_and_subtrees
.iter()
.fold(0, |acc, t| acc + t.bytes_len());
let mut cache = Vec::with_capacity(leaves_and_subtrees_bytes + internal_node_bytes);
cache.resize(internal_node_bytes, 0);
// Allocate enough bytes to store all the leaves.
let mut leaves = Vec::with_capacity(overlay.num_leaf_nodes() * HASHSIZE);
let mut overlays = Vec::with_capacity(leaves_and_subtrees.len());
if T::tree_hash_type() == TreeHashType::List {
overlays.push(overlay);
}
// Iterate through all of the leaves/subtrees, adding their root as a leaf node and then
// concatenating their merkle trees.
for t in leaves_and_subtrees {
leaves.append(&mut t.root()?.to_vec());
let (mut bytes, _bools, mut t_overlays) = t.into_components();
cache.append(&mut bytes);
overlays.append(&mut t_overlays);
}
// Pad the leaves to an even power-of-two, using zeros.
pad_for_leaf_count(num_provided_leaf_nodes, &mut cache);
        // Merkleize the leaves, then split the leaf nodes off the result (only the internal
        // nodes are needed). Replace the all-zeros internal nodes created earlier with the
        // internal nodes generated by `merkleize`.
let mut merkleized = merkleize(leaves);
merkleized.split_off(internal_node_bytes);
cache.splice(0..internal_node_bytes, merkleized);
Ok(Self {
chunk_modified: vec![false; cache.len() / BYTES_PER_CHUNK],
cache,
overlays,
chunk_index: 0,
overlay_index: 0,
})
}
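
    /// Build a cache directly from chunk-aligned `bytes`, marking every chunk with
    /// `initial_modified_state` and optionally attaching a single overlay.
    ///
    /// Returns an error if `bytes` is not an exact multiple of `BYTES_PER_CHUNK`.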
pub fn from_bytes(
bytes: Vec<u8>,
initial_modified_state: bool,
overlay: Option<BTreeOverlay>,
) -> Result<Self, Error> {
if bytes.len() % BYTES_PER_CHUNK > 0 {
return Err(Error::BytesAreNotEvenChunks(bytes.len()));
}
let overlays = match overlay {
Some(overlay) => vec![overlay],
None => vec![],
};
Ok(Self {
chunk_modified: vec![initial_modified_state; bytes.len() / BYTES_PER_CHUNK],
cache: bytes,
overlays,
chunk_index: 0,
overlay_index: 0,
})
}
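
    /// Return a clone of the overlay at `overlay_index`, with its offset set to `chunk_index`.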
pub fn get_overlay(
&self,
overlay_index: usize,
chunk_index: usize,
) -> Result<BTreeOverlay, Error> {
let mut overlay = self
.overlays
.get(overlay_index)
.ok_or_else(|| Error::NoOverlayForIndex(overlay_index))?
.clone();
overlay.offset = chunk_index;
Ok(overlay)
}
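
    /// Mark every chunk in the cache as unmodified.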
pub fn reset_modifications(&mut self) {
for chunk_modified in &mut self.chunk_modified {
*chunk_modified = false;
}
}
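
    /// Replace the overlay at `overlay_index` with `new_overlay`, growing or shrinking the
    /// cached tree if the number of leaf nodes has changed. Returns the overlay that was
    /// replaced.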
pub fn replace_overlay(
&mut self,
overlay_index: usize,
chunk_index: usize,
new_overlay: BTreeOverlay,
) -> Result<BTreeOverlay, Error> {
let old_overlay = self.get_overlay(overlay_index, chunk_index)?;
// If the merkle tree required to represent the new list is of a different size to the one
// required for the previous list, then update our cache.
//
        // This grows/shrinks the bytes to accommodate the new tree, preserving as much of the tree
// as possible.
if new_overlay.num_leaf_nodes() != old_overlay.num_leaf_nodes() {
            // Get slices of the existing tree from the cache.
let (old_bytes, old_flags) = self
.slices(old_overlay.chunk_range())
.ok_or_else(|| Error::UnableToObtainSlices)?;
let (new_bytes, new_bools) =
if new_overlay.num_leaf_nodes() > old_overlay.num_leaf_nodes() {
resize::grow_merkle_cache(
old_bytes,
old_flags,
old_overlay.height(),
new_overlay.height(),
)
.ok_or_else(|| Error::UnableToGrowMerkleTree)?
} else {
resize::shrink_merkle_cache(
old_bytes,
old_flags,
old_overlay.height(),
new_overlay.height(),
new_overlay.num_chunks(),
)
.ok_or_else(|| Error::UnableToShrinkMerkleTree)?
};
            // Splice the resized tree (bytes and modified flags) over the existing chunks.
self.splice(old_overlay.chunk_range(), new_bytes, new_bools);
}
Ok(std::mem::replace(
&mut self.overlays[overlay_index],
new_overlay,
))
}
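
    /// Starting at `overlay_index`, remove the consecutive overlays that are deeper than
    /// `depth`, stopping at the first overlay that is no deeper than `depth`.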
pub fn remove_proceeding_child_overlays(&mut self, overlay_index: usize, depth: usize) {
let end = self
.overlays
.iter()
.skip(overlay_index)
.position(|o| o.depth <= depth)
            .map(|i| i + overlay_index)
.unwrap_or_else(|| self.overlays.len());
self.overlays.splice(overlay_index..end, vec![]);
}
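
    /// Re-hash each internal node of `overlay` whose children have been modified, processing
    /// the deepest parent/child pairs first.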
pub fn update_internal_nodes(&mut self, overlay: &BTreeOverlay) -> Result<(), Error> {
for (parent, children) in overlay.internal_parents_and_children().into_iter().rev() {
if self.either_modified(children)? {
self.modify_chunk(parent, &self.hash_children(children)?)?;
}
}
Ok(())
}
fn bytes_len(&self) -> usize {
self.cache.len()
}
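
    /// Return the root of the cached tree: the first `HASHSIZE` bytes of the cache.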
pub fn root(&self) -> Result<&[u8], Error> {
self.cache
.get(0..HASHSIZE)
.ok_or_else(|| Error::NoBytesForRoot)
}
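
    /// Replace the chunks in `chunk_range` with `bytes`, recording the supplied modification
    /// flags (`bools`) for the spliced-in chunks.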
pub fn splice(&mut self, chunk_range: Range<usize>, bytes: Vec<u8>, bools: Vec<bool>) {
        // Update the `chunk_modified` vec, recording the given flags for the spliced-in chunks.
self.chunk_modified.splice(chunk_range.clone(), bools);
self.cache
.splice(node_range_to_byte_range(&chunk_range), bytes);
}
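
    /// Overwrite `chunk` with `to` only if its contents differ, marking the chunk as modified
    /// when a change is made.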
pub fn maybe_update_chunk(&mut self, chunk: usize, to: &[u8]) -> Result<(), Error> {
let start = chunk * BYTES_PER_CHUNK;
let end = start + BYTES_PER_CHUNK;
if !self.chunk_equals(chunk, to)? {
self.cache
.get_mut(start..end)
.ok_or_else(|| Error::NoModifiedFieldForChunk(chunk))?
.copy_from_slice(to);
self.chunk_modified[chunk] = true;
}
Ok(())
}
fn slices(&self, chunk_range: Range<usize>) -> Option<(&[u8], &[bool])> {
Some((
self.cache.get(node_range_to_byte_range(&chunk_range))?,
self.chunk_modified.get(chunk_range)?,
))
}
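
    /// Overwrite `chunk` with `to` and mark it as modified.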
pub fn modify_chunk(&mut self, chunk: usize, to: &[u8]) -> Result<(), Error> {
let start = chunk * BYTES_PER_CHUNK;
let end = start + BYTES_PER_CHUNK;
self.cache
.get_mut(start..end)
.ok_or_else(|| Error::NoBytesForChunk(chunk))?
.copy_from_slice(to);
self.chunk_modified[chunk] = true;
Ok(())
}
fn get_chunk(&self, chunk: usize) -> Result<&[u8], Error> {
let start = chunk * BYTES_PER_CHUNK;
let end = start + BYTES_PER_CHUNK;
Ok(self
.cache
.get(start..end)
.ok_or_else(|| Error::NoModifiedFieldForChunk(chunk))?)
}
fn chunk_equals(&mut self, chunk: usize, other: &[u8]) -> Result<bool, Error> {
Ok(self.get_chunk(chunk)? == other)
}
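
    /// Return `true` if `chunk` has been marked as modified.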
pub fn changed(&self, chunk: usize) -> Result<bool, Error> {
self.chunk_modified
.get(chunk)
.cloned()
.ok_or_else(|| Error::NoModifiedFieldForChunk(chunk))
}
fn either_modified(&self, children: (usize, usize)) -> Result<bool, Error> {
Ok(self.changed(children.0)? | self.changed(children.1)?)
}
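
    /// Return the hash of the concatenation of the two child chunks.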
pub fn hash_children(&self, children: (usize, usize)) -> Result<Vec<u8>, Error> {
let mut child_bytes = Vec::with_capacity(BYTES_PER_CHUNK * 2);
child_bytes.append(&mut self.get_chunk(children.0)?.to_vec());
child_bytes.append(&mut self.get_chunk(children.1)?.to_vec());
Ok(hash(&child_bytes))
}
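
    /// Surround the tree in `chunk_range` with two extra chunks: the trailing chunk stores the
    /// serialized list `length` and the leading chunk stores `hash(root, length)`.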
pub fn add_length_nodes(
&mut self,
chunk_range: Range<usize>,
length: usize,
) -> Result<(), Error> {
self.chunk_modified[chunk_range.start] = true;
let byte_range = node_range_to_byte_range(&chunk_range);
// Add the last node.
self.cache
.splice(byte_range.end..byte_range.end, vec![0; HASHSIZE]);
self.chunk_modified
.splice(chunk_range.end..chunk_range.end, vec![false]);
// Add the first node.
self.cache
.splice(byte_range.start..byte_range.start, vec![0; HASHSIZE]);
self.chunk_modified
.splice(chunk_range.start..chunk_range.start, vec![false]);
self.mix_in_length(chunk_range.start + 1..chunk_range.end + 1, length)?;
Ok(())
}
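
    /// Store the serialized `length` in the chunk at `chunk_range.end` and, if either the root
    /// or the length has changed, write `hash(root, length)` into the chunk preceding
    /// `chunk_range.start`.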
pub fn mix_in_length(&mut self, chunk_range: Range<usize>, length: usize) -> Result<(), Error> {
// Update the length chunk.
self.maybe_update_chunk(chunk_range.end, &int_to_bytes32(length as u64))?;
// Update the mixed-in root if the main root or the length have changed.
let children = (chunk_range.start, chunk_range.end);
if self.either_modified(children)? {
self.modify_chunk(chunk_range.start - 1, &self.hash_children(children)?)?;
}
Ok(())
}
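
    /// Consume the cache, returning its raw parts: `(cache bytes, chunk_modified flags,
    /// overlays)`.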
pub fn into_components(self) -> (Vec<u8>, Vec<bool>, Vec<BTreeOverlay>) {
(self.cache, self.chunk_modified, self.overlays)
}
}
fn node_range_to_byte_range(node_range: &Range<usize>) -> Range<usize> {
node_range.start * HASHSIZE..node_range.end * HASHSIZE
}