// lighthouse/eth2/utils/cached_tree_hash/src/impls/vec.rs

use super::*;
use crate::merkleize::{merkleize, num_sanitized_leaves, sanitise_bytes};
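
/// A cached tree hash implementation for `Vec<T>`.
///
/// Maintains a `TreeHashCache` over the list's merkle tree and mixes the list
/// length into the root, so a mutation only re-hashes the chunks it touches.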
impl<T> CachedTreeHash<Vec<T>> for Vec<T>
where
    T: CachedTreeHash<T> + TreeHash,
{
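    /// Builds a new cache for this list.
    ///
    /// Basic items are packed into chunks and merkleized directly; composite
    /// items each build their own sub-cache, which are then merged with
    /// `from_leaves_and_subtrees`. The length nodes are appended last.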
    fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error> {
        let overlay = self.tree_hash_cache_overlay(0, depth)?;

        let mut cache = match T::tree_hash_type() {
            TreeHashType::Basic => TreeHashCache::from_bytes(
                merkleize(get_packed_leaves(self)?),
                false,
                Some(overlay.clone()),
            ),
            TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
                let subtrees = self
                    .iter()
                    .map(|item| TreeHashCache::new(item, depth + 1))
                    .collect::<Result<Vec<TreeHashCache>, _>>()?;

                TreeHashCache::from_leaves_and_subtrees(self, subtrees, depth)
            }
        }?;

        cache.add_length_nodes(overlay.chunk_range(), self.len())?;

        Ok(cache)
    }
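
    /// Returns the number of chunks this list occupies in the cache.
    ///
    /// The `+ 2` accounts for the two nodes added by `add_length_nodes`: the
    /// length-mixed-in root node and the length node itself.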
    fn num_tree_hash_cache_chunks(&self) -> usize {
        BTreeOverlay::new(self, 0, 0)
            .map(|o| o.num_chunks())
            .unwrap_or(1)
            + 2
    }
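
    /// Builds the `BTreeOverlay` describing this list's layout in the cache.
    ///
    /// For basic types the leaf count is the number of packed chunks; for
    /// composite types each item contributes its own chunk count. At least one
    /// leaf is always recorded, since an empty list still has one all-padding
    /// node.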
    fn tree_hash_cache_overlay(
        &self,
        chunk_offset: usize,
        depth: usize,
    ) -> Result<BTreeOverlay, Error> {
        let lengths = match T::tree_hash_type() {
            TreeHashType::Basic => {
                // Ceil division.
                let num_leaves = (self.len() + T::tree_hash_packing_factor() - 1)
                    / T::tree_hash_packing_factor();

                // Disallow a zero-length list; an empty list still has one all-padding node.
                vec![1; std::cmp::max(1, num_leaves)]
            }
            TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
                let mut lengths = vec![];

                for item in self {
                    lengths.push(item.num_tree_hash_cache_chunks())
                }

                // Disallow a zero-length list; an empty list still has one all-padding node.
                if lengths.is_empty() {
                    lengths.push(1);
                }

                lengths
            }
        };

        BTreeOverlay::from_lengths(chunk_offset, self.len(), depth, lengths)
    }
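
    /// Updates an existing cache in place to reflect this list's current contents.
    ///
    /// The overlay is rebuilt first so that items added or removed since the last
    /// update can be spliced in or out; the internal nodes are then re-hashed and
    /// the new length is mixed in.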
    fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
        // Skip the length-mixed-in root node.
        cache.chunk_index += 1;

        let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
        let new_overlay = BTreeOverlay::new(self, cache.chunk_index, old_overlay.depth)?;

        cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;

        cache.overlay_index += 1;
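
        // Worked example for the `Basic` arm below (an assumption based on the
        // standard SSZ packing of `u64`): `tree_hash_packing_factor()` is 4, so
        // each 32-byte chunk holds four 8-byte encodings and `item_bytes` is
        // HASHSIZE / 4 = 8.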
        match T::tree_hash_type() {
            TreeHashType::Basic => {
                let mut buf = vec![0; HASHSIZE];
                let item_bytes = HASHSIZE / T::tree_hash_packing_factor();

                // Iterate through each of the leaf nodes.
                for i in 0..new_overlay.num_leaf_nodes() {
                    // Iterate through the number of items that may be packed into the leaf node.
                    for j in 0..T::tree_hash_packing_factor() {
                        // Create a mut slice that can be filled with either a serialized item or
                        // padding.
                        let buf_slice = &mut buf[j * item_bytes..(j + 1) * item_bytes];

                        // Attempt to get the item for this portion of the chunk. If it exists,
                        // update `buf` with its serialized bytes. If it doesn't exist, update
                        // `buf` with padding.
                        match self.get(i * T::tree_hash_packing_factor() + j) {
                            Some(item) => {
                                buf_slice.copy_from_slice(&item.tree_hash_packed_encoding());
                            }
                            None => buf_slice.copy_from_slice(&vec![0; item_bytes]),
                        }
                    }

                    // Update the chunk if the generated `buf` is not the same as the cache.
                    let chunk = new_overlay.first_leaf_node() + i;
                    cache.maybe_update_chunk(chunk, &buf)?;
                }
            }
            TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
                for i in 0..new_overlay.num_leaf_nodes() {
                    // Adjust `i` so it is a leaf node for each of the overlays.
                    let old_i = i + old_overlay.num_internal_nodes();
                    let new_i = i + new_overlay.num_internal_nodes();

                    match (
                        old_overlay.get_leaf_node(old_i)?,
                        new_overlay.get_leaf_node(new_i)?,
                    ) {
                        // The item existed in the previous list and exists in the current list.
                        (Some(_old), Some(new)) => {
                            cache.chunk_index = new.start;

                            self[i].update_tree_hash_cache(cache)?;
                        }
                        // The item did not exist in the previous list but does exist in this list.
                        //
                        // Viz., the list has been lengthened.
                        (None, Some(new)) => {
                            let (bytes, mut bools, overlays) =
                                TreeHashCache::new(&self[i], new_overlay.depth + 1)?
                                    .into_components();

                            // Record the number of overlays; this will be used later in the fn.
                            let num_overlays = overlays.len();

                            // Flag the root node of the new tree as dirty.
                            bools[0] = true;

                            cache.splice(new.start..new.start + 1, bytes, bools);
                            cache
                                .overlays
                                .splice(cache.overlay_index..cache.overlay_index, overlays);

                            cache.overlay_index += num_overlays;
                        }
                        // The item existed in the previous list but does not exist in this list.
                        //
                        // Viz., the list has been shortened.
                        (Some(old), None) => {
                            if new_overlay.num_items == 0 {
                                // In this case the list has been made empty and we should make
                                // this node padding.
                                cache.maybe_update_chunk(new_overlay.root(), &[0; HASHSIZE])?;
                            } else {
                                // In this case there are some items in the new list and we should
                                // splice out the entire tree of the removed node, replacing it
                                // with a single padding node.
                                cache.splice(old, vec![0; HASHSIZE], vec![true]);
                            }
                        }
                        // The item didn't exist in the old list and doesn't exist in the new
                        // list; there is nothing to do.
                        (None, None) => {}
                    }
                }

                // Clean out any excess overlays that may remain if the list was shortened.
                cache.remove_proceeding_child_overlays(cache.overlay_index, new_overlay.depth);
            }
        }

        cache.update_internal_nodes(&new_overlay)?;

        // Mix in length.
        cache.mix_in_length(new_overlay.chunk_range(), self.len())?;

        // Skip an extra node to clear the length node.
        cache.chunk_index = new_overlay.next_node() + 1;

        Ok(())
    }
}
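
/// Packs each item's `tree_hash_packed_encoding` into a contiguous byte vector,
/// padded out to a whole number of chunks by `sanitise_bytes`.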
fn get_packed_leaves<T>(vec: &Vec<T>) -> Result<Vec<u8>, Error>
where
    T: CachedTreeHash<T>,
{
    let num_packed_bytes = (BYTES_PER_CHUNK / T::tree_hash_packing_factor()) * vec.len();
    let num_leaves = num_sanitized_leaves(num_packed_bytes);

    let mut packed = Vec::with_capacity(num_leaves * HASHSIZE);

    for item in vec {
        packed.append(&mut item.tree_hash_packed_encoding());
    }

    Ok(sanitise_bytes(packed))
}
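
// A minimal usage sketch, not part of the original file. It assumes that `u64`
// implements `CachedTreeHash<u64>` elsewhere in this crate, and that the
// `chunk_index`/`overlay_index` fields are visible from a test module here, as
// they are from the functions above. The crate may well expose a higher-level
// entry point for driving updates; this only exercises the impl directly.
#[cfg(test)]
mod vec_cache_sketch {
    use super::*;

    #[test]
    fn build_then_update() {
        let mut list: Vec<u64> = vec![1, 2, 3, 4];

        // Build the initial cache at depth 0; `new_tree_hash_cache` appends the
        // length nodes itself.
        let mut cache = list
            .new_tree_hash_cache(0)
            .expect("should build cache for non-empty list");

        // Lengthen the list, then re-walk the cache from the start so the
        // `(None, Some(new))` splice path above is exercised.
        list.push(5);
        cache.chunk_index = 0;
        cache.overlay_index = 0;
        list.update_tree_hash_cache(&mut cache)
            .expect("should update cache after push");
    }
}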