Implement CachedTreeHash for TreeHashVector
parent f1d8224d89
commit 15f81c0907
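
TreeHashVector<T> now implements CachedTreeHash by forwarding to the new public helpers in cached_tree_hash::impls::vec (new_tree_hash_cache, produce_overlay, update_tree_hash_cache), which are factored out of the existing Vec<T> implementation below. As a rough caller-side sketch (not part of this commit: it assumes a TreeHashVector<u64> is already in scope and uses only the trait methods and re-exports visible in this diff):

    use cached_tree_hash::{CachedTreeHash, Error, TreeHashCache};

    // Hypothetical helper: build a cache for a vector of basic (u64) items.
    fn build_cache(vec: &TreeHashVector<u64>) -> Result<TreeHashCache, Error> {
        // How many chunks the overlay reserves for this vector (falls back to 1).
        let _chunks = vec.num_tree_hash_cache_chunks();
        // Create the cache with the vector rooted at tree depth 0.
        vec.new_tree_hash_cache(0)
    }

After the vector's contents change, the same cache can be refreshed via update_tree_hash_cache, which this impl delegates to cached_tree_hash::impls::vec::update_tree_hash_cache.
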
@@ -7,6 +7,7 @@ edition = "2018"
 [dependencies]
 bls = { path = "../utils/bls" }
 boolean-bitfield = { path = "../utils/boolean-bitfield" }
+cached_tree_hash = { path = "../utils/cached_tree_hash" }
 dirs = "1.0"
 derivative = "1.0"
 ethereum-types = "0.5"
@@ -1,4 +1,5 @@
 use crate::test_utils::{RngCore, TestRandom};
+use cached_tree_hash::CachedTreeHash;
 use serde_derive::{Deserialize, Serialize};
 use ssz::{Decodable, DecodeError, Encodable, SszStream};
 use std::ops::{Deref, DerefMut};
@@ -54,6 +55,43 @@ where
     }
 }
 
+impl<T> CachedTreeHash<TreeHashVector<T>> for TreeHashVector<T>
+where
+    T: CachedTreeHash<T> + TreeHash,
+{
+    fn new_tree_hash_cache(
+        &self,
+        depth: usize,
+    ) -> Result<cached_tree_hash::TreeHashCache, cached_tree_hash::Error> {
+        let (cache, _overlay) = cached_tree_hash::impls::vec::new_tree_hash_cache(self, depth)?;
+
+        Ok(cache)
+    }
+
+    fn num_tree_hash_cache_chunks(&self) -> usize {
+        cached_tree_hash::BTreeOverlay::new(self, 0, 0)
+            .and_then(|o| Ok(o.num_chunks()))
+            .unwrap_or_else(|_| 1)
+    }
+
+    fn tree_hash_cache_overlay(
+        &self,
+        chunk_offset: usize,
+        depth: usize,
+    ) -> Result<cached_tree_hash::BTreeOverlay, cached_tree_hash::Error> {
+        cached_tree_hash::impls::vec::produce_overlay(self, chunk_offset, depth)
+    }
+
+    fn update_tree_hash_cache(
+        &self,
+        cache: &mut cached_tree_hash::TreeHashCache,
+    ) -> Result<(), cached_tree_hash::Error> {
+        cached_tree_hash::impls::vec::update_tree_hash_cache(self, cache)?;
+
+        Ok(())
+    }
+}
+
 impl<T> Encodable for TreeHashVector<T>
 where
     T: Encodable,
@@ -1,7 +1,7 @@
 use super::*;
 use crate::merkleize::merkleize;
 
-mod vec;
+pub mod vec;
 
 impl CachedTreeHash<u64> for u64 {
     fn new_tree_hash_cache(&self, _depth: usize) -> Result<TreeHashCache, Error> {
@@ -6,23 +6,7 @@ where
     T: CachedTreeHash<T> + TreeHash,
 {
     fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error> {
-        let overlay = self.tree_hash_cache_overlay(0, depth)?;
-
-        let mut cache = match T::tree_hash_type() {
-            TreeHashType::Basic => TreeHashCache::from_bytes(
-                merkleize(get_packed_leaves(self)?),
-                false,
-                Some(overlay.clone()),
-            ),
-            TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
-                let subtrees = self
-                    .iter()
-                    .map(|item| TreeHashCache::new(item, depth + 1))
-                    .collect::<Result<Vec<TreeHashCache>, _>>()?;
-
-                TreeHashCache::from_leaves_and_subtrees(self, subtrees, depth)
-            }
-        }?;
+        let (mut cache, overlay) = new_tree_hash_cache(self, depth)?;
 
         cache.add_length_nodes(overlay.chunk_range(), self.len())?;
 
@@ -33,7 +17,7 @@ where
         BTreeOverlay::new(self, 0, 0)
             .and_then(|o| Ok(o.num_chunks()))
             .unwrap_or_else(|_| 1)
-            + 2
+            + 2 // Add two extra nodes to cater for the length.
     }
 
     fn tree_hash_cache_overlay(
@@ -41,11 +25,61 @@ where
         chunk_offset: usize,
         depth: usize,
     ) -> Result<BTreeOverlay, Error> {
+        produce_overlay(self, chunk_offset, depth)
+    }
+
+    fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
+        // Skip the length-mixed-in root node.
+        cache.chunk_index += 1;
+
+        // Update the cache, returning the new overlay.
+        let new_overlay = update_tree_hash_cache(self, cache)?;
+
+        // Mix in length
+        cache.mix_in_length(new_overlay.chunk_range(), self.len())?;
+
+        // Skip an extra node to clear the length node.
+        cache.chunk_index = new_overlay.next_node() + 1;
+
+        Ok(())
+    }
+}
+
+pub fn new_tree_hash_cache<T: CachedTreeHash<T>>(
+    vec: &Vec<T>,
+    depth: usize,
+) -> Result<(TreeHashCache, BTreeOverlay), Error> {
+    let overlay = vec.tree_hash_cache_overlay(0, depth)?;
+
+    let cache = match T::tree_hash_type() {
+        TreeHashType::Basic => TreeHashCache::from_bytes(
+            merkleize(get_packed_leaves(vec)?),
+            false,
+            Some(overlay.clone()),
+        ),
+        TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
+            let subtrees = vec
+                .iter()
+                .map(|item| TreeHashCache::new(item, depth + 1))
+                .collect::<Result<Vec<TreeHashCache>, _>>()?;
+
+            TreeHashCache::from_leaves_and_subtrees(vec, subtrees, depth)
+        }
+    }?;
+
+    Ok((cache, overlay))
+}
+
+pub fn produce_overlay<T: CachedTreeHash<T>>(
+    vec: &Vec<T>,
+    chunk_offset: usize,
+    depth: usize,
+) -> Result<BTreeOverlay, Error> {
     let lengths = match T::tree_hash_type() {
         TreeHashType::Basic => {
             // Ceil division.
-            let num_leaves = (self.len() + T::tree_hash_packing_factor() - 1)
-                / T::tree_hash_packing_factor();
+            let num_leaves =
+                (vec.len() + T::tree_hash_packing_factor() - 1) / T::tree_hash_packing_factor();
 
             // Disallow zero-length as an empty list still has one all-padding node.
             vec![1; std::cmp::max(1, num_leaves)]
@@ -53,7 +87,7 @@ where
         TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
             let mut lengths = vec![];
 
-            for item in self {
+            for item in vec {
                 lengths.push(item.num_tree_hash_cache_chunks())
             }
 
@@ -66,15 +100,15 @@ where
         }
     };
 
-        BTreeOverlay::from_lengths(chunk_offset, self.len(), depth, lengths)
-    }
-
-    fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
-        // Skip the length-mixed-in root node.
-        cache.chunk_index += 1;
-
+    BTreeOverlay::from_lengths(chunk_offset, vec.len(), depth, lengths)
+}
+
+pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
+    vec: &Vec<T>,
+    cache: &mut TreeHashCache,
+) -> Result<BTreeOverlay, Error> {
     let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
-    let new_overlay = BTreeOverlay::new(self, cache.chunk_index, old_overlay.depth)?;
+    let new_overlay = BTreeOverlay::new(vec, cache.chunk_index, old_overlay.depth)?;
 
     cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;
 
@@ -96,7 +130,7 @@ where
                 // Attempt to get the item for this portion of the chunk. If it exists,
                 // update `buf` with it's serialized bytes. If it doesn't exist, update
                 // `buf` with padding.
-                match self.get(i * T::tree_hash_packing_factor() + j) {
+                match vec.get(i * T::tree_hash_packing_factor() + j) {
                     Some(item) => {
                         buf_slice.copy_from_slice(&item.tree_hash_packed_encoding());
                     }
@@ -123,15 +157,14 @@ where
             (Some(_old), Some(new)) => {
                 cache.chunk_index = new.start;
 
-                self[i].update_tree_hash_cache(cache)?;
+                vec[i].update_tree_hash_cache(cache)?;
             }
             // The item did not exist in the previous list but does exist in this list.
             //
             // Viz., the list has been lengthened.
             (None, Some(new)) => {
                 let (bytes, mut bools, overlays) =
-                    TreeHashCache::new(&self[i], new_overlay.depth + 1)?
-                        .into_components();
+                    TreeHashCache::new(&vec[i], new_overlay.depth + 1)?.into_components();
 
                 // Record the number of overlays, this will be used later in the fn.
                 let num_overlays = overlays.len();
@@ -175,14 +208,7 @@ where
 
     cache.update_internal_nodes(&new_overlay)?;
 
-        // Mix in length
-        cache.mix_in_length(new_overlay.chunk_range(), self.len())?;
-
-        // Skip an extra node to clear the length node.
-        cache.chunk_index = new_overlay.next_node() + 1;
-
-        Ok(())
-    }
+    Ok(new_overlay)
 }
 
 fn get_packed_leaves<T>(vec: &Vec<T>) -> Result<Vec<u8>, Error>
@@ -4,7 +4,7 @@ use tree_hash::{TreeHash, TreeHashType, BYTES_PER_CHUNK, HASHSIZE};
 
 mod btree_overlay;
 mod errors;
-mod impls;
+pub mod impls;
 pub mod merkleize;
 mod resize;
 mod tree_hash_cache;
@@ -56,11 +56,11 @@ impl TreeHash for [u8; 4] {
     }
 
     fn tree_hash_packed_encoding(&self) -> Vec<u8> {
-        panic!("bytesN should never be packed.")
+        unreachable!("bytesN should never be packed.")
     }
 
     fn tree_hash_packing_factor() -> usize {
-        panic!("bytesN should never be packed.")
+        unreachable!("bytesN should never be packed.")
     }
 
     fn tree_hash_root(&self) -> Vec<u8> {
@@ -49,6 +49,7 @@ macro_rules! tree_hash_ssz_encoding_as_vector {
         }
     };
 }
 
+#[macro_export]
 macro_rules! tree_hash_ssz_encoding_as_list {
     ($type: ident) => {