Remove num_items from BTreeOverlay

Paul Hauner 2019-04-26 11:34:07 +10:00
parent 15f81c0907
commit 794b48078c
GPG Key ID: D362883A9218FCC6
6 changed files with 27 additions and 67 deletions

View File

@@ -4,33 +4,22 @@ use super::*;
 pub struct BTreeOverlay {
     pub offset: usize,
     pub depth: usize,
-    pub num_items: usize,
     pub lengths: Vec<usize>,
 }
 
 impl BTreeOverlay {
-    pub fn new<T>(item: &T, initial_offset: usize, depth: usize) -> Result<Self, Error>
+    pub fn new<T>(item: &T, initial_offset: usize, depth: usize) -> Self
     where
         T: CachedTreeHash<T>,
     {
         item.tree_hash_cache_overlay(initial_offset, depth)
     }
 
-    pub fn from_lengths(
-        offset: usize,
-        num_items: usize,
-        depth: usize,
-        lengths: Vec<usize>,
-    ) -> Result<Self, Error> {
-        if lengths.is_empty() {
-            Err(Error::TreeCannotHaveZeroNodes)
-        } else {
-            Ok(Self {
-                offset,
-                num_items,
-                depth,
-                lengths,
-            })
-        }
+    pub fn from_lengths(offset: usize, depth: usize, lengths: Vec<usize>) -> Self {
+        Self {
+            offset,
+            depth,
+            lengths,
+        }
     }
@@ -96,7 +85,7 @@ impl BTreeOverlay {
     pub fn get_leaf_node(&self, i: usize) -> Result<Option<Range<usize>>, Error> {
         if i >= self.num_nodes() - self.num_padding_leaves() {
             Ok(None)
-        } else if (i == self.num_internal_nodes()) && (self.num_items == 0) {
+        } else if (i == self.num_internal_nodes()) && (self.lengths.len() == 0) {
             // If this is the first leaf node and the overlay contains zero items, return `None` as
             // this node must be padding.
             Ok(None)
@@ -177,7 +166,7 @@ mod test {
     use super::*;
 
     fn get_tree_a(n: usize) -> BTreeOverlay {
-        BTreeOverlay::from_lengths(0, n, 0, vec![1; n]).unwrap()
+        BTreeOverlay::from_lengths(0, 0, vec![1; n])
     }
 
     #[test]
@@ -215,7 +204,7 @@ mod test {
         let tree = get_tree_a(2);
         assert_eq!(tree.chunk_range(), 0..3);
 
-        let tree = BTreeOverlay::from_lengths(11, 4, 0, vec![1, 1]).unwrap();
+        let tree = BTreeOverlay::from_lengths(11, 0, vec![1, 1]);
         assert_eq!(tree.chunk_range(), 11..14);
     }
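The sketch below is standalone and not part of the commit; it mirrors the new `from_lengths` signature to show why the field could be dropped: the overlay keeps one entry in `lengths` per item, so `lengths.len()` carries the same information as `num_items`, and with the zero-length guard gone the constructor no longer needs to return a `Result`.

    // Standalone sketch (not from the crate): a trimmed-down overlay with the
    // new constructor shape. `num_items()` shows that the removed field is
    // implied by the lengths vector itself.
    #[derive(Clone, Debug)]
    pub struct BTreeOverlay {
        pub offset: usize,
        pub depth: usize,
        pub lengths: Vec<usize>,
    }

    impl BTreeOverlay {
        // Infallible: the `Error::TreeCannotHaveZeroNodes` guard is gone.
        pub fn from_lengths(offset: usize, depth: usize, lengths: Vec<usize>) -> Self {
            Self { offset, depth, lengths }
        }

        // Equivalent of the removed `num_items` field.
        pub fn num_items(&self) -> usize {
            self.lengths.len()
        }
    }

    fn main() {
        // Mirrors the updated test: two items, one chunk each, starting at chunk 11.
        let overlay = BTreeOverlay::from_lengths(11, 0, vec![1, 1]);
        assert_eq!(overlay.num_items(), 2);

        // An empty list is now simply an empty `lengths` vector.
        let empty = BTreeOverlay::from_lengths(0, 0, vec![]);
        assert_eq!(empty.num_items(), 0);
    }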

View File

@@ -16,13 +16,8 @@ impl CachedTreeHash<u64> for u64 {
         1
     }
 
-    fn tree_hash_cache_overlay(
-        &self,
-        _chunk_offset: usize,
-        _depth: usize,
-    ) -> Result<BTreeOverlay, Error> {
-        panic!("Basic should not produce overlay");
-        // BTreeOverlay::from_lengths(chunk_offset, 1, depth, vec![1])
+    fn tree_hash_cache_overlay(&self, _chunk_offset: usize, _depth: usize) -> BTreeOverlay {
+        unreachable!("Basic should not produce overlay");
     }
 
     fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
@@ -49,13 +44,8 @@ impl CachedTreeHash<usize> for usize {
         1
     }
 
-    fn tree_hash_cache_overlay(
-        &self,
-        _chunk_offset: usize,
-        _depth: usize,
-    ) -> Result<BTreeOverlay, Error> {
-        panic!("Basic should not produce overlay");
-        // BTreeOverlay::from_lengths(chunk_offset, 1, depth, vec![1])
+    fn tree_hash_cache_overlay(&self, _chunk_offset: usize, _depth: usize) -> BTreeOverlay {
+        unreachable!("Basic should not produce overlay");
     }
 
     fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
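For context, a rough standalone illustration (not from this crate) of why `tree_hash_cache_overlay` is unreachable for basic types: a bare `u64` serializes into a single 32-byte chunk, so there is no sub-tree of its own to describe; any overlay covering it is produced by the containing vector or struct. The helper below assumes SSZ-style little-endian packing padded to one chunk.

    // Illustrative only: a basic value occupies (at most) one 32-byte chunk,
    // so the leaf itself never needs an overlay describing internal structure.
    fn u64_as_chunk(value: u64) -> [u8; 32] {
        let mut chunk = [0u8; 32];
        // Little-endian bytes in the low 8 bytes, zero padding elsewhere.
        chunk[..8].copy_from_slice(&value.to_le_bytes());
        chunk
    }

    fn main() {
        let chunk = u64_as_chunk(42);
        assert_eq!(chunk[0], 42);
        assert_eq!(&chunk[8..], &[0u8; 24][..]);
    }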

View File

@@ -14,17 +14,11 @@ where
     }
 
     fn num_tree_hash_cache_chunks(&self) -> usize {
-        BTreeOverlay::new(self, 0, 0)
-            .and_then(|o| Ok(o.num_chunks()))
-            .unwrap_or_else(|_| 1)
-            + 2 // Add two extra nodes to cater for the length.
+        // Add two extra nodes to cater for the node before and after to allow mixing-in length.
+        BTreeOverlay::new(self, 0, 0).num_chunks() + 2
     }
 
-    fn tree_hash_cache_overlay(
-        &self,
-        chunk_offset: usize,
-        depth: usize,
-    ) -> Result<BTreeOverlay, Error> {
+    fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> BTreeOverlay {
         produce_overlay(self, chunk_offset, depth)
     }
@@ -49,7 +43,7 @@ pub fn new_tree_hash_cache<T: CachedTreeHash<T>>(
     vec: &Vec<T>,
     depth: usize,
 ) -> Result<(TreeHashCache, BTreeOverlay), Error> {
-    let overlay = vec.tree_hash_cache_overlay(0, depth)?;
+    let overlay = vec.tree_hash_cache_overlay(0, depth);
 
     let cache = match T::tree_hash_type() {
         TreeHashType::Basic => TreeHashCache::from_bytes(
@@ -74,7 +68,7 @@ pub fn produce_overlay<T: CachedTreeHash<T>>(
     vec: &Vec<T>,
     chunk_offset: usize,
     depth: usize,
-) -> Result<BTreeOverlay, Error> {
+) -> BTreeOverlay {
     let lengths = match T::tree_hash_type() {
         TreeHashType::Basic => {
             // Ceil division.
@@ -91,16 +85,11 @@ pub fn produce_overlay<T: CachedTreeHash<T>>(
                lengths.push(item.num_tree_hash_cache_chunks())
            }
 
-            // Disallow zero-length as an empty list still has one all-padding node.
-            if lengths.is_empty() {
-                lengths.push(1);
-            }
-
            lengths
         }
     };
 
-    BTreeOverlay::from_lengths(chunk_offset, vec.len(), depth, lengths)
+    BTreeOverlay::from_lengths(chunk_offset, depth, lengths)
 }
 
 pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
@@ -108,7 +97,7 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
     cache: &mut TreeHashCache,
 ) -> Result<BTreeOverlay, Error> {
     let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
-    let new_overlay = BTreeOverlay::new(vec, cache.chunk_index, old_overlay.depth)?;
+    let new_overlay = BTreeOverlay::new(vec, cache.chunk_index, old_overlay.depth);
 
     cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;
@@ -183,7 +172,7 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
                 //
                 // Viz., the list has been shortened.
                 (Some(old), None) => {
-                    if new_overlay.num_items == 0 {
+                    if vec.len() == 0 {
                         // In this case, the list has been made empty and we should make
                         // this node padding.
                         cache.maybe_update_chunk(new_overlay.root(), &[0; HASHSIZE])?;
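As a standalone sketch of the arithmetic behind the `// Ceil division.` comment above (and the two extra chunks reserved for mixing in the length): for a vector of basic values, the leaf count is the item count divided by the packing factor, rounded up. `packing_factor` here is a stand-in for `T::tree_hash_packing_factor()`; the exact crate internals may differ.

    // Rough sketch of the Basic-type leaf count: several basic values pack
    // into each 32-byte chunk, so round the item count up to whole chunks.
    fn basic_leaf_count(num_items: usize, packing_factor: usize) -> usize {
        (num_items + packing_factor - 1) / packing_factor
    }

    fn main() {
        // u64 values pack four to a chunk (32 bytes / 8 bytes each).
        assert_eq!(basic_leaf_count(0, 4), 0);
        assert_eq!(basic_leaf_count(4, 4), 1);
        assert_eq!(basic_leaf_count(5, 4), 2);
        // `num_tree_hash_cache_chunks` then adds 2 for the length mix-in nodes.
    }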

View File

@@ -14,11 +14,7 @@ pub use errors::Error;
 pub use tree_hash_cache::TreeHashCache;
 
 pub trait CachedTreeHash<Item>: TreeHash {
-    fn tree_hash_cache_overlay(
-        &self,
-        chunk_offset: usize,
-        depth: usize,
-    ) -> Result<BTreeOverlay, Error>;
+    fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> BTreeOverlay;
 
     fn num_tree_hash_cache_chunks(&self) -> usize;

View File

@@ -34,7 +34,7 @@ impl TreeHashCache {
     where
         T: CachedTreeHash<T>,
     {
-        let overlay = BTreeOverlay::new(item, 0, depth)?;
+        let overlay = BTreeOverlay::new(item, 0, depth);
 
         // Note how many leaves were provided. If is not a power-of-two, we'll need to pad it out
         // later.

View File

@@ -55,8 +55,6 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
     let idents_b = idents_a.clone();
     let idents_c = idents_a.clone();
 
-    let num_items = idents_a.len();
-
     let output = quote! {
         impl cached_tree_hash::CachedTreeHash<#name> for #name {
             fn new_tree_hash_cache(&self, depth: usize) -> Result<cached_tree_hash::TreeHashCache, cached_tree_hash::Error> {
@@ -74,23 +72,21 @@ pub fn subtree_derive(input: TokenStream) -> TokenStream {
             }
 
             fn num_tree_hash_cache_chunks(&self) -> usize {
-                cached_tree_hash::BTreeOverlay::new(self, 0, 0)
-                    .and_then(|o| Ok(o.num_chunks()))
-                    .unwrap_or_else(|_| 1)
+                cached_tree_hash::BTreeOverlay::new(self, 0, 0).num_chunks()
             }
 
-            fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> Result<cached_tree_hash::BTreeOverlay, cached_tree_hash::Error> {
+            fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> cached_tree_hash::BTreeOverlay {
                 let mut lengths = vec![];
 
                 #(
                     lengths.push(self.#idents_b.num_tree_hash_cache_chunks());
                 )*
 
-                cached_tree_hash::BTreeOverlay::from_lengths(chunk_offset, #num_items, depth, lengths)
+                cached_tree_hash::BTreeOverlay::from_lengths(chunk_offset, depth, lengths)
             }
 
             fn update_tree_hash_cache(&self, cache: &mut cached_tree_hash::TreeHashCache) -> Result<(), cached_tree_hash::Error> {
-                let overlay = cached_tree_hash::BTreeOverlay::new(self, cache.chunk_index, 0)?;
+                let overlay = cached_tree_hash::BTreeOverlay::new(self, cache.chunk_index, 0);
 
                 // Skip the chunk index to the first leaf node of this struct.
                 cache.chunk_index = overlay.first_leaf_node();
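Finally, a hand-expanded, self-contained sketch of roughly what `subtree_derive` now emits for a hypothetical two-field struct (the field names, types, and the local `Overlay` stand-in are illustrative, not the crate's real types): each field contributes one entry to `lengths`, so the `#num_items` count the macro used to interpolate is implicit in `lengths.len()`.

    // Hand-expanded approximation of the generated `tree_hash_cache_overlay`
    // after this commit, with the crate types replaced by a local stand-in.
    struct Overlay {
        offset: usize,
        depth: usize,
        lengths: Vec<usize>,
    }

    struct Foo {
        a: u64,
        b: [u8; 32],
    }

    impl Foo {
        fn tree_hash_cache_overlay(&self, chunk_offset: usize, depth: usize) -> Overlay {
            let mut lengths = vec![];
            lengths.push(1); // stands in for self.a.num_tree_hash_cache_chunks()
            lengths.push(1); // stands in for self.b.num_tree_hash_cache_chunks()
            // No field count is passed any more; it is implied by `lengths`.
            Overlay { offset: chunk_offset, depth, lengths }
        }
    }

    fn main() {
        let foo = Foo { a: 0, b: [0; 32] };
        let overlay = foo.tree_hash_cache_overlay(0, 0);
        assert_eq!(overlay.lengths.len(), 2);
    }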