Tidy CachedTreeHash trait

This commit is contained in:
Paul Hauner 2019-04-15 11:37:29 +10:00
parent 0b5c10212d
commit c87a0fc588
No known key found for this signature in database
GPG Key ID: D362883A9218FCC6
4 changed files with 131 additions and 132 deletions

View File

@ -0,0 +1,100 @@
use super::*;
/// Maps the nodes of a binary merkle tree onto chunk offsets within a tree-hash cache.
///
/// Internal nodes occupy exactly one chunk each; a leaf may span several chunks
/// (its length is supplied to `from_lengths`). `offsets[i]` is the first chunk of
/// tree node `i`, with internal nodes stored before leaf nodes.
#[derive(Debug)]
pub struct BTreeOverlay {
    // Number of non-leaf nodes; also the index of the first leaf in `offsets`.
    pub num_internal_nodes: usize,
    // Leaf count, always a power of two (padded during construction).
    pub num_leaf_nodes: usize,
    // Chunk index of the root node (the initial offset the overlay was built at).
    pub first_node: usize,
    // First chunk index *after* this overlay; i.e. one-past-the-end of its chunk range.
    pub next_node: usize,
    offsets: Vec<usize>,
}
impl BTreeOverlay {
pub fn new<T>(item: &T, initial_offset: usize) -> Result<Self, Error>
where
T: CachedTreeHash<T>,
{
item.btree_overlay(initial_offset)
}
pub fn from_lengths(offset: usize, mut lengths: Vec<usize>) -> Result<Self, Error> {
// Extend it to the next power-of-two, if it is not already.
let num_leaf_nodes = if lengths.len().is_power_of_two() {
lengths.len()
} else {
let num_leaf_nodes = lengths.len().next_power_of_two();
lengths.resize(num_leaf_nodes, 1);
num_leaf_nodes
};
let num_nodes = num_nodes(num_leaf_nodes);
let num_internal_nodes = num_nodes - num_leaf_nodes;
let mut offsets = Vec::with_capacity(num_nodes);
offsets.append(&mut (offset..offset + num_internal_nodes).collect());
let mut next_node = num_internal_nodes + offset;
for i in 0..num_leaf_nodes {
offsets.push(next_node);
next_node += lengths[i];
}
Ok(Self {
num_internal_nodes,
num_leaf_nodes,
offsets,
first_node: offset,
next_node,
})
}
pub fn root(&self) -> usize {
self.first_node
}
pub fn height(&self) -> usize {
self.num_leaf_nodes.trailing_zeros() as usize
}
pub fn chunk_range(&self) -> Range<usize> {
self.first_node..self.next_node
}
pub fn total_chunks(&self) -> usize {
self.next_node - self.first_node
}
pub fn total_nodes(&self) -> usize {
self.num_internal_nodes + self.num_leaf_nodes
}
pub fn first_leaf_node(&self) -> Result<usize, Error> {
self.offsets
.get(self.num_internal_nodes)
.cloned()
.ok_or_else(|| Error::NoFirstNode)
}
/// Returns an iterator visiting each internal node, providing the left and right child chunks
/// for the node.
pub fn iter_internal_nodes<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (&'a usize, (&'a usize, &'a usize))> {
let internal_nodes = &self.offsets[0..self.num_internal_nodes];
internal_nodes.iter().enumerate().map(move |(i, parent)| {
let children = children(i);
(
parent,
(&self.offsets[children.0], &self.offsets[children.1]),
)
})
}
/// Returns an iterator visiting each leaf node, providing the chunk for that node.
pub fn iter_leaf_nodes<'a>(&'a self) -> impl DoubleEndedIterator<Item = &'a usize> {
let leaf_nodes = &self.offsets[self.num_internal_nodes..];
leaf_nodes.iter()
}
}

View File

@ -14,12 +14,12 @@ impl CachedTreeHash<u64> for u64 {
)?) )?)
} }
fn num_bytes(&self) -> usize { fn btree_overlay(&self, _chunk_offset: usize) -> Result<BTreeOverlay, Error> {
8 Err(Error::ShouldNotProduceBTreeOverlay)
} }
fn offsets(&self) -> Result<Vec<usize>, Error> { fn num_bytes(&self) -> usize {
Err(Error::ShouldNotProduceBTreeOverlay) 8
} }
fn num_child_nodes(&self) -> usize { fn num_child_nodes(&self) -> usize {
@ -71,21 +71,22 @@ where
} }
} }
fn offsets(&self) -> Result<Vec<usize>, Error> { fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
let offsets = match T::item_type() { //
let lengths = match T::item_type() {
ItemType::Basic => vec![1; self.len() / T::packing_factor()], ItemType::Basic => vec![1; self.len() / T::packing_factor()],
ItemType::Composite | ItemType::List => { ItemType::Composite | ItemType::List => {
let mut offsets = vec![]; let mut lengths = vec![];
for item in self { for item in self {
offsets.push(BTreeOverlay::new(item, 0)?.total_nodes()) lengths.push(BTreeOverlay::new(item, 0)?.total_nodes())
} }
offsets lengths
} }
}; };
Ok(offsets) BTreeOverlay::from_lengths(chunk_offset, lengths)
} }
fn num_child_nodes(&self) -> usize { fn num_child_nodes(&self) -> usize {
@ -180,7 +181,7 @@ where
(Some(old), None) => { (Some(old), None) => {
// Splice out the entire tree of the removed node, replacing it with a // Splice out the entire tree of the removed node, replacing it with a
// single padding node. // single padding node.
let end_chunk = BTreeOverlay::new(old, start_chunk)?.next_node(); let end_chunk = BTreeOverlay::new(old, start_chunk)?.next_node;
cache.splice( cache.splice(
start_chunk..end_chunk, start_chunk..end_chunk,
@ -218,7 +219,7 @@ where
cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?; cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?;
} }
Ok(offset_handler.next_node()) Ok(offset_handler.next_node)
} }
} }

View File

@ -1,13 +1,14 @@
use hashing::hash; use hashing::hash;
use int_to_bytes::int_to_bytes32; use int_to_bytes::int_to_bytes32;
use std::fmt::Debug; use std::fmt::Debug;
use std::iter::Iterator;
use std::ops::Range; use std::ops::Range;
mod btree_overlay;
mod cached_tree_hash; mod cached_tree_hash;
mod impls; mod impls;
mod resize; mod resize;
pub use btree_overlay::BTreeOverlay;
pub use cached_tree_hash::TreeHashCache; pub use cached_tree_hash::TreeHashCache;
pub const BYTES_PER_CHUNK: usize = 32; pub const BYTES_PER_CHUNK: usize = 32;
@ -44,7 +45,7 @@ pub trait CachedTreeHash<Item>: Debug {
/// prefixes. /// prefixes.
fn num_bytes(&self) -> usize; fn num_bytes(&self) -> usize;
fn offsets(&self) -> Result<Vec<usize>, Error>; fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error>;
fn num_child_nodes(&self) -> usize; fn num_child_nodes(&self) -> usize;
@ -72,109 +73,6 @@ fn node_range_to_byte_range(node_range: &Range<usize>) -> Range<usize> {
node_range.start * HASHSIZE..node_range.end * HASHSIZE node_range.start * HASHSIZE..node_range.end * HASHSIZE
} }
#[derive(Debug)]
pub struct BTreeOverlay {
num_internal_nodes: usize,
pub num_leaf_nodes: usize,
first_node: usize,
next_node: usize,
offsets: Vec<usize>,
}
impl BTreeOverlay {
pub fn new<T>(item: &T, initial_offset: usize) -> Result<Self, Error>
where
T: CachedTreeHash<T>,
{
Self::from_lengths(initial_offset, item.offsets()?)
}
fn from_lengths(offset: usize, mut lengths: Vec<usize>) -> Result<Self, Error> {
// Extend it to the next power-of-two, if it is not already.
let num_leaf_nodes = if lengths.len().is_power_of_two() {
lengths.len()
} else {
let num_leaf_nodes = lengths.len().next_power_of_two();
lengths.resize(num_leaf_nodes, 1);
num_leaf_nodes
};
let num_nodes = num_nodes(num_leaf_nodes);
let num_internal_nodes = num_nodes - num_leaf_nodes;
let mut offsets = Vec::with_capacity(num_nodes);
offsets.append(&mut (offset..offset + num_internal_nodes).collect());
let mut next_node = num_internal_nodes + offset;
for i in 0..num_leaf_nodes {
offsets.push(next_node);
next_node += lengths[i];
}
Ok(Self {
num_internal_nodes,
num_leaf_nodes,
offsets,
first_node: offset,
next_node,
})
}
pub fn root(&self) -> usize {
self.first_node
}
pub fn height(&self) -> usize {
self.num_leaf_nodes.trailing_zeros() as usize
}
pub fn chunk_range(&self) -> Range<usize> {
self.first_node..self.next_node
}
pub fn total_chunks(&self) -> usize {
self.next_node - self.first_node
}
pub fn total_nodes(&self) -> usize {
self.num_internal_nodes + self.num_leaf_nodes
}
pub fn first_leaf_node(&self) -> Result<usize, Error> {
self.offsets
.get(self.num_internal_nodes)
.cloned()
.ok_or_else(|| Error::NoFirstNode)
}
pub fn next_node(&self) -> usize {
self.next_node
}
/// Returns an iterator visiting each internal node, providing the left and right child chunks
/// for the node.
pub fn iter_internal_nodes<'a>(
&'a self,
) -> impl DoubleEndedIterator<Item = (&'a usize, (&'a usize, &'a usize))> {
let internal_nodes = &self.offsets[0..self.num_internal_nodes];
internal_nodes.iter().enumerate().map(move |(i, parent)| {
let children = children(i);
(
parent,
(&self.offsets[children.0], &self.offsets[children.1]),
)
})
}
/// Returns an iterator visiting each leaf node, providing the chunk for that node.
pub fn iter_leaf_nodes<'a>(&'a self) -> impl DoubleEndedIterator<Item = &'a usize> {
let leaf_nodes = &self.offsets[self.num_internal_nodes..];
leaf_nodes.iter()
}
}
/// Split `values` into a power-of-two, identical-length chunks (padding with `0`) and merkleize /// Split `values` into a power-of-two, identical-length chunks (padding with `0`) and merkleize
/// them, returning the entire merkle tree. /// them, returning the entire merkle tree.
/// ///

View File

@ -44,15 +44,15 @@ impl CachedTreeHash<Inner> for Inner {
bytes bytes
} }
fn offsets(&self) -> Result<Vec<usize>, Error> { fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
let mut offsets = vec![]; let mut lengths = vec![];
offsets.push(self.a.num_child_nodes() + 1); lengths.push(self.a.num_child_nodes() + 1);
offsets.push(self.b.num_child_nodes() + 1); lengths.push(self.b.num_child_nodes() + 1);
offsets.push(self.c.num_child_nodes() + 1); lengths.push(self.c.num_child_nodes() + 1);
offsets.push(self.d.num_child_nodes() + 1); lengths.push(self.d.num_child_nodes() + 1);
Ok(offsets) BTreeOverlay::from_lengths(chunk_offset, lengths)
} }
fn num_child_nodes(&self) -> usize { fn num_child_nodes(&self) -> usize {
@ -98,7 +98,7 @@ impl CachedTreeHash<Inner> for Inner {
} }
} }
Ok(offset_handler.next_node()) Ok(offset_handler.next_node)
} }
} }
@ -146,14 +146,14 @@ impl CachedTreeHash<Outer> for Outer {
num_nodes(leaves) + children - 1 num_nodes(leaves) + children - 1
} }
fn offsets(&self) -> Result<Vec<usize>, Error> { fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error> {
let mut offsets = vec![]; let mut lengths = vec![];
offsets.push(self.a.num_child_nodes() + 1); lengths.push(self.a.num_child_nodes() + 1);
offsets.push(self.b.num_child_nodes() + 1); lengths.push(self.b.num_child_nodes() + 1);
offsets.push(self.c.num_child_nodes() + 1); lengths.push(self.c.num_child_nodes() + 1);
Ok(offsets) BTreeOverlay::from_lengths(chunk_offset, lengths)
} }
fn packed_encoding(&self) -> Vec<u8> { fn packed_encoding(&self) -> Vec<u8> {
@ -186,7 +186,7 @@ impl CachedTreeHash<Outer> for Outer {
} }
} }
Ok(offset_handler.next_node()) Ok(offset_handler.next_node)
} }
} }