Add tests, fix bugs

Paul Hauner 2019-04-22 16:09:29 +10:00
parent 2c12aabf04
commit 7c64a5a21b
3 changed files with 99 additions and 15 deletions


@@ -35,7 +35,7 @@ impl BTreeOverlay {
self.lengths.len().next_power_of_two()
}
fn num_padding_leaves(&self) -> usize {
pub fn num_padding_leaves(&self) -> usize {
self.num_leaf_nodes() - self.lengths.len()
}
@@ -76,14 +76,31 @@ impl BTreeOverlay {
self.offset + self.num_internal_nodes()
}
/// Returns the chunk-range for a given leaf node.
///
/// Returns `None` if:
/// - The specified node is internal.
/// - The specified node is padding.
/// - The specified node is OOB of the tree.
pub fn get_leaf_node(&self, i: usize) -> Result<Option<Range<usize>>, Error> {
if i >= self.num_leaf_nodes() {
return Err(Error::NotLeafNode(i));
} else if i >= self.num_leaf_nodes() - self.num_padding_leaves() {
if i >= self.num_nodes() - self.num_padding_leaves() {
Ok(None)
/*
} else if i < self.num_internal_nodes() {
Ok(None)
*/
} else if (i == self.num_internal_nodes()) && (self.num_items == 0) {
// If this is the first leaf node and the overlay contains zero items, return `None` as
// this node must be padding.
Ok(None)
} else {
let first_node = self.offset + self.lengths.iter().take(i).sum::<usize>();
let i = i - self.num_internal_nodes();
let first_node = self.offset
+ self.num_internal_nodes()
+ self.lengths.iter().take(i).sum::<usize>();
let last_node = first_node + self.lengths[i];
Ok(Some(first_node..last_node))
}
}
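The doc comment above captures the new indexing convention: `i` now indexes nodes of the whole tree (internal nodes first, then leaves) rather than leaves alone, and each real leaf maps to a chunk range sized by its recorded length. As a rough standalone sketch of that arithmetic, assuming `offset == 0` (the helper below is illustrative only, not the crate's API):

use std::ops::Range;

// Illustrative only: mirrors the chunk-range arithmetic of the new `get_leaf_node`.
fn leaf_chunk_range(i: usize, num_internal: usize, lengths: &[usize]) -> Option<Range<usize>> {
    if i < num_internal || i >= num_internal + lengths.len() {
        // Internal, padding, or out-of-bounds node.
        return None;
    }
    let leaf = i - num_internal;
    let first = num_internal + lengths[..leaf].iter().sum::<usize>();
    Some(first..first + lengths[leaf])
}

fn main() {
    // Four single-chunk items: internal nodes occupy indices 0..=2, leaves 3..=6.
    let lengths = [1, 1, 1, 1];
    assert_eq!(leaf_chunk_range(3, 3, &lengths), Some(3..4));
    assert_eq!(leaf_chunk_range(6, 3, &lengths), Some(6..7));
    assert_eq!(leaf_chunk_range(1, 3, &lengths), None); // internal node
    assert_eq!(leaf_chunk_range(7, 3, &lengths), None); // out of bounds
}

The `get_leaf_node` test added further down exercises the same `Some` cases against the real overlay.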
@@ -191,6 +208,16 @@ mod test {
assert_eq!(tree.chunk_range(), 11..14);
}
#[test]
fn get_leaf_node() {
let tree = get_tree_a(4);
assert_eq!(tree.get_leaf_node(3), Ok(Some(3..4)));
assert_eq!(tree.get_leaf_node(4), Ok(Some(4..5)));
assert_eq!(tree.get_leaf_node(5), Ok(Some(5..6)));
assert_eq!(tree.get_leaf_node(6), Ok(Some(6..7)));
}
#[test]
fn root_of_one_node() {
let tree = get_tree_a(1);


@@ -47,6 +47,11 @@ where
lengths.push(BTreeOverlay::new(item, 0)?.num_nodes())
}
// Disallow a zero-length `lengths` vec, as an empty list still has one all-padding node.
if lengths.is_empty() {
lengths.push(1);
}
lengths
}
};
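To make the guard above concrete, here is a small self-contained illustration of the overlay's leaf arithmetic (the function is local to this example, not part of the crate). Leaf counts round up to the next power of two, and the placeholder length keeps an empty list at exactly one leaf, which `get_leaf_node` then reports as padding because `num_items` is zero:

fn leaf_counts(num_lengths: usize) -> (usize, usize) {
    // Mirrors `num_leaf_nodes` and `num_padding_leaves` on `BTreeOverlay`.
    let leaves = num_lengths.next_power_of_two();
    (leaves, leaves - num_lengths)
}

fn main() {
    // Three tracked items round up to four leaves, one of them padding.
    assert_eq!(leaf_counts(3), (4, 1));
    // An empty list is given the placeholder length, so it still owns a single
    // leaf; that leaf is all-padding only because the list holds zero items.
    assert_eq!(leaf_counts(1), (1, 0));
}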
@@ -56,9 +61,7 @@ where
fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
let new_overlay = BTreeOverlay::new(self, cache.chunk_index)?;
let old_overlay = cache
.get_overlay(cache.overlay_index, cache.chunk_index)?
.clone();
let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
// If the merkle tree required to represent the new list is of a different size to the one
// required for the previous list, then update our cache.
@@ -69,6 +72,8 @@ where
cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;
}
cache.overlay_index += 1;
match T::tree_hash_type() {
TreeHashType::Basic => {
let mut buf = vec![0; HASHSIZE];
@@ -78,7 +83,7 @@ where
for i in 0..new_overlay.num_leaf_nodes() {
// Iterate through the number of items that may be packed into the leaf node.
for j in 0..T::tree_hash_packing_factor() {
// Create a mut slice that can either be filled with a serialized item or
// Create a mut slice that can be filled with either a serialized item or
// padding.
let buf_slice = &mut buf[j * item_bytes..(j + 1) * item_bytes];
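The comments above describe packing several basic values into a single leaf before hashing. A hedged sketch of that packing, assuming `u64` items and 32-byte chunks so that four items share one leaf (the function is illustrative, not the crate's):

// Illustrative only: pack up to four u64 values into one 32-byte leaf,
// leaving any unused slices as zero padding.
fn pack_u64_leaf(items: &[u64]) -> [u8; 32] {
    let item_bytes = 8;
    let mut buf = [0u8; 32];
    for (j, item) in items.iter().take(32 / item_bytes).enumerate() {
        buf[j * item_bytes..(j + 1) * item_bytes].copy_from_slice(&item.to_le_bytes());
    }
    buf
}

fn main() {
    // Two items followed by two slices of padding.
    let leaf = pack_u64_leaf(&[1, 2]);
    assert_eq!(&leaf[0..8], &1u64.to_le_bytes());
    assert_eq!(&leaf[16..32], &[0u8; 16]);
}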
@@ -99,20 +104,47 @@ where
}
}
TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
for i in (0..new_overlay.num_leaf_nodes()).rev() {
match (old_overlay.get_leaf_node(i)?, new_overlay.get_leaf_node(i)?) {
let mut local_overlay_index = cache.overlay_index;
for i in 0..new_overlay.num_leaf_nodes() {
cache.overlay_index = local_overlay_index;
// Adjust `i` so it is a leaf node for each of the overlays.
let old_i = i + old_overlay.num_internal_nodes();
let new_i = i + new_overlay.num_internal_nodes();
match (
old_overlay.get_leaf_node(old_i)?,
new_overlay.get_leaf_node(new_i)?,
) {
// The item existed in the previous list and exists in the current list.
(Some(_old), Some(new)) => {
cache.chunk_index = new.start;
self[i].update_tree_hash_cache(cache)?;
local_overlay_index += 1;
}
// The item existed in the previous list but does not exist in this list.
//
// Viz., the list has been shortened.
(Some(old), None) => {
// Splice out the entire tree of the removed node, replacing it with a
// single padding node.
cache.splice(old, vec![0; HASHSIZE], vec![true]);
if new_overlay.num_items == 0 {
// In this case, the list has been made empty and we should make
// this node padding.
cache.maybe_update_chunk(new_overlay.root(), &[0; HASHSIZE])?;
} else {
// In this case, there are some items in the new list and we should
// splice out the entire tree of the removed node, replacing it
// with a single padding node.
cache.splice(old, vec![0; HASHSIZE], vec![true]);
cache.overlays.remove(cache.overlay_index);
}
local_overlay_index += 1;
}
// The item did not exist in the previous list but does exist in this list.
//
@@ -122,6 +154,13 @@ where
let bools = vec![true; bytes.len() / HASHSIZE];
cache.splice(new.start..new.start + 1, bytes, bools);
cache.overlays.insert(
std::cmp::min(cache.overlay_index, cache.overlays.len()),
BTreeOverlay::new(&self[i], 0)?,
);
local_overlay_index += 1;
}
// The item didn't exist in the old list and doesn't exist in the new list,
// nothing to do.
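The `cache.splice(..)` calls above carry the resize logic: a removed item's chunks collapse into a single zeroed padding chunk, while a newly added item's serialized tree is spliced in at its leaf. A hedged, simplified model of what `splice(chunk_range, bytes, bools)` appears to do, based only on the behaviour visible in this diff (not the real `TreeHashCache`):

const HASHSIZE: usize = 32;

// Illustrative only: a flat byte cache with one "changed" flag per 32-byte chunk.
struct FlatCache {
    bytes: Vec<u8>,
    changed: Vec<bool>,
}

impl FlatCache {
    fn splice(&mut self, chunks: std::ops::Range<usize>, bytes: Vec<u8>, changed: Vec<bool>) {
        // Replace the covered chunks' bytes and their change flags in lock-step.
        self.bytes
            .splice(chunks.start * HASHSIZE..chunks.end * HASHSIZE, bytes);
        self.changed.splice(chunks, changed);
    }
}

fn main() {
    let mut cache = FlatCache {
        bytes: vec![1; 3 * HASHSIZE],
        changed: vec![false; 3],
    };
    // As in the `(Some(old), None)` arm: replace a removed item's two chunks
    // with a single zeroed padding chunk, flagged as changed.
    cache.splice(1..3, vec![0; HASHSIZE], vec![true]);
    assert_eq!(cache.bytes.len(), 2 * HASHSIZE);
    assert_eq!(cache.changed, vec![false, true]);
}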
@@ -136,7 +175,6 @@ where
// Mix in length.
let root_node = new_overlay.root();
if cache.changed(root_node)? {
dbg!(cache.get_chunk(12));
cache.modify_chunk(root_node, &cache.mix_in_length(root_node, self.len())?)?;
} else if old_overlay.num_items != new_overlay.num_items {
if new_overlay.num_internal_nodes() == 0 {
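The length mix-in above follows the usual SSZ convention: hash the root chunk together with the list length so that two lists with the same elements but different lengths produce different roots. A rough sketch of that convention, assuming a caller-supplied 32-byte hash function (the cache's real `mix_in_length` takes a chunk index and is not shown in this diff):

// Illustrative only: concatenate the root with the length (little-endian u64,
// zero-padded to 32 bytes) and hash the 64-byte result.
fn mix_in_length(root: &[u8; 32], len: usize, hash: impl Fn(&[u8]) -> [u8; 32]) -> [u8; 32] {
    let mut preimage = [0u8; 64];
    preimage[..32].copy_from_slice(root);
    preimage[32..40].copy_from_slice(&(len as u64).to_le_bytes());
    hash(&preimage)
}

fn main() {
    // Dummy hash for demonstration only: XOR the 64-byte preimage down to 32 bytes.
    let xor_hash = |bytes: &[u8]| {
        let mut out = [0u8; 32];
        for (i, b) in bytes.iter().enumerate() {
            out[i % 32] ^= b;
        }
        out
    };
    let mixed = mix_in_length(&[0u8; 32], 3, xor_hash);
    assert_eq!(mixed[0], 3);
}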


@@ -28,6 +28,7 @@ where
assert_eq!(standard_root, cached_root, "Initial cache build failed.");
for (i, modified) in modified.iter().enumerate() {
println!("-- Start of modification {} --", i);
// Test after a modification
hasher
.update(modified)
@@ -161,6 +162,24 @@ fn test_vec() {
test_routine(original, modified);
}
#[test]
fn test_nested_list() {
let original: Vec<Vec<u64>> = vec![vec![1]];
let modified = vec![
vec![vec![1]],
vec![vec![1], vec![2]],
vec![vec![1], vec![3], vec![4]],
vec![],
vec![vec![1], vec![3], vec![4]],
vec![],
vec![vec![1, 2], vec![3], vec![4, 5, 6, 7, 8]],
vec![],
];
test_routine(original, modified);
}
#[derive(Clone, Debug)]
pub struct Inner {
pub a: u64,