Address various clippy lints, improve comments

Paul Hauner 2019-04-29 14:04:52 +10:00
parent 52695c29e8
commit a90bbbfd82
No known key found for this signature in database
GPG Key ID: D362883A9218FCC6
4 changed files with 35 additions and 27 deletions

View File

@@ -149,7 +149,6 @@ impl BTreeOverlay {
         chunks.append(&mut self.leaf_node_chunks());

         (0..self.num_internal_nodes())
-            .into_iter()
             .map(|parent| {
                 let children = children(parent);
                 (chunks[parent], (chunks[children.0], chunks[children.1]))

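The removed `.into_iter()` addresses one of clippy's redundant-conversion lints: a `Range<usize>` already implements `Iterator`, so the call is a no-op. A minimal sketch of the pattern (the function and values below are illustrative, not taken from the crate):

fn parents_and_children(chunks: &[usize], num_internal: usize) -> Vec<(usize, usize)> {
    // A `Range` implements `Iterator` directly, so `.map` can be called on it
    // without an intervening `.into_iter()`.
    (0..num_internal).map(|parent| (chunks[parent], parent * 2 + 1)).collect()
}

fn main() {
    assert_eq!(parents_and_children(&[10, 11, 12], 2), vec![(10, 1), (11, 3)]);
}
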
View File

@@ -65,7 +65,7 @@ pub fn new_tree_hash_cache<T: CachedTreeHash>(
     Ok((cache, schema))
 }

-pub fn produce_schema<T: CachedTreeHash>(vec: &Vec<T>, depth: usize) -> BTreeSchema {
+pub fn produce_schema<T: CachedTreeHash>(vec: &[T], depth: usize) -> BTreeSchema {
     let lengths = match T::tree_hash_type() {
         TreeHashType::Basic => {
             // Ceil division.
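
The `&Vec<T>` to `&[T]` change here (and in `get_packed_leaves` below) is the standard fix for clippy's `ptr_arg` lint: a slice parameter is strictly more general, and existing callers keep working because `&Vec<T>` coerces to `&[T]`. A hedged sketch with made-up names:

// Illustrative only; `total_len` is not a function from the crate.
fn total_len(items: &[String]) -> usize {
    items.iter().map(|s| s.len()).sum()
}

fn main() {
    let v = vec!["a".to_string(), "bb".to_string()];
    // A `&Vec<String>` argument coerces to `&[String]`, so call sites are
    // unchanged, and arrays or sub-slices now work too.
    assert_eq!(total_len(&v), 3);
    assert_eq!(total_len(&v[..1]), 1);
}
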
@@ -89,6 +89,7 @@ pub fn produce_schema<T: CachedTreeHash>(vec: &Vec<T>, depth: usize) -> BTreeSchema {
     BTreeSchema::from_lengths(depth, lengths)
 }

+#[allow(clippy::range_plus_one)] // Minor readability lint requiring structural changes; not worth it.
 pub fn update_tree_hash_cache<T: CachedTreeHash>(
     vec: &Vec<T>,
     cache: &mut TreeHashCache,
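
`clippy::range_plus_one` fires on `a..(b + 1)` and suggests `a..=b`. As the new comment notes, switching would change the expression's type from `Range` to `RangeInclusive`, which ripples through callers that expect `Range<usize>`, hence the `allow`. A small illustration (the function name is hypothetical):

// Callers here are assumed to require an exclusive `Range<usize>`, e.g. for slicing.
#[allow(clippy::range_plus_one)]
fn chunk_range(first_chunk: usize, last_chunk: usize) -> std::ops::Range<usize> {
    // Clippy would suggest `first_chunk..=last_chunk`, but that is a
    // `RangeInclusive<usize>`, a different type to what callers expect.
    first_chunk..(last_chunk + 1)
}

fn main() {
    let bytes = [0u8; 8];
    assert_eq!(&bytes[chunk_range(2, 4)], &[0, 0, 0]);
}
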
@@ -294,7 +295,7 @@ where
     //
 }

-fn get_packed_leaves<T>(vec: &Vec<T>) -> Result<Vec<u8>, Error>
+fn get_packed_leaves<T>(vec: &[T]) -> Result<Vec<u8>, Error>
 where
     T: CachedTreeHash,
 {

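The `// Ceil division.` context line in `produce_schema` refers to rounding up when packing fixed-size basic values into 32-byte chunks: a partially filled chunk still needs its own leaf. A hedged sketch of that arithmetic (the constant value and helper name are illustrative, not the crate's code):

const HASHSIZE: usize = 32;

// Hypothetical helper: number of 32-byte leaves needed for `num_items` values
// that each occupy `item_size` bytes when packed.
fn num_leaves(num_items: usize, item_size: usize) -> usize {
    let items_per_chunk = HASHSIZE / item_size;
    // Ceil division: round up so the last, partially filled chunk is counted.
    (num_items + items_per_chunk - 1) / items_per_chunk
}

fn main() {
    assert_eq!(num_leaves(9, 8), 3); // nine u64 values span three chunks of four
    assert_eq!(num_leaves(8, 8), 2);
}
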
View File

@@ -168,24 +168,31 @@ impl TreeHashCache {
     ) -> Result<BTreeOverlay, Error> {
         let old_overlay = self.get_overlay(schema_index, chunk_index)?;

         // If the merkle tree required to represent the new list is of a different size to the one
-        // required for the previous list, then update our cache.
+        // required for the previous list, then update the internal nodes.
         //
-        // This grows/shrinks the bytes to accomodate the new tree, preserving as much of the tree
+        // Leaf nodes are not touched, they should be updated externally to this function.
+        //
+        // This grows/shrinks the bytes to accommodate the new tree, preserving as much of the tree
         // as possible.
-        if new_overlay.num_leaf_nodes() != old_overlay.num_leaf_nodes() {
+        if new_overlay.num_internal_nodes() != old_overlay.num_internal_nodes() {
             // Get slices of the existing tree from the cache.
             let (old_bytes, old_flags) = self
                 .slices(old_overlay.internal_chunk_range())
                 .ok_or_else(|| Error::UnableToObtainSlices)?;

             let (new_bytes, new_flags) = if new_overlay.num_internal_nodes() == 0 {
+                // The new tree has zero internal nodes, simply return empty lists.
                 (vec![], vec![])
             } else if old_overlay.num_internal_nodes() == 0 {
+                // The old tree has zero nodes and the new tree has some nodes. Create new nodes to
+                // suit.
                 let nodes = resize::nodes_in_tree_of_height(new_overlay.height() - 1);
                 (vec![0; nodes * HASHSIZE], vec![true; nodes])
-            } else {
-                if new_overlay.num_leaf_nodes() > old_overlay.num_leaf_nodes() {
+            } else if new_overlay.num_internal_nodes() > old_overlay.num_internal_nodes() {
+                // The new tree is bigger than the old tree.
+                //
+                // Grow the internal nodes, preserving any existing nodes.
                 resize::grow_merkle_tree(
                     old_bytes,
                     old_flags,
@@ -194,6 +201,9 @@ impl TreeHashCache {
                 )
                 .ok_or_else(|| Error::UnableToGrowMerkleTree)?
             } else {
+                // The new tree is smaller than the old tree.
+                //
+                // Shrink the internal nodes, preserving any existing nodes.
                 resize::shrink_merkle_tree(
                     old_bytes,
                     old_flags,
@@ -201,13 +211,10 @@ impl TreeHashCache {
                     new_overlay.height() - 1,
                 )
                 .ok_or_else(|| Error::UnableToShrinkMerkleTree)?
-                }
             };

-            assert_eq!(old_overlay.num_internal_nodes(), old_flags.len());
-            assert_eq!(new_overlay.num_internal_nodes(), new_flags.len());
-            // Splice the resized created elements over the existing elements.
+            // Splice the resized created elements over the existing elements, effectively updating
+            // the number of stored internal nodes for this tree.
             self.splice(old_overlay.internal_chunk_range(), new_bytes, new_flags);
         }

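The rewritten branch compares internal-node counts and then empties, creates, grows, or shrinks the cached internal nodes before splicing the result over the old chunk range. A heavily simplified sketch of that decision and the splice (the real code operates on `TreeHashCache` with per-chunk flags and structure-preserving `resize` helpers; the naive resize below only shows the shape of the logic):

const HASHSIZE: usize = 32;

// Simplified stand-in for the empty/create/grow/shrink branching; `old` holds
// the bytes of the existing internal nodes.
fn resized_internal_nodes(old: &[u8], old_count: usize, new_count: usize) -> Vec<u8> {
    if new_count == 0 {
        // The new tree has zero internal nodes: store nothing.
        vec![]
    } else if old_count == 0 {
        // The old tree had none: create fresh, zeroed nodes.
        vec![0; new_count * HASHSIZE]
    } else if new_count > old_count {
        // Grow, preserving existing bytes (the real code preserves tree layout).
        let mut bytes = old.to_vec();
        bytes.resize(new_count * HASHSIZE, 0);
        bytes
    } else {
        // Shrink (again, the real code preserves layout rather than truncating).
        old[..new_count * HASHSIZE].to_vec()
    }
}

// Splicing the resized nodes over the old chunk range updates how many
// internal nodes the flat byte cache stores for this tree.
fn splice_chunks(cache: &mut Vec<u8>, chunk_range: std::ops::Range<usize>, new_bytes: Vec<u8>) {
    let byte_range = chunk_range.start * HASHSIZE..chunk_range.end * HASHSIZE;
    cache.splice(byte_range, new_bytes);
}

fn main() {
    let mut cache = vec![1u8; 4 * HASHSIZE]; // pretend the cache holds 4 chunks
    let new_bytes = resized_internal_nodes(&cache[..2 * HASHSIZE], 2, 3);
    splice_chunks(&mut cache, 0..2, new_bytes);
    assert_eq!(cache.len(), 5 * HASHSIZE); // 2 internal chunks replaced by 3
}
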
View File

@@ -19,6 +19,7 @@ macro_rules! impl_for_bitsize {
                 HASHSIZE / ($bit_size / 8)
             }

+            #[allow(clippy::cast_lossless)]
             fn tree_hash_root(&self) -> Vec<u8> {
                 int_to_bytes32(*self as u64)
             }
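
`clippy::cast_lossless` suggests `u64::from(x)` over `x as u64` for casts that can never truncate. Because `impl_for_bitsize!` stamps out `tree_hash_root` for several integer widths, keeping `as` and allowing the lint is the simpler option. A standalone illustration (not the macro itself; `int_to_bytes32` is a local stand-in here):

fn int_to_bytes32(int: u64) -> Vec<u8> {
    // Illustrative stand-in: little-endian bytes padded to 32.
    let mut bytes = int.to_le_bytes().to_vec();
    bytes.resize(32, 0);
    bytes
}

#[allow(clippy::cast_lossless)]
fn tree_hash_root_u32(value: u32) -> Vec<u8> {
    // Lossless widening cast; clippy would otherwise suggest `u64::from(value)`.
    int_to_bytes32(value as u64)
}

fn main() {
    assert_eq!(tree_hash_root_u32(1)[0], 1);
    assert_eq!(tree_hash_root_u32(1).len(), 32);
}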