Implement CachedTreeHash for TreeHashVector
This commit is contained in:
parent f1d8224d89 · commit 15f81c0907
@ -7,6 +7,7 @@ edition = "2018"
[dependencies]
bls = { path = "../utils/bls" }
boolean-bitfield = { path = "../utils/boolean-bitfield" }
cached_tree_hash = { path = "../utils/cached_tree_hash" }
dirs = "1.0"
derivative = "1.0"
ethereum-types = "0.5"

@ -1,4 +1,5 @@
use crate::test_utils::{RngCore, TestRandom};
use cached_tree_hash::CachedTreeHash;
use serde_derive::{Deserialize, Serialize};
use ssz::{Decodable, DecodeError, Encodable, SszStream};
use std::ops::{Deref, DerefMut};
@ -54,6 +55,43 @@ where
    }
}

impl<T> CachedTreeHash<TreeHashVector<T>> for TreeHashVector<T>
where
    T: CachedTreeHash<T> + TreeHash,
{
    fn new_tree_hash_cache(
        &self,
        depth: usize,
    ) -> Result<cached_tree_hash::TreeHashCache, cached_tree_hash::Error> {
        let (cache, _overlay) = cached_tree_hash::impls::vec::new_tree_hash_cache(self, depth)?;

        Ok(cache)
    }

    fn num_tree_hash_cache_chunks(&self) -> usize {
        cached_tree_hash::BTreeOverlay::new(self, 0, 0)
            .and_then(|o| Ok(o.num_chunks()))
            .unwrap_or_else(|_| 1)
    }

    fn tree_hash_cache_overlay(
        &self,
        chunk_offset: usize,
        depth: usize,
    ) -> Result<cached_tree_hash::BTreeOverlay, cached_tree_hash::Error> {
        cached_tree_hash::impls::vec::produce_overlay(self, chunk_offset, depth)
    }

    fn update_tree_hash_cache(
        &self,
        cache: &mut cached_tree_hash::TreeHashCache,
    ) -> Result<(), cached_tree_hash::Error> {
        cached_tree_hash::impls::vec::update_tree_hash_cache(self, cache)?;

        Ok(())
    }
}

impl<T> Encodable for TreeHashVector<T>
where
    T: Encodable,

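TreeHashVector is a newtype around Vec<T> (note the existing Deref/DerefMut impls), so each trait method above simply forwards `self` to the shared helpers in cached_tree_hash::impls::vec, relying on deref coercion from &TreeHashVector<T> to &Vec<T>. A minimal, self-contained sketch of that delegation pattern, assuming hypothetical names (`Wrapper`, `Hashable`, `vec_helpers` stand in for TreeHashVector, CachedTreeHash and the vec module; this is not the crate's actual API):

use std::ops::Deref;

struct Wrapper<T>(Vec<T>);

impl<T> Deref for Wrapper<T> {
    type Target = Vec<T>;

    fn deref(&self) -> &Vec<T> {
        &self.0
    }
}

trait Hashable {
    fn chunk_count(&self) -> usize;
}

mod vec_helpers {
    // Shared helper that operates on the inner Vec<T>.
    pub fn chunk_count<T>(vec: &Vec<T>) -> usize {
        std::cmp::max(1, vec.len())
    }
}

impl<T> Hashable for Wrapper<T> {
    fn chunk_count(&self) -> usize {
        // `&self` deref-coerces to `&Vec<T>`, so the wrapper can forward to the
        // Vec helper, mirroring how TreeHashVector forwards to impls::vec.
        vec_helpers::chunk_count(self)
    }
}

fn main() {
    let v = Wrapper(vec![1u64, 2, 3]);
    assert_eq!(v.chunk_count(), 3);
}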
@ -1,7 +1,7 @@
use super::*;
use crate::merkleize::merkleize;

mod vec;
pub mod vec;

impl CachedTreeHash<u64> for u64 {
    fn new_tree_hash_cache(&self, _depth: usize) -> Result<TreeHashCache, Error> {

@ -6,23 +6,7 @@ where
    T: CachedTreeHash<T> + TreeHash,
{
    fn new_tree_hash_cache(&self, depth: usize) -> Result<TreeHashCache, Error> {
        let overlay = self.tree_hash_cache_overlay(0, depth)?;

        let mut cache = match T::tree_hash_type() {
            TreeHashType::Basic => TreeHashCache::from_bytes(
                merkleize(get_packed_leaves(self)?),
                false,
                Some(overlay.clone()),
            ),
            TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
                let subtrees = self
                    .iter()
                    .map(|item| TreeHashCache::new(item, depth + 1))
                    .collect::<Result<Vec<TreeHashCache>, _>>()?;

                TreeHashCache::from_leaves_and_subtrees(self, subtrees, depth)
            }
        }?;
        let (mut cache, overlay) = new_tree_hash_cache(self, depth)?;

        cache.add_length_nodes(overlay.chunk_range(), self.len())?;

@ -33,7 +17,7 @@ where
        BTreeOverlay::new(self, 0, 0)
            .and_then(|o| Ok(o.num_chunks()))
            .unwrap_or_else(|_| 1)
            + 2
            + 2 // Add two extra nodes to cater for the length.
    }

    fn tree_hash_cache_overlay(
@ -41,139 +25,15 @@ where
        chunk_offset: usize,
        depth: usize,
    ) -> Result<BTreeOverlay, Error> {
        let lengths = match T::tree_hash_type() {
            TreeHashType::Basic => {
                // Ceil division.
                let num_leaves = (self.len() + T::tree_hash_packing_factor() - 1)
                    / T::tree_hash_packing_factor();

                // Disallow zero-length as an empty list still has one all-padding node.
                vec![1; std::cmp::max(1, num_leaves)]
            }
            TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
                let mut lengths = vec![];

                for item in self {
                    lengths.push(item.num_tree_hash_cache_chunks())
                }

                // Disallow zero-length as an empty list still has one all-padding node.
                if lengths.is_empty() {
                    lengths.push(1);
                }

                lengths
            }
        };

        BTreeOverlay::from_lengths(chunk_offset, self.len(), depth, lengths)
        produce_overlay(self, chunk_offset, depth)
    }

    fn update_tree_hash_cache(&self, cache: &mut TreeHashCache) -> Result<(), Error> {
        // Skip the length-mixed-in root node.
        cache.chunk_index += 1;

        let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
        let new_overlay = BTreeOverlay::new(self, cache.chunk_index, old_overlay.depth)?;

        cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;

        cache.overlay_index += 1;

        match T::tree_hash_type() {
            TreeHashType::Basic => {
                let mut buf = vec![0; HASHSIZE];
                let item_bytes = HASHSIZE / T::tree_hash_packing_factor();

                // Iterate through each of the leaf nodes.
                for i in 0..new_overlay.num_leaf_nodes() {
                    // Iterate through the number of items that may be packed into the leaf node.
                    for j in 0..T::tree_hash_packing_factor() {
                        // Create a mut slice that can be filled with either a serialized item or
                        // padding.
                        let buf_slice = &mut buf[j * item_bytes..(j + 1) * item_bytes];

                        // Attempt to get the item for this portion of the chunk. If it exists,
                        // update `buf` with its serialized bytes. If it doesn't exist, update
                        // `buf` with padding.
                        match self.get(i * T::tree_hash_packing_factor() + j) {
                            Some(item) => {
                                buf_slice.copy_from_slice(&item.tree_hash_packed_encoding());
                            }
                            None => buf_slice.copy_from_slice(&vec![0; item_bytes]),
                        }
                    }

                    // Update the chunk if the generated `buf` is not the same as the cache.
                    let chunk = new_overlay.first_leaf_node() + i;
                    cache.maybe_update_chunk(chunk, &buf)?;
                }
            }
            TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
                for i in 0..new_overlay.num_leaf_nodes() {
                    // Adjust `i` so it is a leaf node for each of the overlays.
                    let old_i = i + old_overlay.num_internal_nodes();
                    let new_i = i + new_overlay.num_internal_nodes();

                    match (
                        old_overlay.get_leaf_node(old_i)?,
                        new_overlay.get_leaf_node(new_i)?,
                    ) {
                        // The item existed in the previous list and exists in the current list.
                        (Some(_old), Some(new)) => {
                            cache.chunk_index = new.start;

                            self[i].update_tree_hash_cache(cache)?;
                        }
                        // The item did not exist in the previous list but does exist in this list.
                        //
                        // Viz., the list has been lengthened.
                        (None, Some(new)) => {
                            let (bytes, mut bools, overlays) =
                                TreeHashCache::new(&self[i], new_overlay.depth + 1)?
                                    .into_components();

                            // Record the number of overlays; this will be used later in the fn.
                            let num_overlays = overlays.len();

                            // Flag the root node of the new tree as dirty.
                            bools[0] = true;

                            cache.splice(new.start..new.start + 1, bytes, bools);
                            cache
                                .overlays
                                .splice(cache.overlay_index..cache.overlay_index, overlays);

                            cache.overlay_index += num_overlays;
                        }
                        // The item existed in the previous list but does not exist in this list.
                        //
                        // Viz., the list has been shortened.
                        (Some(old), None) => {
                            if new_overlay.num_items == 0 {
                                // In this case, the list has been made empty and we should make
                                // this node padding.
                                cache.maybe_update_chunk(new_overlay.root(), &[0; HASHSIZE])?;
                            } else {
                                // In this case, there are some items in the new list and we should
                                // splice out the entire tree of the removed node, replacing it
                                // with a single padding node.
                                cache.splice(old, vec![0; HASHSIZE], vec![true]);
                            }
                        }
                        // The item didn't exist in the old list and doesn't exist in the new list;
                        // nothing to do.
                        (None, None) => {}
                    }
                }

                // Clean out any excess overlays that may remain if the list was shortened.
                cache.remove_proceeding_child_overlays(cache.overlay_index, new_overlay.depth);
            }
        }

        cache.update_internal_nodes(&new_overlay)?;
        // Update the cache, returning the new overlay.
        let new_overlay = update_tree_hash_cache(self, cache)?;

        // Mix in length
        cache.mix_in_length(new_overlay.chunk_range(), self.len())?;
@ -185,6 +45,172 @@ where
    }
}

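Both the Vec and TreeHashVector paths finish by mixing the list length into the root (`mix_in_length` and `add_length_nodes` above). In SSZ this conventionally means hashing the subtree root together with the length encoded as a 32-byte little-endian integer. A standalone sketch under that assumption, using the sha2 crate (not the hashing machinery inside this crate):

use sha2::{Digest, Sha256};

/// Hash a subtree root together with the little-endian list length.
fn mix_in_length(root: &[u8; 32], len: usize) -> [u8; 32] {
    let mut length_bytes = [0u8; 32];
    length_bytes[..8].copy_from_slice(&(len as u64).to_le_bytes());

    let mut hasher = Sha256::new();
    hasher.update(root);
    hasher.update(&length_bytes);

    let digest = hasher.finalize();
    let mut out = [0u8; 32];
    out.copy_from_slice(&digest);
    out
}

fn main() {
    let root = [0u8; 32];
    // Mixing in a non-zero length must change the root.
    assert_ne!(mix_in_length(&root, 3), root);
}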
pub fn new_tree_hash_cache<T: CachedTreeHash<T>>(
    vec: &Vec<T>,
    depth: usize,
) -> Result<(TreeHashCache, BTreeOverlay), Error> {
    let overlay = vec.tree_hash_cache_overlay(0, depth)?;

    let cache = match T::tree_hash_type() {
        TreeHashType::Basic => TreeHashCache::from_bytes(
            merkleize(get_packed_leaves(vec)?),
            false,
            Some(overlay.clone()),
        ),
        TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
            let subtrees = vec
                .iter()
                .map(|item| TreeHashCache::new(item, depth + 1))
                .collect::<Result<Vec<TreeHashCache>, _>>()?;

            TreeHashCache::from_leaves_and_subtrees(vec, subtrees, depth)
        }
    }?;

    Ok((cache, overlay))
}

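For basic types, `new_tree_hash_cache` packs the serialized items into 32-byte chunks and merkleizes them (`merkleize(get_packed_leaves(vec)?)` above). The following self-contained sketch shows the general pack-then-merkleize idea for u64 values, again assuming the sha2 crate; the crate's own merkleize and get_packed_leaves differ in details such as padding strategy and cache layout.

use sha2::{Digest, Sha256};

const HASHSIZE: usize = 32;

/// Pack little-endian u64s into 32-byte chunks (a packing factor of 4),
/// zero-padding the final chunk.
fn pack_u64s(values: &[u64]) -> Vec<u8> {
    let mut bytes: Vec<u8> = values.iter().flat_map(|v| v.to_le_bytes()).collect();
    while bytes.len() % HASHSIZE != 0 {
        bytes.push(0);
    }
    bytes
}

/// Merkleize 32-byte chunks into a single root, padding the leaf layer to a power of two.
fn merkleize(mut leaves: Vec<u8>) -> Vec<u8> {
    if leaves.is_empty() {
        leaves = vec![0; HASHSIZE];
    }
    while !(leaves.len() / HASHSIZE).is_power_of_two() {
        leaves.extend_from_slice(&[0; HASHSIZE]);
    }
    let mut layer = leaves;
    while layer.len() > HASHSIZE {
        // Hash each pair of sibling chunks to form the parent layer.
        layer = layer
            .chunks(2 * HASHSIZE)
            .flat_map(|pair| Sha256::digest(pair).to_vec())
            .collect();
    }
    layer
}

fn main() {
    let root = merkleize(pack_u64s(&[1, 2, 3, 4, 5]));
    assert_eq!(root.len(), HASHSIZE);
}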
pub fn produce_overlay<T: CachedTreeHash<T>>(
    vec: &Vec<T>,
    chunk_offset: usize,
    depth: usize,
) -> Result<BTreeOverlay, Error> {
    let lengths = match T::tree_hash_type() {
        TreeHashType::Basic => {
            // Ceil division.
            let num_leaves =
                (vec.len() + T::tree_hash_packing_factor() - 1) / T::tree_hash_packing_factor();

            // Disallow zero-length as an empty list still has one all-padding node.
            vec![1; std::cmp::max(1, num_leaves)]
        }
        TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
            let mut lengths = vec![];

            for item in vec {
                lengths.push(item.num_tree_hash_cache_chunks())
            }

            // Disallow zero-length as an empty list still has one all-padding node.
            if lengths.is_empty() {
                lengths.push(1);
            }

            lengths
        }
    };

    BTreeOverlay::from_lengths(chunk_offset, vec.len(), depth, lengths)
}

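The Basic arm above sizes the leaf layer by ceil-dividing the item count by the packing factor, with a floor of one leaf so that an empty list still gets a single all-padding node. Worked as standalone arithmetic (the function name here is illustrative only):

fn packed_leaf_count(num_items: usize, packing_factor: usize) -> usize {
    // Ceil division, then disallow zero leaves.
    let num_leaves = (num_items + packing_factor - 1) / packing_factor;
    std::cmp::max(1, num_leaves)
}

fn main() {
    // Five u64s at four per 32-byte chunk need two leaves; an empty list still needs one.
    assert_eq!(packed_leaf_count(5, 4), 2);
    assert_eq!(packed_leaf_count(0, 4), 1);
}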
pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
    vec: &Vec<T>,
    cache: &mut TreeHashCache,
) -> Result<BTreeOverlay, Error> {
    let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
    let new_overlay = BTreeOverlay::new(vec, cache.chunk_index, old_overlay.depth)?;

    cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;

    cache.overlay_index += 1;

    match T::tree_hash_type() {
        TreeHashType::Basic => {
            let mut buf = vec![0; HASHSIZE];
            let item_bytes = HASHSIZE / T::tree_hash_packing_factor();

            // Iterate through each of the leaf nodes.
            for i in 0..new_overlay.num_leaf_nodes() {
                // Iterate through the number of items that may be packed into the leaf node.
                for j in 0..T::tree_hash_packing_factor() {
                    // Create a mut slice that can be filled with either a serialized item or
                    // padding.
                    let buf_slice = &mut buf[j * item_bytes..(j + 1) * item_bytes];

                    // Attempt to get the item for this portion of the chunk. If it exists,
                    // update `buf` with its serialized bytes. If it doesn't exist, update
                    // `buf` with padding.
                    match vec.get(i * T::tree_hash_packing_factor() + j) {
                        Some(item) => {
                            buf_slice.copy_from_slice(&item.tree_hash_packed_encoding());
                        }
                        None => buf_slice.copy_from_slice(&vec![0; item_bytes]),
                    }
                }

                // Update the chunk if the generated `buf` is not the same as the cache.
                let chunk = new_overlay.first_leaf_node() + i;
                cache.maybe_update_chunk(chunk, &buf)?;
            }
        }
        TreeHashType::Container | TreeHashType::List | TreeHashType::Vector => {
            for i in 0..new_overlay.num_leaf_nodes() {
                // Adjust `i` so it is a leaf node for each of the overlays.
                let old_i = i + old_overlay.num_internal_nodes();
                let new_i = i + new_overlay.num_internal_nodes();

                match (
                    old_overlay.get_leaf_node(old_i)?,
                    new_overlay.get_leaf_node(new_i)?,
                ) {
                    // The item existed in the previous list and exists in the current list.
                    (Some(_old), Some(new)) => {
                        cache.chunk_index = new.start;

                        vec[i].update_tree_hash_cache(cache)?;
                    }
                    // The item did not exist in the previous list but does exist in this list.
                    //
                    // Viz., the list has been lengthened.
                    (None, Some(new)) => {
                        let (bytes, mut bools, overlays) =
                            TreeHashCache::new(&vec[i], new_overlay.depth + 1)?.into_components();

                        // Record the number of overlays; this will be used later in the fn.
                        let num_overlays = overlays.len();

                        // Flag the root node of the new tree as dirty.
                        bools[0] = true;

                        cache.splice(new.start..new.start + 1, bytes, bools);
                        cache
                            .overlays
                            .splice(cache.overlay_index..cache.overlay_index, overlays);

                        cache.overlay_index += num_overlays;
                    }
                    // The item existed in the previous list but does not exist in this list.
                    //
                    // Viz., the list has been shortened.
                    (Some(old), None) => {
                        if new_overlay.num_items == 0 {
                            // In this case, the list has been made empty and we should make
                            // this node padding.
                            cache.maybe_update_chunk(new_overlay.root(), &[0; HASHSIZE])?;
                        } else {
                            // In this case, there are some items in the new list and we should
                            // splice out the entire tree of the removed node, replacing it
                            // with a single padding node.
                            cache.splice(old, vec![0; HASHSIZE], vec![true]);
                        }
                    }
                    // The item didn't exist in the old list and doesn't exist in the new list;
                    // nothing to do.
                    (None, None) => {}
                }
            }

            // Clean out any excess overlays that may remain if the list was shortened.
            cache.remove_proceeding_child_overlays(cache.overlay_index, new_overlay.depth);
        }
    }

    cache.update_internal_nodes(&new_overlay)?;

    Ok(new_overlay)
}

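The incremental update above leans on `maybe_update_chunk`: a chunk is only rewritten, and only flagged for re-hashing, when its freshly produced bytes differ from what the cache already holds. A hypothetical, much-simplified cache illustrating that idea (`SimpleCache` is a stand-in, not the real TreeHashCache):

const HASHSIZE: usize = 32;

struct SimpleCache {
    // Concatenated 32-byte chunks.
    bytes: Vec<u8>,
    // One dirty flag per chunk; dirty chunks force their parents to be re-hashed later.
    chunk_modified: Vec<bool>,
}

impl SimpleCache {
    fn maybe_update_chunk(&mut self, chunk: usize, to: &[u8; HASHSIZE]) {
        let start = chunk * HASHSIZE;
        if &self.bytes[start..start + HASHSIZE] != &to[..] {
            self.bytes[start..start + HASHSIZE].copy_from_slice(to);
            self.chunk_modified[chunk] = true;
        }
    }
}

fn main() {
    let mut cache = SimpleCache {
        bytes: vec![0; 2 * HASHSIZE],
        chunk_modified: vec![false; 2],
    };
    cache.maybe_update_chunk(1, &[7; HASHSIZE]);
    assert!(!cache.chunk_modified[0] && cache.chunk_modified[1]);
}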
fn get_packed_leaves<T>(vec: &Vec<T>) -> Result<Vec<u8>, Error>
where
    T: CachedTreeHash<T>,

@ -4,7 +4,7 @@ use tree_hash::{TreeHash, TreeHashType, BYTES_PER_CHUNK, HASHSIZE};

mod btree_overlay;
mod errors;
mod impls;
pub mod impls;
pub mod merkleize;
mod resize;
mod tree_hash_cache;

@ -56,11 +56,11 @@ impl TreeHash for [u8; 4] {
    }

    fn tree_hash_packed_encoding(&self) -> Vec<u8> {
        panic!("bytesN should never be packed.")
        unreachable!("bytesN should never be packed.")
    }

    fn tree_hash_packing_factor() -> usize {
        panic!("bytesN should never be packed.")
        unreachable!("bytesN should never be packed.")
    }

    fn tree_hash_root(&self) -> Vec<u8> {

@ -49,6 +49,7 @@ macro_rules! tree_hash_ssz_encoding_as_vector {
        }
    };
}

#[macro_export]
macro_rules! tree_hash_ssz_encoding_as_list {
    ($type: ident) => {