Introduce BTreeSchema
parent 794b48078c
commit 8976e652d2
@@ -13,6 +13,6 @@ pub enum Error {
     BytesAreNotEvenChunks(usize),
     NoModifiedFieldForChunk(usize),
     NoBytesForChunk(usize),
-    NoOverlayForIndex(usize),
+    NoSchemaForIndex(usize),
     NotLeafNode(usize),
 }
@@ -96,12 +96,12 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
     vec: &Vec<T>,
     cache: &mut TreeHashCache,
 ) -> Result<BTreeOverlay, Error> {
-    let old_overlay = cache.get_overlay(cache.overlay_index, cache.chunk_index)?;
+    let old_overlay = cache.get_overlay(cache.schema_index, cache.chunk_index)?;
     let new_overlay = BTreeOverlay::new(vec, cache.chunk_index, old_overlay.depth);

-    cache.replace_overlay(cache.overlay_index, cache.chunk_index, new_overlay.clone())?;
+    cache.replace_overlay(cache.schema_index, cache.chunk_index, new_overlay.clone())?;

-    cache.overlay_index += 1;
+    cache.schema_index += 1;

     match T::tree_hash_type() {
         TreeHashType::Basic => {
@@ -152,21 +152,21 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
                     //
                     // Viz., the list has been lengthened.
                     (None, Some(new)) => {
-                        let (bytes, mut bools, overlays) =
+                        let (bytes, mut bools, schemas) =
                             TreeHashCache::new(&vec[i], new_overlay.depth + 1)?.into_components();

-                        // Record the number of overlays, this will be used later in the fn.
-                        let num_overlays = overlays.len();
+                        // Record the number of schemas, this will be used later in the fn.
+                        let num_schemas = schemas.len();

                         // Flag the root node of the new tree as dirty.
                         bools[0] = true;

                         cache.splice(new.start..new.start + 1, bytes, bools);
                         cache
-                            .overlays
-                            .splice(cache.overlay_index..cache.overlay_index, overlays);
+                            .schemas
+                            .splice(cache.schema_index..cache.schema_index, schemas);

-                        cache.overlay_index += num_overlays;
+                        cache.schema_index += num_schemas;
                     }
                     // The item existed in the previous list but does not exist in this list.
                     //
@@ -189,9 +189,9 @@ pub fn update_tree_hash_cache<T: CachedTreeHash<T>>(
                 }
             }

-            // Clean out any excess overlays that may or may not be remaining if the list was
+            // Clean out any excess schemas that may or may not be remaining if the list was
             // shortened.
-            cache.remove_proceeding_child_overlays(cache.overlay_index, new_overlay.depth);
+            cache.remove_proceeding_child_schemas(cache.schema_index, new_overlay.depth);
         }
     }

@@ -44,7 +44,7 @@ impl CachedTreeHasher {
     {
         // Reset the per-hash counters.
         self.cache.chunk_index = 0;
-        self.cache.overlay_index = 0;
+        self.cache.schema_index = 0;

         // Reset the "modified" flags for the cache.
         self.cache.reset_modifications();
@@ -2,14 +2,39 @@ use super::*;
 use crate::merkleize::{merkleize, pad_for_leaf_count};
 use int_to_bytes::int_to_bytes32;

+#[derive(Debug, PartialEq, Clone)]
+pub struct BTreeSchema {
+    pub depth: usize,
+    pub lengths: Vec<usize>,
+}
+
+impl BTreeSchema {
+    pub fn into_overlay(self, offset: usize) -> BTreeOverlay {
+        BTreeOverlay {
+            offset,
+            depth: self.depth,
+            lengths: self.lengths,
+        }
+    }
+}
+
+impl Into<BTreeSchema> for BTreeOverlay {
+    fn into(self) -> BTreeSchema {
+        BTreeSchema {
+            depth: self.depth,
+            lengths: self.lengths,
+        }
+    }
+}
+
 #[derive(Debug, PartialEq, Clone)]
 pub struct TreeHashCache {
     pub cache: Vec<u8>,
     pub chunk_modified: Vec<bool>,
-    pub overlays: Vec<BTreeOverlay>,
+    pub schemas: Vec<BTreeSchema>,

     pub chunk_index: usize,
-    pub overlay_index: usize,
+    pub schema_index: usize,
 }

 impl Into<Vec<u8>> for TreeHashCache {
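For context, not taken from the commit itself: a BTreeSchema is a BTreeOverlay with the offset stripped out, so the cache stores one position-independent shape per item and re-binds it to a concrete chunk index at lookup time. A minimal standalone sketch of that round trip, assuming BTreeOverlay carries exactly the offset/depth/lengths fields implied by into_overlay above:

// Standalone sketch mirroring the types in this commit; `BTreeOverlay` here is
// a stand-in whose fields are assumed from the `into_overlay` constructor above.
#[derive(Debug, PartialEq, Clone)]
pub struct BTreeSchema {
    pub depth: usize,
    pub lengths: Vec<usize>,
}

#[derive(Debug, PartialEq, Clone)]
pub struct BTreeOverlay {
    pub offset: usize,
    pub depth: usize,
    pub lengths: Vec<usize>,
}

impl BTreeSchema {
    /// Re-bind a stored, position-independent schema to a concrete chunk offset.
    pub fn into_overlay(self, offset: usize) -> BTreeOverlay {
        BTreeOverlay {
            offset,
            depth: self.depth,
            lengths: self.lengths,
        }
    }
}

impl Into<BTreeSchema> for BTreeOverlay {
    /// Drop the offset, keeping only the tree shape.
    fn into(self) -> BTreeSchema {
        BTreeSchema {
            depth: self.depth,
            lengths: self.lengths,
        }
    }
}

fn main() {
    let overlay = BTreeOverlay {
        offset: 42,
        depth: 1,
        lengths: vec![1, 1, 1, 1],
    };

    // Store the shape without its position...
    let schema: BTreeSchema = overlay.clone().into();
    // ...then re-attach the offset when the schema is looked up.
    let rebuilt = schema.into_overlay(42);

    assert_eq!(overlay, rebuilt);
}

Storing schemas rather than overlays keeps the cached shapes valid even when items move to different chunk offsets; get_overlay and replace_overlay below re-attach the offset on demand.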
@@ -51,10 +76,10 @@ impl TreeHashCache {

         // Allocate enough bytes to store all the leaves.
         let mut leaves = Vec::with_capacity(overlay.num_leaf_nodes() * HASHSIZE);
-        let mut overlays = Vec::with_capacity(leaves_and_subtrees.len());
+        let mut schemas = Vec::with_capacity(leaves_and_subtrees.len());

         if T::tree_hash_type() == TreeHashType::List {
-            overlays.push(overlay);
+            schemas.push(overlay.into());
         }

         // Iterate through all of the leaves/subtrees, adding their root as a leaf node and then
@@ -62,9 +87,9 @@ impl TreeHashCache {
         for t in leaves_and_subtrees {
             leaves.append(&mut t.root()?.to_vec());

-            let (mut bytes, _bools, mut t_overlays) = t.into_components();
+            let (mut bytes, _bools, mut t_schemas) = t.into_components();
             cache.append(&mut bytes);
-            overlays.append(&mut t_overlays);
+            schemas.append(&mut t_schemas);
         }

         // Pad the leaves to an even power-of-two, using zeros.
@@ -79,9 +104,9 @@ impl TreeHashCache {
         Ok(Self {
             chunk_modified: vec![false; cache.len() / BYTES_PER_CHUNK],
             cache,
-            overlays,
+            schemas,
             chunk_index: 0,
-            overlay_index: 0,
+            schema_index: 0,
         })
     }

@@ -94,34 +119,31 @@ impl TreeHashCache {
             return Err(Error::BytesAreNotEvenChunks(bytes.len()));
         }

-        let overlays = match overlay {
-            Some(overlay) => vec![overlay],
+        let schemas = match overlay {
+            Some(overlay) => vec![overlay.into()],
             None => vec![],
         };

         Ok(Self {
             chunk_modified: vec![initial_modified_state; bytes.len() / BYTES_PER_CHUNK],
             cache: bytes,
-            overlays,
+            schemas,
             chunk_index: 0,
-            overlay_index: 0,
+            schema_index: 0,
         })
     }

     pub fn get_overlay(
         &self,
-        overlay_index: usize,
+        schema_index: usize,
         chunk_index: usize,
     ) -> Result<BTreeOverlay, Error> {
-        let mut overlay = self
-            .overlays
-            .get(overlay_index)
-            .ok_or_else(|| Error::NoOverlayForIndex(overlay_index))?
-            .clone();
-
-        overlay.offset = chunk_index;
-
-        Ok(overlay)
+        Ok(self
+            .schemas
+            .get(schema_index)
+            .ok_or_else(|| Error::NoSchemaForIndex(schema_index))?
+            .clone()
+            .into_overlay(chunk_index))
     }

     pub fn reset_modifications(&mut self) {
@@ -132,11 +154,11 @@ impl TreeHashCache {

     pub fn replace_overlay(
         &mut self,
-        overlay_index: usize,
+        schema_index: usize,
         chunk_index: usize,
         new_overlay: BTreeOverlay,
     ) -> Result<BTreeOverlay, Error> {
-        let old_overlay = self.get_overlay(overlay_index, chunk_index)?;
+        let old_overlay = self.get_overlay(schema_index, chunk_index)?;

         // If the merkle tree required to represent the new list is of a different size to the one
         // required for the previous list, then update our cache.
@@ -173,22 +195,21 @@ impl TreeHashCache {
             self.splice(old_overlay.chunk_range(), new_bytes, new_bools);
         }

-        Ok(std::mem::replace(
-            &mut self.overlays[overlay_index],
-            new_overlay,
-        ))
+        let old_schema = std::mem::replace(&mut self.schemas[schema_index], new_overlay.into());
+
+        Ok(old_schema.into_overlay(chunk_index))
     }

-    pub fn remove_proceeding_child_overlays(&mut self, overlay_index: usize, depth: usize) {
+    pub fn remove_proceeding_child_schemas(&mut self, schema_index: usize, depth: usize) {
         let end = self
-            .overlays
+            .schemas
             .iter()
-            .skip(overlay_index)
+            .skip(schema_index)
             .position(|o| o.depth <= depth)
-            .and_then(|i| Some(i + overlay_index))
-            .unwrap_or_else(|| self.overlays.len());
+            .and_then(|i| Some(i + schema_index))
+            .unwrap_or_else(|| self.schemas.len());

-        self.overlays.splice(overlay_index..end, vec![]);
+        self.schemas.splice(schema_index..end, vec![]);
     }

     pub fn update_internal_nodes(&mut self, overlay: &BTreeOverlay) -> Result<(), Error> {
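A note on the renamed remove_proceeding_child_schemas, since the iterator chain is easy to misread: starting at schema_index, it removes every following schema until it reaches one at depth or shallower, i.e. it prunes the child schemas left behind when a list shrinks. A standalone sketch of just that index arithmetic, using a hypothetical stand-in Schema type rather than the crate's:

// Standalone sketch of the end-index computation; `Schema` is a stand-in
// carrying only the field the arithmetic needs.
#[derive(Debug, Clone)]
struct Schema {
    depth: usize,
}

/// Remove schemas from `schema_index` up to (but not including) the first
/// following schema whose depth is `depth` or shallower.
fn remove_proceeding_child_schemas(schemas: &mut Vec<Schema>, schema_index: usize, depth: usize) {
    let end = schemas
        .iter()
        .skip(schema_index)
        .position(|s| s.depth <= depth)
        // Equivalent to the `.and_then(|i| Some(i + schema_index))` in the diff.
        .map(|i| i + schema_index)
        .unwrap_or_else(|| schemas.len());

    schemas.splice(schema_index..end, vec![]);
}

fn main() {
    // A parent at depth 1, three of its children at depth 2, then a sibling
    // back at depth 1.
    let mut schemas: Vec<Schema> = [1, 2, 2, 2, 1]
        .iter()
        .map(|&depth| Schema { depth })
        .collect();

    // Starting just after the parent, drop everything deeper than depth 1.
    remove_proceeding_child_schemas(&mut schemas, 1, 1);

    let depths: Vec<usize> = schemas.iter().map(|s| s.depth).collect();
    assert_eq!(depths, vec![1, 1]);
}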
@@ -326,8 +347,8 @@ impl TreeHashCache {
         Ok(())
     }

-    pub fn into_components(self) -> (Vec<u8>, Vec<bool>, Vec<BTreeOverlay>) {
-        (self.cache, self.chunk_modified, self.overlays)
+    pub fn into_components(self) -> (Vec<u8>, Vec<bool>, Vec<BTreeSchema>) {
+        (self.cache, self.chunk_modified, self.schemas)
     }
 }
