Add tree_hash_derive crate

Paul Hauner 2019-04-15 15:45:05 +10:00
parent 354f823c16
commit 2be05a466f
No known key found for this signature in database
GPG Key ID: D362883A9218FCC6
10 changed files with 163 additions and 12 deletions

View File

@@ -19,6 +19,7 @@ members = [
     "eth2/utils/ssz_derive",
     "eth2/utils/swap_or_not_shuffle",
     "eth2/utils/tree_hash",
+    "eth2/utils/tree_hash_derive",
     "eth2/utils/fisher_yates_shuffle",
     "eth2/utils/test_random_derive",
     "beacon_node",

View File

@@ -12,7 +12,7 @@ pub struct BTreeOverlay {
 impl BTreeOverlay {
     pub fn new<T>(item: &T, initial_offset: usize) -> Result<Self, Error>
     where
-        T: CachedTreeHashSubtree<T>,
+        T: CachedTreeHashSubTree<T>,
     {
         item.btree_overlay(initial_offset)
     }

View File

@@ -15,7 +15,7 @@ impl Into<Vec<u8>> for TreeHashCache {
 impl TreeHashCache {
     pub fn new<T>(item: &T) -> Result<Self, Error>
     where
-        T: CachedTreeHashSubtree<T>,
+        T: CachedTreeHashSubTree<T>,
     {
         item.new_cache()
     }
@@ -32,7 +32,7 @@ impl TreeHashCache {
         leaves_and_subtrees: Vec<Self>,
     ) -> Result<Self, Error>
     where
-        T: CachedTreeHashSubtree<T>,
+        T: CachedTreeHashSubTree<T>,
     {
         let offset_handler = BTreeOverlay::new(item, 0)?;

View File

@@ -4,7 +4,7 @@ use ssz::ssz_encode;
 mod vec;
-impl CachedTreeHashSubtree<u64> for u64 {
+impl CachedTreeHashSubTree<u64> for u64 {
     fn item_type() -> ItemType {
         ItemType::Basic
     }

View File

@@ -1,8 +1,8 @@
 use super::*;
-impl<T> CachedTreeHashSubtree<Vec<T>> for Vec<T>
+impl<T> CachedTreeHashSubTree<Vec<T>> for Vec<T>
 where
-    T: CachedTreeHashSubtree<T>,
+    T: CachedTreeHashSubTree<T>,
 {
     fn item_type() -> ItemType {
         ItemType::List
@@ -168,7 +168,7 @@ where
 fn get_packed_leaves<T>(vec: &Vec<T>) -> Result<Vec<u8>, Error>
 where
-    T: CachedTreeHashSubtree<T>,
+    T: CachedTreeHashSubTree<T>,
 {
     let num_packed_bytes = (BYTES_PER_CHUNK / T::packing_factor()) * vec.len();
     let num_leaves = num_sanitized_leaves(num_packed_bytes);

View File

@@ -35,7 +35,7 @@ pub enum ItemType {
     Composite,
 }
-pub trait CachedTreeHash<T>: CachedTreeHashSubtree<T> + Sized {
+pub trait CachedTreeHash<T>: CachedTreeHashSubTree<T> + Sized {
     fn update_internal_tree_hash_cache(self, old: T) -> Result<(Self, Self), Error>;
     fn cached_tree_hash_root(&self) -> Option<Vec<u8>>;
@@ -43,7 +43,7 @@ pub trait CachedTreeHash<T>: CachedTreeHashSubtree<T> + Sized {
     fn clone_without_tree_hash_cache(&self) -> Self;
 }
-pub trait CachedTreeHashSubtree<Item> {
+pub trait CachedTreeHashSubTree<Item> {
     fn item_type() -> ItemType;
     fn btree_overlay(&self, chunk_offset: usize) -> Result<BTreeOverlay, Error>;

View File

@@ -63,7 +63,7 @@ fn works_when_embedded() {
     assert_eq!(&merkle[0..32], &root[..]);
 }
-impl CachedTreeHashSubtree<InternalCache> for InternalCache {
+impl CachedTreeHashSubTree<InternalCache> for InternalCache {
     fn item_type() -> ItemType {
         ItemType::Composite
     }
@@ -131,7 +131,7 @@ pub struct Inner {
     pub d: u64,
 }
-impl CachedTreeHashSubtree<Inner> for Inner {
+impl CachedTreeHashSubTree<Inner> for Inner {
     fn item_type() -> ItemType {
         ItemType::Composite
     }
@@ -203,7 +203,7 @@ pub struct Outer {
     pub c: u64,
 }
-impl CachedTreeHashSubtree<Outer> for Outer {
+impl CachedTreeHashSubTree<Outer> for Outer {
     fn item_type() -> ItemType {
         ItemType::Composite
     }

View File

@@ -0,0 +1,16 @@
[package]
name = "tree_hash_derive"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
description = "Procedural derive macros for SSZ tree hashing."

[lib]
proc-macro = true

[dev-dependencies]
tree_hash = { path = "../tree_hash" }

[dependencies]
syn = "0.15"
quote = "0.6"

View File

@@ -0,0 +1,125 @@
#![recursion_limit = "256"]

extern crate proc_macro;

use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
/// Returns a Vec of `syn::Ident` for each named field in the struct, whilst filtering out fields
/// that should not be hashed.
///
/// # Panics
/// Any unnamed struct field (like in a tuple struct) will raise a panic at compile time.
fn get_hashable_named_field_idents<'a>(struct_data: &'a syn::DataStruct) -> Vec<&'a syn::Ident> {
    struct_data
        .fields
        .iter()
        .filter_map(|f| {
            if should_skip_hashing(&f) {
                None
            } else {
                Some(match &f.ident {
                    Some(ref ident) => ident,
                    _ => panic!("tree_hash_derive only supports named struct fields."),
                })
            }
        })
        .collect()
}
/// Returns true if some field has an attribute declaring it should not be hashed.
///
/// The field attribute is: `#[tree_hash(skip_hashing)]`
fn should_skip_hashing(field: &syn::Field) -> bool {
    for attr in &field.attrs {
        if attr.tts.to_string() == "( skip_hashing )" {
            return true;
        }
    }

    false
}
/// Implements `tree_hash::CachedTreeHashSubTree` for some `struct`.
///
/// Fields are hashed in the order they are defined.
#[proc_macro_derive(CachedTreeHashSubTree, attributes(tree_hash))]
pub fn subtree_derive(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as DeriveInput);

    let name = &item.ident;

    let struct_data = match &item.data {
        syn::Data::Struct(s) => s,
        _ => panic!("tree_hash_derive only supports structs."),
    };

    let idents_a = get_hashable_named_field_idents(&struct_data);
    let idents_b = idents_a.clone();
    let idents_c = idents_a.clone();
    let idents_d = idents_a.clone();

    let output = quote! {
        impl tree_hash::CachedTreeHashSubTree<#name> for #name {
            fn item_type() -> tree_hash::ItemType {
                tree_hash::ItemType::Composite
            }

            fn new_cache(&self) -> Result<tree_hash::TreeHashCache, tree_hash::Error> {
                let tree = tree_hash::TreeHashCache::from_leaves_and_subtrees(
                    self,
                    vec![
                        #(
                            self.#idents_a.new_cache()?,
                        )*
                    ],
                )?;

                Ok(tree)
            }

            fn btree_overlay(&self, chunk_offset: usize) -> Result<tree_hash::BTreeOverlay, tree_hash::Error> {
                let mut lengths = vec![];

                #(
                    lengths.push(tree_hash::BTreeOverlay::new(&self.#idents_b, 0)?.total_nodes());
                )*

                tree_hash::BTreeOverlay::from_lengths(chunk_offset, lengths)
            }

            fn packed_encoding(&self) -> Result<Vec<u8>, tree_hash::Error> {
                Err(tree_hash::Error::ShouldNeverBePacked(Self::item_type()))
            }

            fn packing_factor() -> usize {
                1
            }

            fn update_cache(
                &self,
                other: &Self,
                cache: &mut tree_hash::TreeHashCache,
                chunk: usize,
            ) -> Result<usize, tree_hash::Error> {
                let offset_handler = tree_hash::BTreeOverlay::new(self, chunk)?;

                // Skip past the internal nodes and update any changed leaf nodes.
                {
                    let chunk = offset_handler.first_leaf_node()?;
                    #(
                        let chunk = self.#idents_c.update_cache(&other.#idents_d, cache, chunk)?;
                    )*
                }

                for (&parent, children) in offset_handler.iter_internal_nodes().rev() {
                    if cache.either_modified(children)? {
                        cache.modify_chunk(parent, &cache.hash_children(children)?)?;
                    }
                }

                Ok(offset_handler.next_node)
            }
        }
    };

    output.into()
}
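
For reference, the `#[tree_hash(skip_hashing)]` attribute recognised by `should_skip_hashing` above is applied per field. A minimal sketch (the `Foo` struct and its field names are hypothetical, not part of this commit):

use tree_hash_derive::CachedTreeHashSubTree;

// Hypothetical struct: `cached_root` carries the skip attribute, so the derive
// filters it out and only `slot` and `balance` contribute leaves to the
// generated `new_cache`, `btree_overlay` and `update_cache` methods.
#[derive(Clone, Debug, CachedTreeHashSubTree)]
pub struct Foo {
    pub slot: u64,
    pub balance: u64,
    #[tree_hash(skip_hashing)]
    pub cached_root: u64,
}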

View File

@@ -0,0 +1,9 @@
use tree_hash_derive::CachedTreeHashSubTree;

#[derive(Clone, Debug, CachedTreeHashSubTree)]
pub struct Inner {
    pub a: u64,
    pub b: u64,
    pub c: u64,
    pub d: u64,
}
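
As a usage sketch (not part of this commit), the derived impl for `Inner` can be driven through `TreeHashCache::new` and the `Into<Vec<u8>>` conversion, both of which appear in the changed files above; the test body itself is illustrative:

#[test]
fn derived_inner_builds_a_cache() {
    let inner = Inner {
        a: 1,
        b: 2,
        c: 3,
        d: 4,
    };

    // `TreeHashCache::new` drives the derived `new_cache` impl.
    let cache = tree_hash::TreeHashCache::new(&inner).expect("should build cache");

    // The cache is exposed as raw bytes via the `Into<Vec<u8>>` impl.
    let bytes: Vec<u8> = cache.into();
    assert!(!bytes.is_empty());
}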