#![recursion_limit = "256"]

extern crate proc_macro;

use proc_macro::TokenStream;
use quote::{quote, ToTokens};
use syn::{parse_macro_input, DeriveInput};

/// Returns a Vec of `syn::Ident` for each named field in the struct, whilst filtering out fields
/// that should not be hashed.
///
/// # Panics
/// Any unnamed struct field (like in a tuple struct) will raise a panic at compile time.
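///
/// For illustration only (a hypothetical struct, shown as a non-compiled sketch): given
///
/// ```ignore
/// struct Foo {
///     a: u64,
///     #[tree_hash(skip_hashing)]
///     b: u64,
/// }
/// ```
///
/// only the ident for `a` is returned, since `b` carries the skip attribute.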
fn get_hashable_named_field_idents<'a>(struct_data: &'a syn::DataStruct) -> Vec<&'a syn::Ident> {
    struct_data
        .fields
        .iter()
        .filter_map(|f| {
            if should_skip_hashing(&f) {
                None
            } else {
                Some(match &f.ident {
                    Some(ref ident) => ident,
                    _ => panic!("tree_hash_derive only supports named struct fields."),
                })
            }
        })
        .collect()
}

/// Returns true if some field has an attribute declaring it should not be hashed.
///
/// The field attribute is: `#[tree_hash(skip_hashing)]`
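///
/// Note: the check compares the attribute's stringified tokens (via
/// `into_token_stream().to_string()`), which separates tokens with single spaces; hence the
/// space-padded literal `"# [ tree_hash ( skip_hashing ) ]"` below.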
fn should_skip_hashing(field: &syn::Field) -> bool {
    field
        .attrs
        .iter()
        .any(|attr| attr.into_token_stream().to_string() == "# [ tree_hash ( skip_hashing ) ]")
}

/// Implements `tree_hash::CachedTreeHash` for some `struct`.
///
/// Fields are hashed in the order they are defined.
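///
/// # Example
///
/// A usage sketch only; `Foo` and its fields are hypothetical and the block is not compiled
/// here:
///
/// ```ignore
/// #[derive(CachedTreeHash)]
/// pub struct Foo {
///     a: u64,
///     b: u64,
///     // Excluded from hashing entirely.
///     #[tree_hash(skip_hashing)]
///     scratch: u64,
/// }
/// ```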
#[proc_macro_derive(CachedTreeHash, attributes(tree_hash))]
pub fn subtree_derive(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as DeriveInput);
    let (impl_generics, ty_generics, where_clause) = &item.generics.split_for_impl();

    let name = &item.ident;

    let struct_data = match &item.data {
        syn::Data::Struct(s) => s,
        _ => panic!("tree_hash_derive only supports structs."),
    };

    // The same list of field idents is interpolated into several `#(...)*` repetition groups
    // below, so a separate binding is used for each group.
    let idents_a = get_hashable_named_field_idents(&struct_data);
    let idents_b = idents_a.clone();
    let idents_c = idents_a.clone();

    let output = quote! {
        impl #impl_generics cached_tree_hash::CachedTreeHash for #name #ty_generics #where_clause {
            fn new_tree_hash_cache(&self, depth: usize) -> Result<cached_tree_hash::TreeHashCache, cached_tree_hash::Error> {
                let tree = cached_tree_hash::TreeHashCache::from_subtrees(
                    self,
                    vec![
                        #(
                            self.#idents_a.new_tree_hash_cache(depth)?,
                        )*
                    ],
                    depth
                )?;

                Ok(tree)
            }

            fn num_tree_hash_cache_chunks(&self) -> usize {
                cached_tree_hash::BTreeOverlay::new(self, 0, 0).num_chunks()
            }

            fn tree_hash_cache_schema(&self, depth: usize) -> cached_tree_hash::BTreeSchema {
                let mut lengths = vec![];

                #(
                    lengths.push(self.#idents_b.num_tree_hash_cache_chunks());
                )*

                cached_tree_hash::BTreeSchema::from_lengths(depth, lengths)
            }

            fn update_tree_hash_cache(&self, cache: &mut cached_tree_hash::TreeHashCache) -> Result<(), cached_tree_hash::Error> {
                let overlay = cached_tree_hash::BTreeOverlay::new(self, cache.chunk_index, 0);

                // Skip the chunk index to the first leaf node of this struct.
                cache.chunk_index = overlay.first_leaf_node();
                // Skip the overlay index to the first leaf node of this struct.
                // cache.overlay_index += 1;

                // Recurse into the struct items, updating their caches.
                #(
                    self.#idents_c.update_tree_hash_cache(cache)?;
                )*

                // Iterate through the internal nodes, updating them if their children have changed.
                cache.update_internal_nodes(&overlay)?;

                cache.chunk_index = overlay.next_node();

                Ok(())
            }
        }
    };

    output.into()
}

/// Implements `tree_hash::TreeHash` for some `struct`.
///
/// Fields are hashed in the order they are defined.
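///
/// # Example
///
/// A usage sketch only; `Foo` and its fields are hypothetical and the block is not compiled
/// here. The derived `tree_hash_root` merkleizes the non-skipped fields in definition order:
///
/// ```ignore
/// #[derive(TreeHash)]
/// pub struct Foo {
///     a: u64,
///     b: u64,
///     #[tree_hash(skip_hashing)]
///     scratch: u64,
/// }
///
/// let foo = Foo { a: 1, b: 2, scratch: 42 };
/// // Merkle root over the `a` and `b` leaves; `scratch` is ignored.
/// let root: Vec<u8> = tree_hash::TreeHash::tree_hash_root(&foo);
/// ```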
#[proc_macro_derive(TreeHash, attributes(tree_hash))]
pub fn tree_hash_derive(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as DeriveInput);

    let name = &item.ident;
    let (impl_generics, ty_generics, where_clause) = &item.generics.split_for_impl();

    let struct_data = match &item.data {
        syn::Data::Struct(s) => s,
        _ => panic!("tree_hash_derive only supports structs."),
    };

    let idents = get_hashable_named_field_idents(&struct_data);

    let output = quote! {
        impl #impl_generics tree_hash::TreeHash for #name #ty_generics #where_clause {
            fn tree_hash_type() -> tree_hash::TreeHashType {
                tree_hash::TreeHashType::Container
            }

            fn tree_hash_packed_encoding(&self) -> Vec<u8> {
                unreachable!("Struct should never be packed.")
            }

            fn tree_hash_packing_factor() -> usize {
                unreachable!("Struct should never be packed.")
            }

            fn tree_hash_root(&self) -> Vec<u8> {
                let mut leaves = Vec::with_capacity(4 * tree_hash::HASHSIZE);

                #(
                    leaves.append(&mut self.#idents.tree_hash_root());
                )*

                tree_hash::merkle_root(&leaves)
            }
        }
    };

    output.into()
}
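
/// Implements `tree_hash::SignedRoot` for some `struct`.
///
/// Fields are hashed in the order they are defined, skipping any field marked
/// `#[signed_root(skip_hashing)]`.
///
/// # Example
///
/// A usage sketch only; `SignedThing` and its fields are hypothetical and the block is not
/// compiled here. The typical use is to exclude a signature field from the signed root:
///
/// ```ignore
/// #[derive(SignedRoot)]
/// pub struct SignedThing {
///     data: u64,
///     #[signed_root(skip_hashing)]
///     signature: Vec<u8>,
/// }
/// ```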
#[proc_macro_derive(SignedRoot, attributes(signed_root))]
pub fn tree_hash_signed_root_derive(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as DeriveInput);

    let name = &item.ident;

    let struct_data = match &item.data {
        syn::Data::Struct(s) => s,
        _ => panic!("tree_hash_derive only supports structs."),
    };

    let idents = get_signed_root_named_field_idents(&struct_data);
    let num_elems = idents.len();

    let output = quote! {
        impl tree_hash::SignedRoot for #name {
            fn signed_root(&self) -> Vec<u8> {
                let mut leaves = Vec::with_capacity(#num_elems * tree_hash::HASHSIZE);

                #(
                    leaves.append(&mut self.#idents.tree_hash_root());
                )*

                tree_hash::merkle_root(&leaves)
            }
        }
    };

    output.into()
}

/// Returns a Vec of `syn::Ident` for each named field in the struct, whilst filtering out fields
/// that should not be included in the signed root.
///
/// # Panics
/// Any unnamed struct field (like in a tuple struct) will raise a panic at compile time.
fn get_signed_root_named_field_idents(struct_data: &syn::DataStruct) -> Vec<&syn::Ident> {
    struct_data
        .fields
        .iter()
        .filter_map(|f| {
            if should_skip_signed_root(&f) {
                None
            } else {
                Some(match &f.ident {
                    Some(ref ident) => ident,
                    _ => panic!("tree_hash_derive only supports named struct fields."),
                })
            }
        })
        .collect()
}

/// Returns true if some field has an attribute declaring it should not be included in the
/// signed root.
///
/// The field attribute is: `#[signed_root(skip_hashing)]`
fn should_skip_signed_root(field: &syn::Field) -> bool {
    field
        .attrs
        .iter()
        .any(|attr| attr.into_token_stream().to_string() == "# [ signed_root ( skip_hashing ) ]")
}