Remove tree hashing from ssz crate

This commit is contained in:
Paul Hauner 2019-04-16 12:29:39 +10:00
parent 024b9e315a
commit 3eaa06d758
No known key found for this signature in database
GPG Key ID: D362883A9218FCC6
9 changed files with 128 additions and 423 deletions

View File

@@ -1,85 +0,0 @@
use super::ethereum_types::{Address, H256};
use super::{merkle_hash, ssz_encode, TreeHash};
use hashing::hash;
impl TreeHash for u8 {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for u16 {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for u32 {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for u64 {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for usize {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for bool {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for Address {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for H256 {
fn hash_tree_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl TreeHash for [u8] {
fn hash_tree_root(&self) -> Vec<u8> {
if self.len() > 32 {
return hash(&self);
}
self.to_vec()
}
}
impl<T> TreeHash for Vec<T>
where
T: TreeHash,
{
/// Returns the merkle_hash of a list of hash_tree_root values created
/// from the given list.
/// Note: a byte vector, Vec<u8>, must be converted to a slice (via as_slice())
/// to be handled properly (i.e. hashed) as a byte array.
fn hash_tree_root(&self) -> Vec<u8> {
let mut tree_hashes = self.iter().map(|x| x.hash_tree_root()).collect();
merkle_hash(&mut tree_hashes)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_impl_tree_hash_vec() {
let result = vec![1u32, 2, 3, 4, 5, 6, 7].hash_tree_root();
assert_eq!(result.len(), 32);
}
}
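
For reference, a minimal sketch (not part of this commit) of the Vec<u8> caveat noted above, written against the ssz crate as it existed before this change: a Vec<u8> is dispatched to the generic Vec<T> impl and merkle-hashed element by element, whereas calling as_slice() routes it through the [u8] impl, which returns short byte arrays as-is.

use ssz::TreeHash;

fn main() {
    let bytes: Vec<u8> = vec![1, 2, 3, 4];

    // Via the Vec<T> impl: each u8 is tree-hashed, then the list is merkle-hashed.
    let as_list_root = bytes.hash_tree_root();

    // Via the [u8] impl: 4 bytes <= 32, so the slice is returned unchanged.
    let as_bytes_root = bytes.as_slice().hash_tree_root();

    assert_eq!(as_list_root.len(), 32);
    assert_eq!(as_bytes_root, vec![1, 2, 3, 4]);
    assert_ne!(as_list_root, as_bytes_root);
}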

View File

@@ -12,17 +12,12 @@ extern crate ethereum_types;
pub mod decode;
pub mod encode;
mod signed_root;
pub mod tree_hash;
mod impl_decode;
mod impl_encode;
mod impl_tree_hash;
pub use crate::decode::{decode, decode_ssz_list, Decodable, DecodeError};
pub use crate::encode::{Encodable, SszStream};
pub use crate::signed_root::SignedRoot;
pub use crate::tree_hash::{merkle_hash, TreeHash};
pub use hashing::hash;

View File

@@ -1,5 +0,0 @@
use crate::TreeHash;
pub trait SignedRoot: TreeHash {
fn signed_root(&self) -> Vec<u8>;
}

View File

@@ -1,107 +0,0 @@
use hashing::hash;
const BYTES_PER_CHUNK: usize = 32;
const HASHSIZE: usize = 32;
pub trait TreeHash {
fn hash_tree_root(&self) -> Vec<u8>;
}
/// Returns a 32-byte hash of `list`, a vector of byte vectors.
/// Note that this empties the inner vectors of `list`.
pub fn merkle_hash(list: &mut Vec<Vec<u8>>) -> Vec<u8> {
// flatten list
let mut chunkz = list_to_blob(list);
// get the list length as bytes; it will be hashed with the merkle root
let mut datalen = list.len().to_le_bytes().to_vec();
zpad(&mut datalen, 32);
// merkleize
while chunkz.len() > HASHSIZE {
let mut new_chunkz: Vec<u8> = Vec::new();
for two_chunks in chunkz.chunks(BYTES_PER_CHUNK * 2) {
// Hash two chunks together
new_chunkz.append(&mut hash(two_chunks));
}
chunkz = new_chunkz;
}
chunkz.append(&mut datalen);
hash(&chunkz)
}
fn list_to_blob(list: &mut Vec<Vec<u8>>) -> Vec<u8> {
// pack - fit as many items per chunk as we can, then
// right-pad to BYTES_PER_CHUNK
let (items_per_chunk, chunk_count) = if list.is_empty() {
(1, 1)
} else {
let items_per_chunk = BYTES_PER_CHUNK / list[0].len();
let chunk_count = list.len() / items_per_chunk;
(items_per_chunk, chunk_count)
};
let mut chunkz = Vec::new();
if list.is_empty() {
// handle an empty list
chunkz.append(&mut vec![0; BYTES_PER_CHUNK * 2]);
} else if list[0].len() <= BYTES_PER_CHUNK {
// just create a blob here; we'll divide into
// chunked slices when we merkleize
let mut chunk = Vec::with_capacity(BYTES_PER_CHUNK);
let mut item_count_in_chunk = 0;
chunkz.reserve(chunk_count * BYTES_PER_CHUNK);
for item in list.iter_mut() {
item_count_in_chunk += 1;
chunk.append(item);
// completed chunk?
if item_count_in_chunk == items_per_chunk {
zpad(&mut chunk, BYTES_PER_CHUNK);
chunkz.append(&mut chunk);
item_count_in_chunk = 0;
}
}
// left-over uncompleted chunk?
if item_count_in_chunk != 0 {
zpad(&mut chunk, BYTES_PER_CHUNK);
chunkz.append(&mut chunk);
}
}
// extend the number of chunks to a power of two if necessary
if !chunk_count.is_power_of_two() {
let zero_chunks_count = chunk_count.next_power_of_two() - chunk_count;
chunkz.append(&mut vec![0; zero_chunks_count * BYTES_PER_CHUNK]);
}
chunkz
}
/// Right-pads `bytes` with zeros so that it is `size` bytes long.
fn zpad(bytes: &mut Vec<u8>, size: usize) {
if bytes.len() < size {
bytes.resize(size, 0);
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_merkle_hash() {
let data1 = vec![1; 32];
let data2 = vec![2; 32];
let data3 = vec![3; 32];
let mut list = vec![data1, data2, data3];
let result = merkle_hash(&mut list);
// note: should test against a known hash value
assert_eq!(HASHSIZE, result.len());
}
}
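
A minimal worked sketch (not part of this commit) of the scheme above, assuming the hashing crate's SHA-256 `hash` and the removed `merkle_hash`: two 32-byte items pack into two chunks (already a power of two), the pair is hashed together, and the list length is mixed in at the end.

use hashing::hash;
use ssz::merkle_hash;

fn main() {
    let chunk_a = vec![1u8; 32];
    let chunk_b = vec![2u8; 32];

    // merkle_hash drains the inner vectors, so hand it clones.
    let root = merkle_hash(&mut vec![chunk_a.clone(), chunk_b.clone()]);

    // Manually: hash the two chunks together, then mix in the list length.
    let pair_root = hash(&[chunk_a, chunk_b].concat());
    let mut len_bytes = 2usize.to_le_bytes().to_vec();
    len_bytes.resize(32, 0);
    let expected = hash(&[pair_root, len_bytes].concat());

    assert_eq!(root, expected);
    assert_eq!(root.len(), 32);
}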

View File

@@ -188,157 +188,3 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
};
output.into()
}
/// Returns a Vec of `syn::Ident` for each named field in the struct, whilst filtering out fields
/// that should not be tree hashed.
///
/// # Panics
/// Any unnamed struct field (like in a tuple struct) will raise a panic at compile time.
fn get_tree_hashable_named_field_idents<'a>(
struct_data: &'a syn::DataStruct,
) -> Vec<&'a syn::Ident> {
struct_data
.fields
.iter()
.filter_map(|f| {
if should_skip_tree_hash(&f) {
None
} else {
Some(match &f.ident {
Some(ref ident) => ident,
_ => panic!("ssz_derive only supports named struct fields."),
})
}
})
.collect()
}
/// Returns true if some field has an attribute declaring it should not be tree-hashed.
///
/// The field attribute is: `#[tree_hash(skip_hashing)]`
fn should_skip_tree_hash(field: &syn::Field) -> bool {
for attr in &field.attrs {
if attr.into_token_stream().to_string() == "# [ tree_hash ( skip_hashing ) ]" {
return true;
}
}
false
}
/// Implements `ssz::TreeHash` for some `struct`.
///
/// Fields are processed in the order they are defined.
#[proc_macro_derive(TreeHash, attributes(tree_hash))]
pub fn ssz_tree_hash_derive(input: TokenStream) -> TokenStream {
let item = parse_macro_input!(input as DeriveInput);
let name = &item.ident;
let struct_data = match &item.data {
syn::Data::Struct(s) => s,
_ => panic!("ssz_derive only supports structs."),
};
let field_idents = get_tree_hashable_named_field_idents(&struct_data);
let output = quote! {
impl ssz::TreeHash for #name {
fn hash_tree_root(&self) -> Vec<u8> {
let mut list: Vec<Vec<u8>> = Vec::new();
#(
list.push(self.#field_idents.hash_tree_root());
)*
ssz::merkle_hash(&mut list)
}
}
};
output.into()
}
/// Returns `true` if some `Ident` should be considered to be a signature type.
fn type_ident_is_signature(ident: &syn::Ident) -> bool {
match ident.to_string().as_ref() {
"Signature" => true,
"AggregateSignature" => true,
_ => false,
}
}
/// Takes a `Field` where the type (`ty`) portion is a path (e.g., `types::Signature`) and returns
/// the final `Ident` in that path.
///
/// E.g., for `types::Signature` returns `Signature`.
fn final_type_ident(field: &syn::Field) -> &syn::Ident {
match &field.ty {
syn::Type::Path(path) => &path.path.segments.last().unwrap().value().ident,
_ => panic!("ssz_derive only supports Path types."),
}
}
/// Implements `ssz::SignedRoot` for some `struct`, excluding from the hash the first field whose
/// type is "Signature" or "AggregateSignature" and every field that follows it.
///
/// See:
/// https://github.com/ethereum/eth2.0-specs/blob/master/specs/simple-serialize.md#signed-roots
///
/// This is a rather horrendous macro: it reads the type of the field as a string and decides
/// whether it is a signature by matching that string against "Signature" or "AggregateSignature".
/// So, it's important that you use those exact names as your types -- don't alias them to something else.
///
/// If you can think of a better way to do this, please make an issue!
///
/// Fields are processed in the order they are defined.
#[proc_macro_derive(SignedRoot, attributes(signed_root))]
pub fn ssz_signed_root_derive(input: TokenStream) -> TokenStream {
let item = parse_macro_input!(input as DeriveInput);
let name = &item.ident;
let struct_data = match &item.data {
syn::Data::Struct(s) => s,
_ => panic!("ssz_derive only supports structs."),
};
let field_idents = get_signed_root_named_field_idents(&struct_data);
let output = quote! {
impl ssz::SignedRoot for #name {
fn signed_root(&self) -> Vec<u8> {
let mut list: Vec<Vec<u8>> = Vec::new();
#(
list.push(self.#field_idents.hash_tree_root());
)*
ssz::merkle_hash(&mut list)
}
}
};
output.into()
}
fn get_signed_root_named_field_idents(struct_data: &syn::DataStruct) -> Vec<&syn::Ident> {
struct_data
.fields
.iter()
.filter_map(|f| {
if should_skip_signed_root(&f) {
None
} else {
Some(match &f.ident {
Some(ref ident) => ident,
_ => panic!("ssz_derive only supports named struct fields"),
})
}
})
.collect()
}
fn should_skip_signed_root(field: &syn::Field) -> bool {
field
.attrs
.iter()
.any(|attr| attr.into_token_stream().to_string() == "# [ signed_root ( skip_hashing ) ]")
}
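
A hedged sketch (not part of this commit) of roughly what the derive above generated for a hypothetical struct under the pre-removal ssz crate; `Foo` and its fields are made up for illustration, and the field tagged with the skip attribute is simply omitted from the hash.

use ssz::TreeHash;

pub struct Foo {
    pub slot: u64,
    pub signature_stub: u64, // stands in for a field tagged #[signed_root(skip_hashing)]
}

// Approximate expansion of #[derive(SignedRoot)] on Foo, skipped field omitted:
impl ssz::SignedRoot for Foo {
    fn signed_root(&self) -> Vec<u8> {
        let mut list: Vec<Vec<u8>> = Vec::new();
        list.push(self.slot.hash_tree_root());
        ssz::merkle_hash(&mut list)
    }
}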

View File

@@ -8,7 +8,7 @@ pub const MERKLE_HASH_CHUNCK: usize = 2 * BYTES_PER_CHUNK;
pub use cached_tree_hash::{BTreeOverlay, CachedTreeHashSubTree, Error, TreeHashCache};
pub use signed_root::SignedRoot;
pub use standard_tree_hash::{efficient_merkleize, TreeHash};
pub use standard_tree_hash::{merkle_root, TreeHash};
#[derive(Debug, PartialEq, Clone)]
pub enum TreeHashType {
@@ -25,3 +25,26 @@ fn num_sanitized_leaves(num_bytes: usize) -> usize {
fn num_nodes(num_leaves: usize) -> usize {
2 * num_leaves - 1
}
#[macro_export]
macro_rules! impl_tree_hash_for_ssz_bytes {
($type: ident) => {
impl tree_hash::TreeHash for $type {
fn tree_hash_type() -> tree_hash::TreeHashType {
tree_hash::TreeHashType::List
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
panic!("bytesN should never be packed.")
}
fn tree_hash_packing_factor() -> usize {
panic!("bytesN should never be packed.")
}
fn tree_hash_root(&self) -> Vec<u8> {
tree_hash::merkle_root(&ssz::ssz_encode(self))
}
}
};
}
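
A hedged usage sketch (not part of this commit) of the macro above, applied to a hypothetical newtype `Bytes48`. The `ssz::Encodable` impl and its `append_encoded_raw` call are assumptions about the existing ssz crate's encoding API, not something introduced by this change.

use tree_hash::impl_tree_hash_for_ssz_bytes;

// Hypothetical fixed-length byte wrapper.
pub struct Bytes48(pub [u8; 48]);

// Assumed: the type already SSZ-encodes to its raw bytes via an existing SszStream helper.
impl ssz::Encodable for Bytes48 {
    fn ssz_append(&self, s: &mut ssz::SszStream) {
        s.append_encoded_raw(&self.0);
    }
}

impl_tree_hash_for_ssz_bytes!(Bytes48);

// Bytes48([0u8; 48]).tree_hash_root() is now tree_hash::merkle_root of the
// 48 encoded bytes, i.e. a single 32-byte root.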

View File

@@ -3,6 +3,8 @@ use hashing::hash;
use int_to_bytes::int_to_bytes32;
use ssz::ssz_encode;
mod impls;
pub trait TreeHash {
fn tree_hash_type() -> TreeHashType;
@@ -13,70 +15,9 @@ pub trait TreeHash {
fn tree_hash_root(&self) -> Vec<u8>;
}
impl TreeHash for u64 {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Basic
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
ssz_encode(self)
}
fn tree_hash_packing_factor() -> usize {
HASHSIZE / 8
}
fn tree_hash_root(&self) -> Vec<u8> {
int_to_bytes32(*self)
}
}
impl<T> TreeHash for Vec<T>
where
T: TreeHash,
{
fn tree_hash_type() -> TreeHashType {
TreeHashType::List
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
unreachable!("List should never be packed.")
}
fn tree_hash_packing_factor() -> usize {
unreachable!("List should never be packed.")
}
fn tree_hash_root(&self) -> Vec<u8> {
let leaves = match T::tree_hash_type() {
TreeHashType::Basic => {
let mut leaves =
Vec::with_capacity((HASHSIZE / T::tree_hash_packing_factor()) * self.len());
for item in self {
leaves.append(&mut item.tree_hash_packed_encoding());
}
leaves
}
TreeHashType::Composite | TreeHashType::List => {
let mut leaves = Vec::with_capacity(self.len() * HASHSIZE);
for item in self {
leaves.append(&mut item.tree_hash_root())
}
leaves
}
};
// Mix in the length
let mut root_and_len = Vec::with_capacity(HASHSIZE * 2);
root_and_len.append(&mut efficient_merkleize(&leaves)[0..32].to_vec());
root_and_len.append(&mut int_to_bytes32(self.len() as u64));
hash(&root_and_len)
}
pub fn merkle_root(bytes: &[u8]) -> Vec<u8> {
// TODO: replace this with a more memory-efficient implementation.
efficient_merkleize(&bytes)[0..32].to_vec()
}
pub fn efficient_merkleize(bytes: &[u8]) -> Vec<u8> {

View File

@@ -0,0 +1,97 @@
use super::*;
use ethereum_types::H256;
macro_rules! impl_for_bitsize {
($type: ident, $bit_size: expr) => {
impl TreeHash for $type {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Basic
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
ssz_encode(self)
}
fn tree_hash_packing_factor() -> usize {
HASHSIZE / ($bit_size / 8)
}
fn tree_hash_root(&self) -> Vec<u8> {
int_to_bytes32(*self as u64)
}
}
};
}
impl_for_bitsize!(u8, 8);
impl_for_bitsize!(u16, 16);
impl_for_bitsize!(u32, 32);
impl_for_bitsize!(u64, 64);
impl_for_bitsize!(usize, 64);
impl_for_bitsize!(bool, 8);
impl TreeHash for H256 {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Basic
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
ssz_encode(self)
}
fn tree_hash_packing_factor() -> usize {
1
}
fn tree_hash_root(&self) -> Vec<u8> {
ssz_encode(self)
}
}
impl<T> TreeHash for Vec<T>
where
T: TreeHash,
{
fn tree_hash_type() -> TreeHashType {
TreeHashType::List
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
unreachable!("List should never be packed.")
}
fn tree_hash_packing_factor() -> usize {
unreachable!("List should never be packed.")
}
fn tree_hash_root(&self) -> Vec<u8> {
let leaves = match T::tree_hash_type() {
TreeHashType::Basic => {
let mut leaves =
Vec::with_capacity((HASHSIZE / T::tree_hash_packing_factor()) * self.len());
for item in self {
leaves.append(&mut item.tree_hash_packed_encoding());
}
leaves
}
TreeHashType::Composite | TreeHashType::List => {
let mut leaves = Vec::with_capacity(self.len() * HASHSIZE);
for item in self {
leaves.append(&mut item.tree_hash_root())
}
leaves
}
};
// Mix in the length
let mut root_and_len = Vec::with_capacity(HASHSIZE * 2);
root_and_len.append(&mut merkle_root(&leaves));
root_and_len.append(&mut int_to_bytes32(self.len() as u64));
hash(&root_and_len)
}
}
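
A minimal sketch (not part of this commit) illustrating the packing and length mix-in above for a vector of basic items, assuming the hashing, int_to_bytes and tree_hash crates used in this file:

use hashing::hash;
use int_to_bytes::int_to_bytes32;
use tree_hash::{merkle_root, TreeHash};

fn main() {
    let list: Vec<u64> = vec![1, 2, 3];

    // Basic items are packed via their SSZ encodings before merkleization.
    let mut leaves = Vec::new();
    for item in &list {
        leaves.append(&mut item.tree_hash_packed_encoding());
    }

    // The list root is merkle_root(leaves) with the list length mixed in.
    let mut root_and_len = merkle_root(&leaves);
    root_and_len.append(&mut int_to_bytes32(list.len() as u64));
    let expected = hash(&root_and_len);

    assert_eq!(list.tree_hash_root(), expected);
    assert_eq!(expected.len(), 32);
}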

View File

@@ -149,7 +149,7 @@ pub fn tree_hash_derive(input: TokenStream) -> TokenStream {
leaves.append(&mut self.#idents.tree_hash_root());
)*
tree_hash::efficient_merkleize(&leaves)[0..32].to_vec()
tree_hash::merkle_root(&leaves)
}
}
};
@@ -191,7 +191,7 @@ pub fn tree_hash_signed_root_derive(input: TokenStream) -> TokenStream {
leaves.append(&mut self.#idents.tree_hash_root());
)*
tree_hash::efficient_merkleize(&leaves)[0..32].to_vec()
tree_hash::merkle_root(&leaves)
}
}
};
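
A hedged usage sketch (not part of this commit) of the reworked derives. The derive crate name `tree_hash_derive`, the derive names `TreeHash`/`SignedRoot`, the `#[signed_root(skip_hashing)]` attribute, and the `HypotheticalHeader` struct are all assumptions made for illustration, mirroring the removed ssz_derive conventions shown earlier.

use tree_hash::{SignedRoot as _, TreeHash as _};
use tree_hash_derive::{SignedRoot, TreeHash};

#[derive(TreeHash, SignedRoot)]
pub struct HypotheticalHeader {
    pub slot: u64,
    pub parent_root: ethereum_types::H256,
    // Assumed attribute name; excluded from the signed root only.
    #[signed_root(skip_hashing)]
    pub signature_stub: u64,
}

fn main() {
    let header = HypotheticalHeader {
        slot: 1,
        parent_root: ethereum_types::H256::zero(),
        signature_stub: 0,
    };

    // All three fields contribute to the tree hash root...
    assert_eq!(header.tree_hash_root().len(), 32);
    // ...while the signed root skips the field tagged above.
    assert_eq!(header.signed_root().len(), 32);
}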