lighthouse/eth2/utils/ssz/src/tree_hash.rs

use hashing::canonical_hash;
const SSZ_CHUNK_SIZE: usize = 128;
const HASHSIZE: usize = 32;

pub trait TreeHash {
    fn hash_tree_root(&self) -> Vec<u8>;
}
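
// A type would typically implement `TreeHash` by tree-hashing (or byte-encoding)
// each of its fields and merklizing the results. A rough sketch only; the `Foo`
// type and its fields are hypothetical:
//
//     impl TreeHash for Foo {
//         fn hash_tree_root(&self) -> Vec<u8> {
//             let mut fields = vec![self.a.hash_tree_root(), self.b.hash_tree_root()];
//             merkle_hash(&mut fields)
//         }
//     }
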
/// Returns a 32-byte hash of `list`, a vector of byte vectors.
/// Note that this will consume `list`.
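///
/// A minimal usage sketch (the input values are illustrative only):
///
/// ```ignore
/// let mut list = vec![vec![1u8; 100], vec![2u8; 100]];
/// let root = merkle_hash(&mut list);
/// assert_eq!(root.len(), 32);
/// ```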
pub fn merkle_hash(list: &mut Vec<Vec<u8>>) -> Vec<u8> {
    // Flatten the list into a single byte blob.
    let (chunk_size, mut data) = list_to_blob(list);

    // Get the list length as bytes; it will be hashed in with the merkle root.
    let datalen = list.len().to_le_bytes();

    // Merklize.
    let mut mhash = hash_level(&mut data, chunk_size);
    while mhash.len() > HASHSIZE {
        mhash = hash_level(&mut mhash, HASHSIZE);
    }

    mhash.append(&mut datalen.to_vec());
    hash(mhash.as_slice())
}

/// Takes a flat vector of bytes. It then hashes `chunk_size * 2` slices into
/// a byte vector of hashes, divisible by HASHSIZE.
fn hash_level(data: &mut Vec<u8>, chunk_size: usize) -> Vec<u8> {
    let mut result: Vec<u8> = Vec::new();
    for two_chunks in data.chunks(chunk_size * 2) {
        if two_chunks.len() == chunk_size && data.len() > chunk_size {
            // If there is only one chunk here, hash it with an
            // SSZ_CHUNK_SIZE vector of zero bytes.
            let mut c = two_chunks.to_vec();
            c.append(&mut vec![0; SSZ_CHUNK_SIZE]);
            result.append(&mut hash(c.as_slice()));
        } else {
            // Hash two chunks together.
            result.append(&mut hash(two_chunks));
        }
    }
    result
}

/// Flattens `list` into a single byte blob (or a zeroed SSZ_CHUNK_SIZE blob if
/// the list is empty) and returns the chunk size to use when merklizing it.
fn list_to_blob(list: &mut Vec<Vec<u8>>) -> (usize, Vec<u8>) {
    // If items are smaller than SSZ_CHUNK_SIZE, pack as many whole items as
    // fit into one chunk; otherwise each item is its own chunk.
    let chunk_size = if list.is_empty() {
        SSZ_CHUNK_SIZE
    } else if list[0].len() < SSZ_CHUNK_SIZE {
        let items_per_chunk = SSZ_CHUNK_SIZE / list[0].len();
        items_per_chunk * list[0].len()
    } else {
        list[0].len()
    };

    let mut data = Vec::new();
    if list.is_empty() {
        // Handle an empty list.
        data.append(&mut vec![0; SSZ_CHUNK_SIZE]);
    } else {
        // Just create a blob here; we'll divide it into
        // chunked slices when we merklize.
        data.reserve(list[0].len() * list.len());
        for item in list.iter_mut() {
            data.append(item);
        }
    }

    (chunk_size, data)
}

pub fn hash(data: &[u8]) -> Vec<u8> {
    canonical_hash(data)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_merkle_hash() {
        let data1 = vec![1; 100];
        let data2 = vec![2; 100];
        let data3 = vec![3; 100];
        let mut list = vec![data1, data2, data3];
        let result = merkle_hash(&mut list);
        // Note: this should also be tested against a known hash value.
        assert_eq!(HASHSIZE, result.len());
        println!("merkle_hash: {:?}", result);
    }
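
    // Sketch of an additional check for the empty-list path, where
    // `list_to_blob` substitutes an SSZ_CHUNK_SIZE blob of zeros; it only
    // asserts the output length, not a known root value.
    #[test]
    fn test_merkle_hash_empty_list() {
        let mut list: Vec<Vec<u8>> = vec![];
        let result = merkle_hash(&mut list);
        assert_eq!(HASHSIZE, result.len());
    }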
}