Merge branch 'master' into new-pubkey-format

Paul Hauner 2019-03-14 19:02:10 +13:00 committed by GitHub
commit 9e7d495dca
38 changed files with 680 additions and 584 deletions


@ -198,6 +198,7 @@ mod tests {
}
#[test]
#[ignore]
fn test_block_at_slot() {
let db = Arc::new(MemoryDB::open());
let bs = Arc::new(BeaconBlockStore::new(db.clone()));


@ -409,11 +409,23 @@ impl<T: ClientDB + Sized> ForkChoice for BitwiseLMDGhost<T> {
*child_votes.entry(child).or_insert_with(|| 0) += vote;
}
}
// check if we have votes of children, if not select the smallest hash child
if child_votes.is_empty() {
current_head = *children
.iter()
.min_by(|child1, child2| child1.cmp(child2))
.expect("Must be children here");
trace!(
"Children have no votes - smallest hash chosen: {}",
current_head
);
} else {
// given the votes on the children, find the best child
current_head = self
.choose_best_child(&child_votes)
.ok_or(ForkChoiceError::CannotFindBestChild)?;
trace!("Best child found: {}", current_head);
}
}
// didn't find head yet, proceed to next iteration
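The new fallback picks the lexicographically smallest child hash whenever no child has any votes. A tiny self-contained illustration of that ordering (hypothetical values, not part of this diff; Hash256 compares byte-wise, so plain byte arrays behave the same way):

#[test]
fn smallest_hash_wins_when_there_are_no_votes() {
    // Byte-wise minimum, exactly what `min_by(|c1, c2| c1.cmp(c2))` selects above.
    let children: Vec<[u8; 4]> = vec![[0, 0, 0, 2], [0, 0, 0, 1], [0, 0, 0, 3]];
    let smallest = *children.iter().min_by(|c1, c2| c1.cmp(c2)).unwrap();
    assert_eq!(smallest, [0, 0, 0, 1]);
}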


@ -22,6 +22,7 @@ extern crate types;
pub mod bitwise_lmd_ghost;
pub mod longest_chain;
pub mod optimized_lmd_ghost;
pub mod slow_lmd_ghost;
use db::stores::BeaconBlockAtSlotError;
@ -30,6 +31,7 @@ use types::{BeaconBlock, ChainSpec, Hash256};
pub use bitwise_lmd_ghost::BitwiseLMDGhost;
pub use longest_chain::LongestChain;
pub use optimized_lmd_ghost::OptimizedLMDGhost;
pub use slow_lmd_ghost::SlowLMDGhost;
/// Defines the interface for Fork Choices. Each Fork choice will define their own data structures
@ -101,4 +103,6 @@ pub enum ForkChoiceAlgorithm {
SlowLMDGhost,
/// An optimised version of bitwise LMD-GHOST by Vitalik.
BitwiseLMDGhost,
/// An optimised implementation of LMD ghost.
OptimizedLMDGhost,
}
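For reference, callers pick an implementation by matching on this enum; a sketch mirroring the test harness later in this commit (`algorithm`, `block_store` and `state_store` are assumed bindings, not part of the diff):

let fork_choice: Box<ForkChoice> = match algorithm {
    ForkChoiceAlgorithm::OptimizedLMDGhost => Box::new(OptimizedLMDGhost::new(
        block_store.clone(),
        state_store.clone(),
    )),
    ForkChoiceAlgorithm::BitwiseLMDGhost => Box::new(BitwiseLMDGhost::new(
        block_store.clone(),
        state_store.clone(),
    )),
    // remaining variants are constructed analogously
    _ => unimplemented!(),
};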


@ -0,0 +1,465 @@
//! The optimised bitwise LMD-GHOST fork choice rule.
extern crate bit_vec;
use crate::{ForkChoice, ForkChoiceError};
use db::{
stores::{BeaconBlockStore, BeaconStateStore},
ClientDB,
};
use log::{debug, trace};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::sync::Arc;
use types::{
readers::BeaconBlockReader, validator_registry::get_active_validator_indices, BeaconBlock,
ChainSpec, Hash256, Slot, SlotHeight,
};
//TODO: Pruning - Children
//TODO: Handle Syncing
// NOTE: This uses u32 to represent difference between block heights. Thus this is only
// applicable for block height differences in the range of a u32.
// This can potentially be parallelized in some parts.
/// Compute the base-2 logarithm of an integer, floored (rounded down)
#[inline]
fn log2_int(x: u64) -> u32 {
if x == 0 {
return 0;
}
63 - x.leading_zeros()
}
fn power_of_2_below(x: u64) -> u64 {
2u64.pow(log2_int(x))
}
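// Illustrative values: log2_int(1) == 0, log2_int(8) == 3, power_of_2_below(33) == 32
// (cf. the tests at the bottom of this file). `find_head` below seeds its logarithmic
// skip search with `power_of_2_below(max_known_height - block_height) / 2`.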
/// Stores the necessary data structures to run the optimised lmd ghost algorithm.
pub struct OptimizedLMDGhost<T: ClientDB + Sized> {
/// A cache of known ancestors at given heights for a specific block.
//TODO: Consider FnvHashMap
cache: HashMap<CacheKey<u64>, Hash256>,
/// Log lookup table for blocks to their ancestors.
//TODO: Verify we only want/need a size 16 log lookup
ancestors: Vec<HashMap<Hash256, Hash256>>,
/// Stores the children for any given parent.
children: HashMap<Hash256, Vec<Hash256>>,
/// The latest attestation targets as a map of validator index to block hash.
//TODO: Could this be a fixed size vec
latest_attestation_targets: HashMap<u64, Hash256>,
/// Block storage access.
block_store: Arc<BeaconBlockStore<T>>,
/// State storage access.
state_store: Arc<BeaconStateStore<T>>,
max_known_height: SlotHeight,
}
impl<T> OptimizedLMDGhost<T>
where
T: ClientDB + Sized,
{
pub fn new(
block_store: Arc<BeaconBlockStore<T>>,
state_store: Arc<BeaconStateStore<T>>,
) -> Self {
OptimizedLMDGhost {
cache: HashMap::new(),
ancestors: vec![HashMap::new(); 16],
latest_attestation_targets: HashMap::new(),
children: HashMap::new(),
max_known_height: SlotHeight::new(0),
block_store,
state_store,
}
}
/// Finds the latest votes weighted by validator balance. Returns a hashmap of block_hash to
/// weighted votes.
pub fn get_latest_votes(
&self,
state_root: &Hash256,
block_slot: Slot,
spec: &ChainSpec,
) -> Result<HashMap<Hash256, u64>, ForkChoiceError> {
// get latest votes
// Note: Votes are weighted by min(balance, MAX_DEPOSIT_AMOUNT) //
// FORK_CHOICE_BALANCE_INCREMENT
// build a hashmap of block_hash to weighted votes
let mut latest_votes: HashMap<Hash256, u64> = HashMap::new();
// gets the current weighted votes
let current_state = self
.state_store
.get_deserialized(&state_root)?
.ok_or_else(|| ForkChoiceError::MissingBeaconState(*state_root))?;
let active_validator_indices = get_active_validator_indices(
&current_state.validator_registry[..],
block_slot.epoch(spec.slots_per_epoch),
);
for index in active_validator_indices {
let balance = std::cmp::min(
current_state.validator_balances[index],
spec.max_deposit_amount,
) / spec.fork_choice_balance_increment;
if balance > 0 {
if let Some(target) = self.latest_attestation_targets.get(&(index as u64)) {
*latest_votes.entry(*target).or_insert_with(|| 0) += balance;
}
}
}
trace!("Latest votes: {:?}", latest_votes);
Ok(latest_votes)
}
/// Gets the ancestor at a given height `at_height` of a block specified by `block_hash`.
fn get_ancestor(
&mut self,
block_hash: Hash256,
target_height: SlotHeight,
spec: &ChainSpec,
) -> Option<Hash256> {
// return None if we can't get the block from the db.
let block_height = {
let block_slot = self
.block_store
.get_deserialized(&block_hash)
.ok()?
.expect("Should have returned already if None")
.slot;
block_slot.height(spec.genesis_slot)
};
// verify we haven't exceeded the block height
if target_height >= block_height {
if target_height > block_height {
return None;
} else {
return Some(block_hash);
}
}
// check if the result is stored in our cache
let cache_key = CacheKey::new(&block_hash, target_height.as_u64());
if let Some(ancestor) = self.cache.get(&cache_key) {
return Some(*ancestor);
}
// not in the cache; recursively search for ancestors using a log-lookup
if let Some(ancestor) = {
let ancestor_lookup = self.ancestors
[log2_int((block_height - target_height - 1u64).as_u64()) as usize]
.get(&block_hash)
//TODO: Panic if we can't lookup and fork choice fails
.expect("All blocks should be added to the ancestor log lookup table");
self.get_ancestor(*ancestor_lookup, target_height, &spec)
} {
// add the result to the cache
self.cache.insert(cache_key, ancestor);
return Some(ancestor);
}
None
}
// looks for an obvious block winner given the latest votes for a specific height
fn get_clear_winner(
&mut self,
latest_votes: &HashMap<Hash256, u64>,
block_height: SlotHeight,
spec: &ChainSpec,
) -> Option<Hash256> {
// map of vote counts for every hash at this height
let mut current_votes: HashMap<Hash256, u64> = HashMap::new();
let mut total_vote_count = 0;
trace!("Clear winner at block height: {}", block_height);
// loop through the latest votes and count all votes
// these have already been weighted by balance
for (hash, votes) in latest_votes.iter() {
if let Some(ancestor) = self.get_ancestor(*hash, block_height, spec) {
let current_vote_value = current_votes.get(&ancestor).unwrap_or_else(|| &0);
current_votes.insert(ancestor, current_vote_value + *votes);
total_vote_count += votes;
}
}
// Check if there is a clear block winner at this height. If so return it.
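// e.g. ancestor votes {a: 5, b: 3} give total_vote_count == 8 and 5 > 8 / 2, so `a`
// is returned; with {a: 4, b: 4} neither side exceeds half and we fall through to None.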
for (hash, votes) in current_votes.iter() {
if *votes > total_vote_count / 2 {
// we have a clear winner, return it
return Some(*hash);
}
}
// didn't find a clear winner
None
}
// Finds the best child (one with highest votes)
fn choose_best_child(&self, votes: &HashMap<Hash256, u64>) -> Option<Hash256> {
if votes.is_empty() {
return None;
}
// Iterate through hashmap to get child with maximum votes
let best_child = votes.iter().max_by(|(child1, v1), (child2, v2)| {
let mut result = v1.cmp(v2);
// If votes are equal, choose smaller hash to break ties deterministically
if result == Ordering::Equal {
// Reverse so that max_by chooses smaller hash
result = child1.cmp(child2).reverse();
}
result
});
Some(*best_child.unwrap().0)
}
}
impl<T: ClientDB + Sized> ForkChoice for OptimizedLMDGhost<T> {
fn add_block(
&mut self,
block: &BeaconBlock,
block_hash: &Hash256,
spec: &ChainSpec,
) -> Result<(), ForkChoiceError> {
// get the height of the parent
let parent_height = self
.block_store
.get_deserialized(&block.parent_root)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(block.parent_root))?
.slot()
.height(spec.genesis_slot);
let parent_hash = &block.parent_root;
// add the new block to the children of parent
(*self
.children
.entry(block.parent_root)
.or_insert_with(|| vec![]))
.push(block_hash.clone());
// build the ancestor data structure
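// `ancestors[i][block]` ends up pointing at the nearest ancestor whose height is a
// multiple of 2^i: the parent itself when `parent_height % (1 << i) == 0`, otherwise
// the parent's own level-i entry. `get_ancestor` above picks the level from the height
// difference, so lookups take logarithmically many hops rather than one parent at a time.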
for index in 0..16 {
if parent_height % (1 << index) == 0 {
self.ancestors[index].insert(*block_hash, *parent_hash);
} else {
// TODO: This is unsafe. Will panic if parent_hash doesn't exist. Using it for debugging
let parent_ancestor = self.ancestors[index][parent_hash];
self.ancestors[index].insert(*block_hash, parent_ancestor);
}
}
// update the max height
self.max_known_height = std::cmp::max(self.max_known_height, parent_height + 1);
Ok(())
}
fn add_attestation(
&mut self,
validator_index: u64,
target_block_root: &Hash256,
spec: &ChainSpec,
) -> Result<(), ForkChoiceError> {
// simply add the attestation to the latest_attestation_target if the block_height is
// larger
trace!(
"Adding attestation of validator: {:?} for block: {}",
validator_index,
target_block_root
);
let attestation_target = self
.latest_attestation_targets
.entry(validator_index)
.or_insert_with(|| *target_block_root);
// if we already have a value
if attestation_target != target_block_root {
trace!("Old attestation found: {:?}", attestation_target);
// get the height of the target block
let block_height = self
.block_store
.get_deserialized(&target_block_root)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*target_block_root))?
.slot()
.height(spec.genesis_slot);
// get the height of the past target block
let past_block_height = self
.block_store
.get_deserialized(&attestation_target)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*attestation_target))?
.slot()
.height(spec.genesis_slot);
// update the attestation only if the new target is higher
if past_block_height < block_height {
trace!("Updating old attestation");
*attestation_target = *target_block_root;
}
}
Ok(())
}
/// Perform lmd_ghost on the current chain to find the head.
fn find_head(
&mut self,
justified_block_start: &Hash256,
spec: &ChainSpec,
) -> Result<Hash256, ForkChoiceError> {
debug!(
"Starting optimised fork choice at block: {}",
justified_block_start
);
let block = self
.block_store
.get_deserialized(&justified_block_start)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*justified_block_start))?;
let block_slot = block.slot();
let state_root = block.state_root();
let mut block_height = block_slot.height(spec.genesis_slot);
let mut current_head = *justified_block_start;
let mut latest_votes = self.get_latest_votes(&state_root, block_slot, spec)?;
// remove any votes that don't relate to our current head.
latest_votes
.retain(|hash, _| self.get_ancestor(*hash, block_height, spec) == Some(current_head));
// begin searching for the head
loop {
debug!(
"Iteration for block: {} with vote length: {}",
current_head,
latest_votes.len()
);
// if there are no children, we are done, return the current_head
let children = match self.children.get(&current_head) {
Some(children) => children.clone(),
None => {
debug!("Head found: {}", current_head);
return Ok(current_head);
}
};
// logarithmic lookup blocks to see if there are obvious winners, if so,
// progress to the next iteration.
let mut step =
power_of_2_below(self.max_known_height.saturating_sub(block_height).as_u64()) / 2;
while step > 0 {
trace!("Current Step: {}", step);
if let Some(clear_winner) = self.get_clear_winner(
&latest_votes,
block_height - (block_height % step) + step,
spec,
) {
current_head = clear_winner;
break;
}
step /= 2;
}
if step > 0 {
trace!("Found clear winner: {}", current_head);
}
// if our skip lookup failed and we only have one child, progress to that child
else if children.len() == 1 {
current_head = children[0];
trace!(
"Lookup failed, only one child, proceeding to child: {}",
current_head
);
}
// we need to find the best child path to progress down.
else {
trace!("Searching for best child");
let mut child_votes = HashMap::new();
for (voted_hash, vote) in latest_votes.iter() {
// if the latest votes correspond to a child
if let Some(child) = self.get_ancestor(*voted_hash, block_height + 1, spec) {
// add up the votes for each child
*child_votes.entry(child).or_insert_with(|| 0) += vote;
}
}
// check if we have votes of children, if not select the smallest hash child
if child_votes.is_empty() {
current_head = *children
.iter()
.min_by(|child1, child2| child1.cmp(child2))
.expect("Must be children here");
trace!(
"Children have no votes - smallest hash chosen: {}",
current_head
);
} else {
// given the votes on the children, find the best child
current_head = self
.choose_best_child(&child_votes)
.ok_or(ForkChoiceError::CannotFindBestChild)?;
trace!("Best child found: {}", current_head);
}
}
// didn't find head yet, proceed to next iteration
// update block height
block_height = self
.block_store
.get_deserialized(&current_head)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(current_head))?
.slot()
.height(spec.genesis_slot);
// prune the latest votes for votes that are not part of current chosen chain
// more specifically, only keep votes that have head as an ancestor
for hash in latest_votes.keys() {
trace!(
"Ancestor for vote: {} at height: {} is: {:?}",
hash,
block_height,
self.get_ancestor(*hash, block_height, spec)
);
}
latest_votes.retain(|hash, _| {
self.get_ancestor(*hash, block_height, spec) == Some(current_head)
});
}
}
}
/// Type for storing blocks in a memory cache. Key is comprised of block-hash plus the height.
#[derive(PartialEq, Eq, Hash)]
pub struct CacheKey<T> {
block_hash: Hash256,
block_height: T,
}
impl<T> CacheKey<T> {
pub fn new(block_hash: &Hash256, block_height: T) -> Self {
CacheKey {
block_hash: *block_hash,
block_height,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
pub fn test_power_of_2_below() {
assert_eq!(power_of_2_below(4), 4);
assert_eq!(power_of_2_below(5), 4);
assert_eq!(power_of_2_below(7), 4);
assert_eq!(power_of_2_below(24), 16);
assert_eq!(power_of_2_below(32), 32);
assert_eq!(power_of_2_below(33), 32);
assert_eq!(power_of_2_below(63), 32);
}
#[test]
pub fn test_power_of_2_below_large() {
let pow: u64 = 1 << 24;
for x in (pow - 20)..(pow + 20) {
assert!(power_of_2_below(x) <= x, "{}", x);
}
}
}
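A condensed usage sketch of the new type, following the `ForkChoice` trait as exercised by the test harness later in this commit (the database handle, spec, blocks and roots are assumed bindings, and the `?` operator implies a surrounding function that returns a Result):

// Assumed: `db: Arc<MemoryDB>`, `spec: ChainSpec`, plus a block, its hash, and
// attestation / justified roots produced elsewhere.
let block_store = Arc::new(BeaconBlockStore::new(db.clone()));
let state_store = Arc::new(BeaconStateStore::new(db.clone()));
let mut fork_choice = OptimizedLMDGhost::new(block_store.clone(), state_store.clone());

// Feed in blocks and each validator's latest attestation target...
fork_choice.add_block(&block, &block_hash, &spec)?;
fork_choice.add_attestation(validator_index, &attestation_target_root, &spec)?;

// ...then resolve the head starting from the justified block.
let head = fork_choice.find_head(&justified_block_root, &spec)?;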


@ -210,6 +210,7 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
trace!("Children found: {:?}", children);
let mut head_vote_count = 0;
head_hash = children[0];
for child_hash in children {
let vote_count = self.get_vote_count(&latest_votes, &child_hash)?;
trace!("Vote count for child: {} is: {}", child_hash, vote_count);
@ -218,6 +219,12 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
head_hash = *child_hash;
head_vote_count = vote_count;
}
// resolve ties - choose smaller hash
else if vote_count == head_vote_count {
if *child_hash < head_hash {
head_hash = *child_hash;
}
}
}
}
Ok(head_hash)


@ -63,3 +63,82 @@ test_cases:
- b7: 2
heads:
- id: 'b4'
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b0'
- id: 'b3'
parent: 'b0'
- id: 'b4'
parent: 'b1'
- id: 'b5'
parent: 'b1'
- id: 'b6'
parent: 'b2'
- id: 'b7'
parent: 'b2'
- id: 'b8'
parent: 'b3'
- id: 'b9'
parent: 'b3'
weights:
- b1: 2
- b2: 1
- b3: 1
- b4: 7
- b5: 5
- b6: 2
- b7: 4
- b8: 4
- b9: 2
heads:
- id: 'b4'
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b0'
- id: 'b3'
parent: 'b0'
- id: 'b4'
parent: 'b1'
- id: 'b5'
parent: 'b1'
- id: 'b6'
parent: 'b2'
- id: 'b7'
parent: 'b2'
- id: 'b8'
parent: 'b3'
- id: 'b9'
parent: 'b3'
weights:
- b1: 1
- b2: 1
- b3: 1
- b4: 7
- b5: 5
- b6: 2
- b7: 4
- b8: 4
- b9: 2
heads:
- id: 'b7'
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b0'
weights:
- b1: 0
- b2: 0
heads:
- id: 'b1'


@ -35,3 +35,31 @@ test_cases:
- b3: 3
heads:
- id: 'b1'
# equal weights children. Should choose lower hash b2
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b0'
- id: 'b3'
parent: 'b0'
weights:
- b1: 5
- b2: 6
- b3: 6
heads:
- id: 'b2'
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b0'
weights:
- b1: 0
- b2: 0
heads:
- id: 'b1'


@ -3,7 +3,7 @@
extern crate beacon_chain;
extern crate bls;
extern crate db;
// extern crate env_logger; // for debugging
extern crate fork_choice;
extern crate hex;
extern crate log;
@ -15,18 +15,32 @@ pub use beacon_chain::BeaconChain;
use bls::Signature;
use db::stores::{BeaconBlockStore, BeaconStateStore};
use db::MemoryDB;
// use env_logger::{Builder, Env};
use fork_choice::{
BitwiseLMDGhost, ForkChoice, ForkChoiceAlgorithm, LongestChain, OptimizedLMDGhost, SlowLMDGhost,
};
use ssz::ssz_encode;
use std::collections::HashMap;
use std::sync::Arc;
use std::{fs::File, io::prelude::*, path::PathBuf};
use types::test_utils::TestingBeaconStateBuilder;
use types::{BeaconBlock, BeaconBlockBody, ChainSpec, Eth1Data, Hash256, Keypair, Slot};
use yaml_rust::yaml;
// Note: We Assume the block Id's are hex-encoded.
#[test]
fn test_optimized_lmd_ghost() {
// set up logging
// Builder::from_env(Env::default().default_filter_or("trace")).init();
test_yaml_vectors(
ForkChoiceAlgorithm::OptimizedLMDGhost,
"tests/lmd_ghost_test_vectors.yaml",
100,
);
}
#[test]
fn test_bitwise_lmd_ghost() {
// set up logging
@ -204,7 +218,7 @@ fn load_test_cases_from_yaml(file_path: &str) -> Vec<yaml_rust::Yaml> {
// initialise a single validator and state. All blocks will reference this state root.
fn setup_inital_state(
fork_choice_algo: &ForkChoiceAlgorithm,
num_validators: usize,
) -> (Box<ForkChoice>, Arc<BeaconBlockStore<MemoryDB>>, Hash256) {
let db = Arc::new(MemoryDB::open());
let block_store = Arc::new(BeaconBlockStore::new(db.clone()));
@ -212,6 +226,10 @@ fn setup_inital_state(
// the fork choice instantiation
let fork_choice: Box<ForkChoice> = match fork_choice_algo {
ForkChoiceAlgorithm::OptimizedLMDGhost => Box::new(OptimizedLMDGhost::new(
block_store.clone(),
state_store.clone(),
)),
ForkChoiceAlgorithm::BitwiseLMDGhost => Box::new(BitwiseLMDGhost::new(
block_store.clone(),
state_store.clone(),
@ -225,7 +243,7 @@ fn setup_inital_state(
let spec = ChainSpec::foundation();
let state_builder =
TestingBeaconStateBuilder::from_single_keypair(num_validators, &Keypair::random(), &spec);
let (state, _keypairs) = state_builder.build();
let state_root = state.canonical_root();


@ -20,29 +20,6 @@ pub struct Attestation {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(Attestation);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Attestation::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Attestation::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -38,29 +38,6 @@ impl Eq for AttestationData {}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(AttestationData);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = AttestationData::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = AttestationData::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -25,31 +25,6 @@ impl<T: RngCore> TestRandom<T> for AttestationDataAndCustodyBit {
#[cfg(test)]
mod test {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(AttestationDataAndCustodyBit);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = AttestationDataAndCustodyBit::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = AttestationDataAndCustodyBit::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -16,29 +16,6 @@ pub struct AttesterSlashing {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(AttesterSlashing);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = AttesterSlashing::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = AttesterSlashing::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -70,29 +70,6 @@ impl BeaconBlock {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(BeaconBlock);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconBlock::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconBlock::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -21,29 +21,6 @@ pub struct BeaconBlockBody {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(BeaconBlockBody);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconBlockBody::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconBlockBody::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -4,7 +4,6 @@ use super::*;
use crate::test_utils::TestingBeaconStateBuilder;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use crate::{BeaconState, ChainSpec};
use ssz::{ssz_encode, Decodable};
/// Tests that `get_attestation_participants` is consistent with the result of
/// get_crosslink_committees_at_slot` with a full bitfield.
@ -51,25 +50,4 @@ pub fn get_attestation_participants_consistency() {
}
}
ssz_tests!(BeaconState);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconState::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconState::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}


@ -19,29 +19,6 @@ pub struct Crosslink {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(Crosslink);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Crosslink::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Crosslink::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -18,29 +18,6 @@ pub struct Deposit {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(Deposit);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Deposit::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Deposit::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -18,29 +18,6 @@ pub struct DepositData {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(DepositData);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = DepositData::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = DepositData::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -66,29 +66,6 @@ impl DepositInput {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(DepositInput);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = DepositInput::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = DepositInput::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -17,29 +17,6 @@ pub struct Eth1Data {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(Eth1Data);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Eth1Data::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Eth1Data::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -17,29 +17,6 @@ pub struct Eth1DataVote {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(Eth1DataVote);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Eth1DataVote::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Eth1DataVote::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -29,29 +29,6 @@ impl Fork {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(Fork);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Fork::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Fork::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -1,5 +1,7 @@
//! Ethereum 2.0 types
#[macro_use]
pub mod test_utils;
pub mod attestation;
pub mod attestation_data;
pub mod attestation_data_and_custody_bit;


@ -19,29 +19,6 @@ pub struct PendingAttestation {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(PendingAttestation);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = PendingAttestation::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = PendingAttestation::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -23,30 +23,7 @@ pub struct Proposal {
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{SignedRoot, TreeHash};
#[test]
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Proposal::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Proposal::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
#[derive(TreeHash)]
struct SignedProposal {
@ -75,4 +52,5 @@ mod tests {
assert_eq!(original.signed_root(), other.hash_tree_root());
}
ssz_tests!(Proposal);
}


@ -18,29 +18,6 @@ pub struct ProposerSlashing {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(ProposerSlashing);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = ProposerSlashing::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = ProposerSlashing::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -14,29 +14,6 @@ pub struct ShardReassignmentRecord {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(ShardReassignmentRecord);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = ShardReassignmentRecord::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = ShardReassignmentRecord::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -46,7 +46,6 @@ mod tests {
use crate::chain_spec::ChainSpec;
use crate::slot_epoch::{Epoch, Slot};
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
#[test]
pub fn test_is_double_vote_true() {
@ -120,28 +119,7 @@ mod tests {
);
}
ssz_tests!(SlashableAttestation);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = SlashableAttestation::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = SlashableAttestation::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
fn create_slashable_attestation(
slot_factor: u64,


@ -103,8 +103,6 @@ impl<'a> Iterator for SlotIter<'a> {
#[cfg(test)]
mod slot_tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode;
all_tests!(Slot);
}
@ -112,8 +110,6 @@ mod slot_tests {
#[cfg(test)]
mod epoch_tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode;
all_tests!(Epoch);


@ -248,7 +248,7 @@ macro_rules! impl_common {
}
// test macros
#[cfg(test)]
macro_rules! new_tests {
($type: ident) => {
#[test]
@ -260,7 +260,7 @@ macro_rules! new_tests {
};
}
#[cfg(test)]
macro_rules! from_into_tests {
($type: ident, $other: ident) => {
#[test]
@ -286,7 +286,7 @@ macro_rules! from_into_tests {
};
}
#[cfg(test)]
macro_rules! math_between_tests {
($type: ident, $other: ident) => {
#[test]
@ -434,7 +434,7 @@ macro_rules! math_between_tests {
};
}
#[cfg(test)]
macro_rules! math_tests {
($type: ident) => {
#[test]
@ -528,35 +528,7 @@ macro_rules! math_tests {
};
}
#[cfg(test)]
macro_rules! ssz_tests {
($type: ident) => {
#[test]
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = $type::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = $type::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = $type::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
};
}
#[allow(unused_macros)]
macro_rules! all_tests {
($type: ident) => {
new_tests!($type);
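The `ssz_tests!` macro removed above is not gone; it moves into the new `#[macro_use]` test_utils macros module added elsewhere in this commit, so each type's test module can simply invoke `ssz_tests!(TypeName);`. A sketch of the relocated macro, assuming it keeps the body shown above with the imports folded in (since the call sites drop theirs):

#[cfg(test)]
macro_rules! ssz_tests {
    ($type: ident) => {
        #[test]
        pub fn test_ssz_round_trip() {
            use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
            use ssz::{ssz_encode, Decodable};

            let mut rng = XorShiftRng::from_seed([42; 16]);
            let original = $type::random_for_test(&mut rng);
            let bytes = ssz_encode(&original);
            let (decoded, _) = $type::ssz_decode(&bytes, 0).unwrap();
            assert_eq!(original, decoded);
        }

        #[test]
        pub fn test_hash_tree_root() {
            use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
            use ssz::TreeHash;

            let mut rng = XorShiftRng::from_seed([42; 16]);
            let original = $type::random_for_test(&mut rng);
            let result = original.hash_tree_root();
            assert_eq!(result.len(), 32);
        }
    };
}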


@ -33,11 +33,8 @@ impl SlotHeight {
}
#[cfg(test)]
mod slot_height_tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode;
all_tests!(SlotHeight);
}


@ -1,3 +1,5 @@
#[macro_use]
mod macros;
mod generate_deterministic_keypairs;
mod keypairs_file;
mod test_random;


@ -67,13 +67,15 @@ impl TestingAttesterSlashingBuilder {
};
let add_signatures = |attestation: &mut SlashableAttestation| {
// All validators sign with a `false` custody bit.
let attestation_data_and_custody_bit = AttestationDataAndCustodyBit {
data: attestation.data.clone(),
custody_bit: false,
};
let message = attestation_data_and_custody_bit.hash_tree_root();
for (i, validator_index) in validator_indices.iter().enumerate() {
attestation.custody_bitfield.set(i, false);
let attestation_data_and_custody_bit = AttestationDataAndCustodyBit {
data: attestation.data.clone(),
custody_bit: attestation.custody_bitfield.get(i).unwrap(),
};
let message = attestation_data_and_custody_bit.hash_tree_root();
let signature = signer(*validator_index, &message[..], epoch, Domain::Attestation);
attestation.aggregate_signature.add(&signature);
}


@ -74,6 +74,22 @@ impl TestingBeaconStateBuilder {
TestingBeaconStateBuilder::from_keypairs(keypairs, spec)
}
/// Uses the given keypair for all validators.
pub fn from_single_keypair(
validator_count: usize,
keypair: &Keypair,
spec: &ChainSpec,
) -> Self {
debug!("Generating {} cloned keypairs...", validator_count);
let mut keypairs = Vec::with_capacity(validator_count);
for _ in 0..validator_count {
keypairs.push(keypair.clone())
}
TestingBeaconStateBuilder::from_keypairs(keypairs, spec)
}
/// Creates the builder from an existing set of keypairs.
pub fn from_keypairs(keypairs: Vec<Keypair>, spec: &ChainSpec) -> Self {
let validator_count = keypairs.len();
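Usage, as seen in the updated fork-choice test setup earlier in this diff: a single random keypair is cloned across every validator instead of deriving one deterministic key per validator, which keeps large test states cheap to build (sketch; `num_validators` is an assumed binding):

let spec = ChainSpec::foundation();
let keypair = Keypair::random();
let builder =
    TestingBeaconStateBuilder::from_single_keypair(num_validators, &keypair, &spec);
let (state, _keypairs) = builder.build();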


@ -24,29 +24,6 @@ pub struct Transfer {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(Transfer);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Transfer::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Transfer::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -54,18 +54,6 @@ impl Default for Validator {
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
#[test]
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Validator::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
fn test_validator_can_be_active() {
@ -90,15 +78,5 @@ mod tests {
}
}
ssz_tests!(Validator);
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = Validator::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}


@ -19,29 +19,6 @@ pub struct VoluntaryExit {
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::{ssz_encode, Decodable, TreeHash};
ssz_tests!(VoluntaryExit);
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = VoluntaryExit::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = VoluntaryExit::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}