Merge branch 'v0.8.3' into interop-v0.8.3

Paul Hauner 2019-09-05 17:39:23 +10:00
commit 3bc62ef411
No known key found for this signature in database
GPG Key ID: 303E4494BB28068C
54 changed files with 1695 additions and 1488 deletions

View File

@ -17,11 +17,9 @@ pub fn get_attesting_indices<T: EthSpec>(
target_relative_epoch,
)?;
/* TODO(freeze): re-enable this?
if bitlist.len() > committee.committee.len() {
if bitlist.len() != committee.committee.len() {
return Err(BeaconStateError::InvalidBitfield);
}
*/
Ok(committee
.committee

View File

@ -3,7 +3,7 @@ use types::*;
/// Return the compact committee root at `relative_epoch`.
///
/// Spec v0.8.0
/// Spec v0.8.3
pub fn get_compact_committees_root<T: EthSpec>(
state: &BeaconState<T>,
relative_epoch: RelativeEpoch,
@ -11,28 +11,13 @@ pub fn get_compact_committees_root<T: EthSpec>(
) -> Result<Hash256, BeaconStateError> {
let mut committees =
FixedVector::<_, T::ShardCount>::from_elem(CompactCommittee::<T>::default());
// FIXME: this is a spec bug, whereby the start shard for the epoch after the next epoch
// is mistakenly used. The start shard from the cache SHOULD work.
// Waiting on a release to fix https://github.com/ethereum/eth2.0-specs/issues/1315
let start_shard = if relative_epoch == RelativeEpoch::Next {
state.next_epoch_start_shard(spec)?
} else {
state.get_epoch_start_shard(relative_epoch)?
};
let start_shard = state.get_epoch_start_shard(relative_epoch)?;
for committee_number in 0..state.get_committee_count(relative_epoch)? {
let shard = (start_shard + committee_number) % T::ShardCount::to_u64();
// FIXME: this is a partial workaround for the above, but it only works in the case
// where there's a committee for every shard in every epoch. It works for the minimal
// tests but not the mainnet ones.
let fake_shard = if relative_epoch == RelativeEpoch::Next {
(shard + 1) % T::ShardCount::to_u64()
} else {
shard
};
for &index in state
.get_crosslink_committee_for_shard(fake_shard, relative_epoch)?
.get_crosslink_committee_for_shard(shard, relative_epoch)?
.committee
{
let validator = state
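With the workaround removed, the shard for each committee is simply the epoch's start shard advanced around the shard ring. A minimal, self-contained sketch of that arithmetic (the `shard_for` helper and the constants are illustrative only, not part of the codebase):

```rust
/// Illustrative helper: committees wrap around the shard ring starting at the
/// epoch's start shard, matching `(start_shard + committee_number) % shard_count`.
fn shard_for(start_shard: u64, committee_number: u64, shard_count: u64) -> u64 {
    (start_shard + committee_number) % shard_count
}

fn main() {
    // With 8 shards and a start shard of 6, the fourth committee wraps to shard 1.
    assert_eq!(shard_for(6, 0, 8), 6);
    assert_eq!(shard_for(6, 3, 8), 1);
}
```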

View File

@ -11,6 +11,8 @@ pub fn get_indexed_attestation<T: EthSpec>(
state: &BeaconState<T>,
attestation: &Attestation<T>,
) -> Result<IndexedAttestation<T>> {
// Note: we rely on both calls to `get_attesting_indices` to check the bitfield lengths
// against the committee length
let attesting_indices =
get_attesting_indices(state, &attestation.data, &attestation.aggregation_bits)?;

View File

@ -1,8 +1,5 @@
use crate::common::get_compact_committees_root;
use apply_rewards::process_rewards_and_penalties;
use errors::EpochProcessingError as Error;
use process_slashings::process_slashings;
use registry_updates::process_registry_updates;
use std::collections::HashMap;
use tree_hash::TreeHash;
use types::*;
@ -17,6 +14,10 @@ pub mod tests;
pub mod validator_statuses;
pub mod winning_root;
pub use apply_rewards::process_rewards_and_penalties;
pub use process_slashings::process_slashings;
pub use registry_updates::process_registry_updates;
/// Maps a shard to a winning root.
///
/// It is generated during crosslink processing and later used to reward/penalize validators.
@ -218,45 +219,29 @@ pub fn process_final_updates<T: EthSpec>(
}
}
// Update start shard.
state.start_shard = state.next_epoch_start_shard(spec)?;
// This is a hack to allow us to update index roots and slashed balances for the next epoch.
//
// The indentation here is to make it obvious where the weird stuff happens.
{
state.slot += 1;
// Set active index root
let index_epoch = next_epoch + spec.activation_exit_delay;
let indices_list = VariableList::<usize, T::ValidatorRegistryLimit>::from(
state.get_active_validator_indices(index_epoch),
);
state.set_active_index_root(
index_epoch,
Hash256::from_slice(&indices_list.tree_hash_root()),
spec,
)?;
// Reset slashings
state.set_slashings(next_epoch, 0)?;
// Set randao mix
state.set_randao_mix(next_epoch, *state.get_randao_mix(current_epoch)?)?;
state.slot -= 1;
}
// Set active index root
let index_epoch = next_epoch + spec.activation_exit_delay;
let indices_list = VariableList::<usize, T::ValidatorRegistryLimit>::from(
state.get_active_validator_indices(index_epoch),
);
state.set_active_index_root(
index_epoch,
Hash256::from_slice(&indices_list.tree_hash_root()),
spec,
)?;
// Set committees root
// Note: we do this out-of-order w.r.t. the spec, because we don't want the slot to be
// incremented. It's safe because the updates to slashings and the RANDAO mix (above) don't
// affect this.
state.set_compact_committee_root(
next_epoch,
get_compact_committees_root(state, RelativeEpoch::Next, spec)?,
spec,
)?;
// Reset slashings
state.set_slashings(next_epoch, 0)?;
// Set randao mix
state.set_randao_mix(next_epoch, *state.get_randao_mix(current_epoch)?)?;
// Set historical root accumulator
if next_epoch.as_u64() % (T::SlotsPerHistoricalRoot::to_u64() / T::slots_per_epoch()) == 0 {
let historical_batch = state.historical_batch();
@ -265,6 +250,9 @@ pub fn process_final_updates<T: EthSpec>(
.push(Hash256::from_slice(&historical_batch.tree_hash_root()))?;
}
// Update start shard.
state.start_shard = state.get_epoch_start_shard(RelativeEpoch::Next)?;
// Rotate current/previous epoch attestations
state.previous_epoch_attestations =
std::mem::replace(&mut state.current_epoch_attestations, VariableList::empty());
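The attestation rotation above relies on `std::mem::replace` to move the current list into the previous slot while leaving an empty list behind. A tiny standalone sketch of that behaviour, using plain `Vec`s rather than `VariableList`:

```rust
fn main() {
    let mut previous_epoch_attestations: Vec<u32> = vec![1, 2];
    let mut current_epoch_attestations: Vec<u32> = vec![3, 4];
    assert_eq!(previous_epoch_attestations, vec![1, 2]);

    // Move `current` into `previous` and reset `current` to an empty list,
    // mirroring the rotation at the end of `process_final_updates`.
    previous_epoch_attestations =
        std::mem::replace(&mut current_epoch_attestations, Vec::new());

    assert_eq!(previous_epoch_attestations, vec![3, 4]);
    assert!(current_epoch_attestations.is_empty());
}
```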

View File

@ -4,25 +4,13 @@ use crate::{Checkpoint, Crosslink, Hash256};
use serde_derive::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
use test_random_derive::TestRandom;
use tree_hash::TreeHash;
use tree_hash_derive::{SignedRoot, TreeHash};
use tree_hash_derive::TreeHash;
/// The data upon which an attestation is based.
///
/// Spec v0.8.0
#[derive(
Debug,
Clone,
PartialEq,
Eq,
Serialize,
Deserialize,
Hash,
Encode,
Decode,
TreeHash,
TestRandom,
SignedRoot,
Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash, Encode, Decode, TreeHash, TestRandom,
)]
pub struct AttestationData {
// LMD GHOST vote

View File

@ -60,6 +60,22 @@ pub enum Error {
SszTypesError(ssz_types::Error),
}
/// Control whether an epoch-indexed field can be indexed at the next epoch or not.
#[derive(Debug, PartialEq, Clone, Copy)]
enum AllowNextEpoch {
True,
False,
}
impl AllowNextEpoch {
fn upper_bound_of(self, current_epoch: Epoch) -> Epoch {
match self {
AllowNextEpoch::True => current_epoch + 1,
AllowNextEpoch::False => current_epoch,
}
}
}
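For readers skimming the diff: the new getters below pass `AllowNextEpoch::False` and the setters pass `AllowNextEpoch::True`, so only writes may target the upcoming epoch. A simplified, self-contained sketch of the bounds check, using a plain `u64` epoch and a hypothetical `HISTORY_LEN` in place of `T::EpochsPerHistoricalVector`:

```rust
type Epoch = u64;

#[derive(Clone, Copy)]
enum AllowNextEpoch {
    True,
    False,
}

impl AllowNextEpoch {
    fn upper_bound_of(self, current_epoch: Epoch) -> Epoch {
        match self {
            AllowNextEpoch::True => current_epoch + 1,
            AllowNextEpoch::False => current_epoch,
        }
    }
}

// Stand-in for T::EpochsPerHistoricalVector (illustrative value only).
const HISTORY_LEN: u64 = 64;

/// Accept any of the most recent `HISTORY_LEN` epochs, plus the next epoch
/// when the caller (a setter) passes `AllowNextEpoch::True`.
fn index_for(epoch: Epoch, current_epoch: Epoch, allow: AllowNextEpoch) -> Option<usize> {
    if current_epoch < epoch + HISTORY_LEN && epoch <= allow.upper_bound_of(current_epoch) {
        Some((epoch % HISTORY_LEN) as usize)
    } else {
        None
    }
}

fn main() {
    assert_eq!(index_for(10, 10, AllowNextEpoch::False), Some(10));
    assert_eq!(index_for(11, 10, AllowNextEpoch::False), None); // getters: next epoch rejected
    assert_eq!(index_for(11, 10, AllowNextEpoch::True), Some(11)); // setters: next epoch allowed
}
```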
/// The state of the `BeaconChain` at some slot.
///
/// Spec v0.8.0
@ -108,12 +124,12 @@ where
pub start_shard: u64,
pub randao_mixes: FixedVector<Hash256, T::EpochsPerHistoricalVector>,
#[compare_fields(as_slice)]
active_index_roots: FixedVector<Hash256, T::EpochsPerHistoricalVector>,
pub active_index_roots: FixedVector<Hash256, T::EpochsPerHistoricalVector>,
#[compare_fields(as_slice)]
compact_committees_roots: FixedVector<Hash256, T::EpochsPerHistoricalVector>,
pub compact_committees_roots: FixedVector<Hash256, T::EpochsPerHistoricalVector>,
// Slashings
slashings: FixedVector<u64, T::EpochsPerSlashingsVector>,
pub slashings: FixedVector<u64, T::EpochsPerSlashingsVector>,
// Attestations
pub previous_epoch_attestations: VariableList<PendingAttestation<T>, T::MaxPendingAttestations>,
@ -282,14 +298,6 @@ impl<T: EthSpec> BeaconState<T> {
Ok(cache.epoch_start_shard())
}
pub fn next_epoch_start_shard(&self, spec: &ChainSpec) -> Result<u64, Error> {
let cache = self.cache(RelativeEpoch::Current)?;
let active_validator_count = cache.active_validator_count();
let shard_delta = T::get_shard_delta(active_validator_count, spec.target_committee_size);
Ok((self.start_shard + shard_delta) % T::ShardCount::to_u64())
}
/// Get the slot of an attestation.
///
/// Note: Utilizes the cache and will fail if the appropriate cache is not initialized.
@ -463,12 +471,16 @@ impl<T: EthSpec> BeaconState<T> {
/// Safely obtains the index for `randao_mixes`
///
/// Spec v0.8.0
fn get_randao_mix_index(&self, epoch: Epoch) -> Result<usize, Error> {
/// Spec v0.8.1
fn get_randao_mix_index(
&self,
epoch: Epoch,
allow_next_epoch: AllowNextEpoch,
) -> Result<usize, Error> {
let current_epoch = self.current_epoch();
let len = T::EpochsPerHistoricalVector::to_u64();
if epoch + len > current_epoch && epoch <= current_epoch {
if current_epoch < epoch + len && epoch <= allow_next_epoch.upper_bound_of(current_epoch) {
Ok(epoch.as_usize() % len as usize)
} else {
Err(Error::EpochOutOfBounds)
@ -496,7 +508,7 @@ impl<T: EthSpec> BeaconState<T> {
///
/// Spec v0.8.1
pub fn get_randao_mix(&self, epoch: Epoch) -> Result<&Hash256, Error> {
let i = self.get_randao_mix_index(epoch)?;
let i = self.get_randao_mix_index(epoch, AllowNextEpoch::False)?;
Ok(&self.randao_mixes[i])
}
@ -504,21 +516,29 @@ impl<T: EthSpec> BeaconState<T> {
///
/// Spec v0.8.1
pub fn set_randao_mix(&mut self, epoch: Epoch, mix: Hash256) -> Result<(), Error> {
let i = self.get_randao_mix_index(epoch)?;
let i = self.get_randao_mix_index(epoch, AllowNextEpoch::True)?;
self.randao_mixes[i] = mix;
Ok(())
}
/// Safely obtains the index for `active_index_roots`, given some `epoch`.
///
/// If `allow_next_epoch` is `True`, then we allow one _extra_ epoch of lookahead.
///
/// Spec v0.8.1
fn get_active_index_root_index(&self, epoch: Epoch, spec: &ChainSpec) -> Result<usize, Error> {
fn get_active_index_root_index(
&self,
epoch: Epoch,
spec: &ChainSpec,
allow_next_epoch: AllowNextEpoch,
) -> Result<usize, Error> {
let current_epoch = self.current_epoch();
let lookahead = spec.activation_exit_delay;
let lookback = self.active_index_roots.len() as u64 - lookahead;
let epoch_upper_bound = allow_next_epoch.upper_bound_of(current_epoch) + lookahead;
if epoch + lookback > current_epoch && current_epoch + lookahead >= epoch {
if current_epoch < epoch + lookback && epoch <= epoch_upper_bound {
Ok(epoch.as_usize() % self.active_index_roots.len())
} else {
Err(Error::EpochOutOfBounds)
@ -529,7 +549,7 @@ impl<T: EthSpec> BeaconState<T> {
///
/// Spec v0.8.1
pub fn get_active_index_root(&self, epoch: Epoch, spec: &ChainSpec) -> Result<Hash256, Error> {
let i = self.get_active_index_root_index(epoch, spec)?;
let i = self.get_active_index_root_index(epoch, spec, AllowNextEpoch::False)?;
Ok(self.active_index_roots[i])
}
@ -542,7 +562,7 @@ impl<T: EthSpec> BeaconState<T> {
index_root: Hash256,
spec: &ChainSpec,
) -> Result<(), Error> {
let i = self.get_active_index_root_index(epoch, spec)?;
let i = self.get_active_index_root_index(epoch, spec, AllowNextEpoch::True)?;
self.active_index_roots[i] = index_root;
Ok(())
}
@ -556,19 +576,17 @@ impl<T: EthSpec> BeaconState<T> {
/// Safely obtains the index for `compact_committees_roots`, given some `epoch`.
///
/// Spec v0.8.0
/// Spec v0.8.1
fn get_compact_committee_root_index(
&self,
epoch: Epoch,
spec: &ChainSpec,
allow_next_epoch: AllowNextEpoch,
) -> Result<usize, Error> {
let current_epoch = self.current_epoch();
let len = T::EpochsPerHistoricalVector::to_u64();
let lookahead = spec.activation_exit_delay;
let lookback = self.compact_committees_roots.len() as u64 - lookahead;
if epoch + lookback > current_epoch && current_epoch + lookahead >= epoch {
Ok(epoch.as_usize() % self.compact_committees_roots.len())
if current_epoch < epoch + len && epoch <= allow_next_epoch.upper_bound_of(current_epoch) {
Ok(epoch.as_usize() % len as usize)
} else {
Err(Error::EpochOutOfBounds)
}
@ -576,26 +594,21 @@ impl<T: EthSpec> BeaconState<T> {
/// Return the `compact_committee_root` at a recent `epoch`.
///
/// Spec v0.8.0
pub fn get_compact_committee_root(
&self,
epoch: Epoch,
spec: &ChainSpec,
) -> Result<Hash256, Error> {
let i = self.get_compact_committee_root_index(epoch, spec)?;
/// Spec v0.8.1
pub fn get_compact_committee_root(&self, epoch: Epoch) -> Result<Hash256, Error> {
let i = self.get_compact_committee_root_index(epoch, AllowNextEpoch::False)?;
Ok(self.compact_committees_roots[i])
}
/// Set the `compact_committee_root` at a recent `epoch`.
///
/// Spec v0.8.0
/// Spec v0.8.1
pub fn set_compact_committee_root(
&mut self,
epoch: Epoch,
index_root: Hash256,
spec: &ChainSpec,
) -> Result<(), Error> {
let i = self.get_compact_committee_root_index(epoch, spec)?;
let i = self.get_compact_committee_root_index(epoch, AllowNextEpoch::True)?;
self.compact_committees_roots[i] = index_root;
Ok(())
}
@ -646,14 +659,19 @@ impl<T: EthSpec> BeaconState<T> {
/// Safely obtain the index for `slashings`, given some `epoch`.
///
/// Spec v0.8.0
fn get_slashings_index(&self, epoch: Epoch) -> Result<usize, Error> {
/// Spec v0.8.1
fn get_slashings_index(
&self,
epoch: Epoch,
allow_next_epoch: AllowNextEpoch,
) -> Result<usize, Error> {
// We allow the slashings vector to be accessed at any cached epoch at or before
// the current epoch.
if epoch <= self.current_epoch()
&& epoch + T::EpochsPerSlashingsVector::to_u64() >= self.current_epoch() + 1
// the current epoch, or the next epoch if `AllowNextEpoch::True` is passed.
let current_epoch = self.current_epoch();
if current_epoch < epoch + T::EpochsPerSlashingsVector::to_u64()
&& epoch <= allow_next_epoch.upper_bound_of(current_epoch)
{
Ok((epoch.as_u64() % T::EpochsPerSlashingsVector::to_u64()) as usize)
Ok(epoch.as_usize() % T::EpochsPerSlashingsVector::to_usize())
} else {
Err(Error::EpochOutOfBounds)
}
@ -668,17 +686,17 @@ impl<T: EthSpec> BeaconState<T> {
/// Get the total slashed balances for some epoch.
///
/// Spec v0.8.0
/// Spec v0.8.1
pub fn get_slashings(&self, epoch: Epoch) -> Result<u64, Error> {
let i = self.get_slashings_index(epoch)?;
let i = self.get_slashings_index(epoch, AllowNextEpoch::False)?;
Ok(self.slashings[i])
}
/// Set the total slashed balances for some epoch.
///
/// Spec v0.8.0
/// Spec v0.8.1
pub fn set_slashings(&mut self, epoch: Epoch, value: u64) -> Result<(), Error> {
let i = self.get_slashings_index(epoch)?;
let i = self.get_slashings_index(epoch, AllowNextEpoch::True)?;
self.slashings[i] = value;
Ok(())
}

View File

@ -90,11 +90,11 @@ fn test_active_index<T: EthSpec>(state_slot: Slot) {
// Test the start and end of the range.
assert_eq!(
state.get_active_index_root_index(*range.start(), &spec),
state.get_active_index_root_index(*range.start(), &spec, AllowNextEpoch::False),
Ok(modulo(*range.start()))
);
assert_eq!(
state.get_active_index_root_index(*range.end(), &spec),
state.get_active_index_root_index(*range.end(), &spec, AllowNextEpoch::False),
Ok(modulo(*range.end()))
);
@ -102,12 +102,12 @@ fn test_active_index<T: EthSpec>(state_slot: Slot) {
if state.current_epoch() > 0 {
// Test is invalid on epoch zero, cannot subtract from zero.
assert_eq!(
state.get_active_index_root_index(*range.start() - 1, &spec),
state.get_active_index_root_index(*range.start() - 1, &spec, AllowNextEpoch::False),
Err(Error::EpochOutOfBounds)
);
}
assert_eq!(
state.get_active_index_root_index(*range.end() + 1, &spec),
state.get_active_index_root_index(*range.end() + 1, &spec, AllowNextEpoch::False),
Err(Error::EpochOutOfBounds)
);
}

View File

@ -3,8 +3,7 @@ use crate::{Epoch, Hash256};
use serde_derive::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
use test_random_derive::TestRandom;
use tree_hash::TreeHash;
use tree_hash_derive::{SignedRoot, TreeHash};
use tree_hash_derive::TreeHash;
/// Casper FFG checkpoint, used in attestations.
///
@ -22,7 +21,6 @@ use tree_hash_derive::{SignedRoot, TreeHash};
Decode,
TreeHash,
TestRandom,
SignedRoot,
)]
pub struct Checkpoint {
pub epoch: Epoch,

View File

@ -5,7 +5,8 @@ authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
[dependencies]
milagro_bls = { git = "https://github.com/sigp/milagro_bls", tag = "v0.10.0" }
# FIXME: update sigp repo
milagro_bls = { git = "https://github.com/michaelsproul/milagro_bls", branch = "little-endian-v0.10" }
eth2_hashing = { path = "../eth2_hashing" }
hex = "0.3"
rand = "^0.5"

View File

@ -220,13 +220,26 @@ where
fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, ssz::DecodeError> {
if bytes.is_empty() {
Ok(FixedVector::from(vec![]))
Err(ssz::DecodeError::InvalidByteLength {
len: 0,
expected: 1,
})
} else if T::is_ssz_fixed_len() {
bytes
.chunks(T::ssz_fixed_len())
.map(|chunk| T::from_ssz_bytes(chunk))
.collect::<Result<Vec<T>, _>>()
.and_then(|vec| Ok(vec.into()))
.and_then(|vec| {
if vec.len() == N::to_usize() {
Ok(vec.into())
} else {
Err(ssz::DecodeError::BytesInvalid(format!(
"wrong number of vec elements, got: {}, expected: {}",
vec.len(),
N::to_usize()
)))
}
})
} else {
ssz::decode_list_of_variable_length_items(bytes).and_then(|vec| Ok(vec.into()))
}
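The change above makes `FixedVector` decoding strict: empty input is an error, and the number of decoded elements must match the type-level length `N`. A simplified sketch of the same two checks, not using the real `ssz`/`ssz_types` APIs (the `u64` element type and error strings are illustrative):

```rust
use std::convert::TryInto;

// Stand-in element decoder: little-endian u64 from an 8-byte chunk.
fn decode_u64_le(chunk: &[u8]) -> Result<u64, String> {
    let bytes: [u8; 8] = chunk.try_into().map_err(|_| "bad chunk length".to_string())?;
    Ok(u64::from_le_bytes(bytes))
}

/// Decode a fixed-length vector of `n` u64 elements, rejecting empty input
/// and any element count other than exactly `n`.
fn decode_fixed_vector(bytes: &[u8], n: usize) -> Result<Vec<u64>, String> {
    if bytes.is_empty() {
        return Err("invalid byte length: got 0, expected at least 1".to_string());
    }
    let vec: Vec<u64> = bytes.chunks(8).map(decode_u64_le).collect::<Result<_, _>>()?;
    if vec.len() == n {
        Ok(vec)
    } else {
        Err(format!(
            "wrong number of vec elements, got: {}, expected: {}",
            vec.len(),
            n
        ))
    }
}

fn main() {
    assert!(decode_fixed_vector(&[], 2).is_err());
    assert_eq!(decode_fixed_vector(&1u64.to_le_bytes(), 1), Ok(vec![1]));
    assert!(decode_fixed_vector(&1u64.to_le_bytes(), 2).is_err());
}
```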

View File

@ -1,5 +1,5 @@
use super::*;
use ethereum_types::H256;
use ethereum_types::{H256, U128, U256};
macro_rules! impl_for_bitsize {
($type: ident, $bit_size: expr) => {
@ -73,6 +73,46 @@ macro_rules! impl_for_u8_array {
impl_for_u8_array!(4);
impl_for_u8_array!(32);
impl TreeHash for U128 {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Basic
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
let mut result = vec![0; 16];
self.to_little_endian(&mut result);
result
}
fn tree_hash_packing_factor() -> usize {
2
}
fn tree_hash_root(&self) -> Vec<u8> {
merkle_root(&self.tree_hash_packed_encoding(), 0)
}
}
impl TreeHash for U256 {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Basic
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
let mut result = vec![0; 32];
self.to_little_endian(&mut result);
result
}
fn tree_hash_packing_factor() -> usize {
1
}
fn tree_hash_root(&self) -> Vec<u8> {
merkle_root(&self.tree_hash_packed_encoding(), 0)
}
}
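These impls pack the values little-endian: a 128-bit word occupies 16 bytes (two per 32-byte hash chunk, hence the packing factor of 2) while a 256-bit word fills a chunk on its own. A standalone sketch of the byte layout using the built-in `u128` rather than `ethereum_types`, with no hashing involved:

```rust
fn main() {
    // Little-endian packed encoding of a 128-bit value: 16 bytes, least
    // significant byte first, so `1` encodes as 0x01 followed by 15 zeros.
    let encoded = 1u128.to_le_bytes();
    assert_eq!(encoded.len(), 16);
    assert_eq!(encoded[0], 1);
    assert!(encoded[1..].iter().all(|&b| b == 0));

    // Two such encodings fill one 32-byte chunk, matching packing factor 2.
    assert_eq!(2 * encoded.len(), 32);
}
```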
impl TreeHash for H256 {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Vector

View File

@ -18,7 +18,9 @@ serde_derive = "1.0"
serde_repr = "0.1"
serde_yaml = "0.8"
eth2_ssz = "0.1"
eth2_ssz_derive = "0.1"
tree_hash = "0.1"
tree_hash_derive = "0.2"
state_processing = { path = "../../eth2/state_processing" }
swap_or_not_shuffle = { path = "../../eth2/utils/swap_or_not_shuffle" }
types = { path = "../../eth2/types" }

@ -1 +1 @@
Subproject commit aaa1673f508103e11304833e0456e4149f880065
Subproject commit ae6dd9011df05fab8c7e651c09cf9c940973bf81

View File

@ -2,7 +2,6 @@ use self::BlsSetting::*;
use crate::error::Error;
use serde_repr::Deserialize_repr;
// TODO: use this in every test case
#[derive(Deserialize_repr, Debug, Clone, Copy)]
#[repr(u8)]
pub enum BlsSetting {

View File

@ -1,6 +1,7 @@
use super::*;
use compare_fields::{CompareFields, Comparison, FieldComparison};
use std::fmt::Debug;
use std::path::{Path, PathBuf};
use types::BeaconState;
pub const MAX_VALUE_STRING_LEN: usize = 500;
@ -9,14 +10,21 @@ pub const MAX_VALUE_STRING_LEN: usize = 500;
pub struct CaseResult {
pub case_index: usize,
pub desc: String,
pub path: PathBuf,
pub result: Result<(), Error>,
}
impl CaseResult {
pub fn new(case_index: usize, case: &impl Case, result: Result<(), Error>) -> Self {
pub fn new(
case_index: usize,
path: &Path,
case: &impl Case,
result: Result<(), Error>,
) -> Self {
CaseResult {
case_index,
desc: case.description(),
path: path.into(),
result,
}
}

View File

@ -1,5 +1,7 @@
use super::*;
use rayon::prelude::*;
use std::fmt::Debug;
use std::path::{Path, PathBuf};
mod bls_aggregate_pubkeys;
mod bls_aggregate_sigs;
@ -7,20 +9,11 @@ mod bls_g2_compressed;
mod bls_g2_uncompressed;
mod bls_priv_to_pub;
mod bls_sign_msg;
mod epoch_processing_crosslinks;
mod epoch_processing_final_updates;
mod epoch_processing_justification_and_finalization;
mod epoch_processing_registry_updates;
mod epoch_processing_slashings;
mod common;
mod epoch_processing;
mod genesis_initialization;
mod genesis_validity;
mod operations_attestation;
mod operations_attester_slashing;
mod operations_block_header;
mod operations_deposit;
mod operations_exit;
mod operations_proposer_slashing;
mod operations_transfer;
mod operations;
mod sanity_blocks;
mod sanity_slots;
mod shuffling;
@ -33,27 +26,23 @@ pub use bls_g2_compressed::*;
pub use bls_g2_uncompressed::*;
pub use bls_priv_to_pub::*;
pub use bls_sign_msg::*;
pub use epoch_processing_crosslinks::*;
pub use epoch_processing_final_updates::*;
pub use epoch_processing_justification_and_finalization::*;
pub use epoch_processing_registry_updates::*;
pub use epoch_processing_slashings::*;
pub use common::SszStaticType;
pub use epoch_processing::*;
pub use genesis_initialization::*;
pub use genesis_validity::*;
pub use operations_attestation::*;
pub use operations_attester_slashing::*;
pub use operations_block_header::*;
pub use operations_deposit::*;
pub use operations_exit::*;
pub use operations_proposer_slashing::*;
pub use operations_transfer::*;
pub use operations::*;
pub use sanity_blocks::*;
pub use sanity_slots::*;
pub use shuffling::*;
pub use ssz_generic::*;
pub use ssz_static::*;
pub trait Case: Debug {
pub trait LoadCase: Sized {
/// Load the test case from a test case directory.
fn load_from_dir(_path: &Path) -> Result<Self, Error>;
}
pub trait Case: Debug + Sync {
/// An optional field for implementing a custom description.
///
/// Defaults to "no description".
@ -70,51 +59,15 @@ pub trait Case: Debug {
#[derive(Debug)]
pub struct Cases<T> {
pub test_cases: Vec<T>,
pub test_cases: Vec<(PathBuf, T)>,
}
impl<T> EfTest for Cases<T>
where
T: Case + Debug,
{
fn test_results(&self) -> Vec<CaseResult> {
impl<T: Case> Cases<T> {
pub fn test_results(&self) -> Vec<CaseResult> {
self.test_cases
.iter()
.into_par_iter()
.enumerate()
.map(|(i, tc)| CaseResult::new(i, tc, tc.result(i)))
.map(|(i, (ref path, ref tc))| CaseResult::new(i, path, tc, tc.result(i)))
.collect()
}
}
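`test_results` now runs cases in parallel through rayon, keeping the `(index, path, case)` association via `enumerate`. A minimal, hedged sketch of that shape (rayon assumed as a dependency; `par_iter` stands in for the borrowed `into_par_iter` call and the toy closure replaces `Case::result`):

```rust
use rayon::prelude::*;

fn main() {
    let cases = vec!["case_a", "case_b", "case_c"];

    // Run every case in parallel; collecting an indexed parallel iterator
    // preserves the original ordering of results.
    let results: Vec<(usize, Result<(), String>)> = cases
        .par_iter()
        .enumerate()
        .map(|(i, name)| {
            let outcome = if name.ends_with('b') {
                Err(format!("case {} ({}) failed", i, name))
            } else {
                Ok(())
            };
            (i, outcome)
        })
        .collect();

    assert!(results[0].1.is_ok());
    assert!(results[1].1.is_err());
    assert_eq!(results.len(), 3);
}
```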
impl<T: YamlDecode> YamlDecode for Cases<T> {
/// Decodes a YAML list of test cases
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
let mut p = 0;
let mut elems: Vec<&str> = yaml
.match_indices("\n- ")
// Skip the `\n` used for matching a new line
.map(|(i, _)| i + 1)
.map(|i| {
let yaml_element = &yaml[p..i];
p = i;
yaml_element
})
.collect();
elems.push(&yaml[p..]);
let test_cases = elems
.iter()
.map(|s| {
// Remove the `- ` prefix.
let s = &s[2..];
// Remove a single level of indenting.
s.replace("\n ", "\n")
})
.map(|s| T::yaml_decode(&s.to_string()).unwrap())
.collect();
Ok(Self { test_cases })
}
}

View File

@ -1,5 +1,6 @@
use super::*;
use crate::case_result::compare_result;
use crate::cases::common::BlsCase;
use bls::{AggregatePublicKey, PublicKey};
use serde_derive::Deserialize;
@ -9,11 +10,7 @@ pub struct BlsAggregatePubkeys {
pub output: String,
}
impl YamlDecode for BlsAggregatePubkeys {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl BlsCase for BlsAggregatePubkeys {}
impl Case for BlsAggregatePubkeys {
fn result(&self, _case_index: usize) -> Result<(), Error> {

View File

@ -1,5 +1,6 @@
use super::*;
use crate::case_result::compare_result;
use crate::cases::common::BlsCase;
use bls::{AggregateSignature, Signature};
use serde_derive::Deserialize;
@ -9,11 +10,7 @@ pub struct BlsAggregateSigs {
pub output: String,
}
impl YamlDecode for BlsAggregateSigs {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl BlsCase for BlsAggregateSigs {}
impl Case for BlsAggregateSigs {
fn result(&self, _case_index: usize) -> Result<(), Error> {

View File

@ -1,5 +1,6 @@
use super::*;
use crate::case_result::compare_result;
use crate::cases::common::BlsCase;
use bls::{compress_g2, hash_on_g2};
use serde_derive::Deserialize;
@ -15,11 +16,7 @@ pub struct BlsG2Compressed {
pub output: Vec<String>,
}
impl YamlDecode for BlsG2Compressed {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl BlsCase for BlsG2Compressed {}
impl Case for BlsG2Compressed {
fn result(&self, _case_index: usize) -> Result<(), Error> {
@ -45,14 +42,9 @@ impl Case for BlsG2Compressed {
}
}
// Converts a vector to u64 (from big endian)
// Converts a vector to u64 (from little endian)
fn bytes_to_u64(array: &[u8]) -> u64 {
let mut result: u64 = 0;
for (i, value) in array.iter().rev().enumerate() {
if i == 8 {
break;
}
result += u64::pow(2, i as u32 * 8) * u64::from(*value);
}
result
let mut bytes = [0u8; 8];
bytes.copy_from_slice(array);
u64::from_le_bytes(bytes)
}
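A quick sanity check of the little-endian interpretation now used by `bytes_to_u64` (standard library only; note the new helper expects exactly eight bytes, since `copy_from_slice` panics on a length mismatch):

```rust
fn main() {
    let bytes = [1u8, 0, 0, 0, 0, 0, 0, 0];
    // Little-endian: the leading 0x01 byte is the least significant digit.
    assert_eq!(u64::from_le_bytes(bytes), 1);
    // Under the old big-endian reading the same bytes meant 2^56.
    assert_eq!(u64::from_be_bytes(bytes), 1u64 << 56);
}
```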

View File

@ -1,5 +1,6 @@
use super::*;
use crate::case_result::compare_result;
use crate::cases::common::BlsCase;
use bls::hash_on_g2;
use serde_derive::Deserialize;
@ -9,18 +10,14 @@ pub struct BlsG2UncompressedInput {
pub domain: String,
}
impl BlsCase for BlsG2UncompressedInput {}
#[derive(Debug, Clone, Deserialize)]
pub struct BlsG2Uncompressed {
pub input: BlsG2UncompressedInput,
pub output: Vec<Vec<String>>,
}
impl YamlDecode for BlsG2Uncompressed {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl Case for BlsG2Uncompressed {
fn result(&self, _case_index: usize) -> Result<(), Error> {
// Convert message and domain to required types

View File

@ -1,5 +1,6 @@
use super::*;
use crate::case_result::compare_result;
use crate::cases::common::BlsCase;
use bls::{PublicKey, SecretKey};
use serde_derive::Deserialize;
@ -9,11 +10,7 @@ pub struct BlsPrivToPub {
pub output: String,
}
impl YamlDecode for BlsPrivToPub {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl BlsCase for BlsPrivToPub {}
impl Case for BlsPrivToPub {
fn result(&self, _case_index: usize) -> Result<(), Error> {

View File

@ -1,5 +1,6 @@
use super::*;
use crate::case_result::compare_result;
use crate::cases::common::BlsCase;
use bls::{SecretKey, Signature};
use serde_derive::Deserialize;
@ -16,11 +17,7 @@ pub struct BlsSign {
pub output: String,
}
impl YamlDecode for BlsSign {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl BlsCase for BlsSign {}
impl Case for BlsSign {
fn result(&self, _case_index: usize) -> Result<(), Error> {
@ -45,16 +42,11 @@ impl Case for BlsSign {
}
}
// Converts a vector to u64 (from big endian)
// Converts a vector to u64 (from little endian)
fn bytes_to_u64(array: &[u8]) -> u64 {
let mut result: u64 = 0;
for (i, value) in array.iter().rev().enumerate() {
if i == 8 {
break;
}
result += u64::pow(2, i as u32 * 8) * u64::from(*value);
}
result
let mut bytes = [0u8; 8];
bytes.copy_from_slice(array);
u64::from_le_bytes(bytes)
}
// Increase the size of an array to 48 bytes

View File

@ -0,0 +1,72 @@
use crate::cases::LoadCase;
use crate::decode::yaml_decode_file;
use crate::error::Error;
use serde_derive::Deserialize;
use ssz::{Decode, Encode};
use ssz_derive::{Decode, Encode};
use std::convert::TryFrom;
use std::fmt::Debug;
use std::path::Path;
use tree_hash::TreeHash;
/// Trait for all BLS cases to eliminate some boilerplate.
pub trait BlsCase: serde::de::DeserializeOwned {}
impl<T: BlsCase> LoadCase for T {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
yaml_decode_file(&path.join("data.yaml"))
}
}
/// Macro to wrap U128 and U256 so they deserialize correctly.
macro_rules! uint_wrapper {
($wrapper_name:ident, $wrapped_type:ty) => {
#[derive(Debug, Clone, Copy, Default, PartialEq, Decode, Encode, Deserialize)]
#[serde(try_from = "String")]
pub struct $wrapper_name {
pub x: $wrapped_type,
}
impl TryFrom<String> for $wrapper_name {
type Error = String;
fn try_from(s: String) -> Result<Self, Self::Error> {
<$wrapped_type>::from_dec_str(&s)
.map(|x| Self { x })
.map_err(|e| format!("{:?}", e))
}
}
impl tree_hash::TreeHash for $wrapper_name {
fn tree_hash_type() -> tree_hash::TreeHashType {
<$wrapped_type>::tree_hash_type()
}
fn tree_hash_packed_encoding(&self) -> Vec<u8> {
self.x.tree_hash_packed_encoding()
}
fn tree_hash_packing_factor() -> usize {
<$wrapped_type>::tree_hash_packing_factor()
}
fn tree_hash_root(&self) -> Vec<u8> {
self.x.tree_hash_root()
}
}
};
}
uint_wrapper!(TestU128, ethereum_types::U128);
uint_wrapper!(TestU256, ethereum_types::U256);
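The `#[serde(try_from = "String")]` attribute routes deserialization through the generated `TryFrom<String>` impl, which parses decimal strings with `from_dec_str`. A short check of that parsing against `ethereum_types` directly (the crate is already a dependency here; values are illustrative):

```rust
use ethereum_types::U128;

fn main() {
    // Decimal-string parsing, as used by the wrapper's TryFrom<String> impl.
    let x = U128::from_dec_str("12345").expect("valid decimal string");
    assert_eq!(x, U128::from(12345u64));
    assert!(U128::from_dec_str("not a number").is_err());
}
```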
/// Trait alias collecting the bounds shared by all SSZ static test types.
pub trait SszStaticType:
serde::de::DeserializeOwned + Decode + Encode + TreeHash + Clone + PartialEq + Debug + Sync
{
}
impl<T> SszStaticType for T where
T: serde::de::DeserializeOwned + Decode + Encode + TreeHash + Clone + PartialEq + Debug + Sync
{
}

View File

@ -0,0 +1,143 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::decode::{ssz_decode_file, yaml_decode_file};
use crate::type_name;
use crate::type_name::TypeName;
use serde_derive::Deserialize;
use state_processing::per_epoch_processing::{
errors::EpochProcessingError, process_crosslinks, process_final_updates,
process_justification_and_finalization, process_registry_updates, process_slashings,
validator_statuses::ValidatorStatuses,
};
use std::marker::PhantomData;
use std::path::{Path, PathBuf};
use types::{BeaconState, ChainSpec, EthSpec};
#[derive(Debug, Clone, Default, Deserialize)]
pub struct Metadata {
pub description: Option<String>,
pub bls_setting: Option<BlsSetting>,
}
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct EpochProcessing<E: EthSpec, T: EpochTransition<E>> {
pub path: PathBuf,
pub metadata: Metadata,
pub pre: BeaconState<E>,
pub post: Option<BeaconState<E>>,
#[serde(skip_deserializing)]
_phantom: PhantomData<T>,
}
pub trait EpochTransition<E: EthSpec>: TypeName + Debug + Sync {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError>;
}
#[derive(Debug)]
pub struct JustificationAndFinalization;
#[derive(Debug)]
pub struct Crosslinks;
#[derive(Debug)]
pub struct RegistryUpdates;
#[derive(Debug)]
pub struct Slashings;
#[derive(Debug)]
pub struct FinalUpdates;
type_name!(
JustificationAndFinalization,
"justification_and_finalization"
);
type_name!(Crosslinks, "crosslinks");
type_name!(RegistryUpdates, "registry_updates");
type_name!(Slashings, "slashings");
type_name!(FinalUpdates, "final_updates");
impl<E: EthSpec> EpochTransition<E> for JustificationAndFinalization {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError> {
let mut validator_statuses = ValidatorStatuses::new(state, spec)?;
validator_statuses.process_attestations(state, spec)?;
process_justification_and_finalization(state, &validator_statuses.total_balances)
}
}
impl<E: EthSpec> EpochTransition<E> for Crosslinks {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError> {
process_crosslinks(state, spec)?;
Ok(())
}
}
impl<E: EthSpec> EpochTransition<E> for RegistryUpdates {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError> {
process_registry_updates(state, spec)
}
}
impl<E: EthSpec> EpochTransition<E> for Slashings {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError> {
let mut validator_statuses = ValidatorStatuses::new(&state, spec)?;
validator_statuses.process_attestations(&state, spec)?;
process_slashings(state, validator_statuses.total_balances.current_epoch, spec)?;
Ok(())
}
}
impl<E: EthSpec> EpochTransition<E> for FinalUpdates {
fn run(state: &mut BeaconState<E>, spec: &ChainSpec) -> Result<(), EpochProcessingError> {
process_final_updates(state, spec)
}
}
impl<E: EthSpec, T: EpochTransition<E>> LoadCase for EpochProcessing<E, T> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
let metadata_path = path.join("meta.yaml");
let metadata: Metadata = if metadata_path.is_file() {
yaml_decode_file(&metadata_path)?
} else {
Metadata::default()
};
let pre = ssz_decode_file(&path.join("pre.ssz"))?;
let post_file = path.join("post.ssz");
let post = if post_file.is_file() {
Some(ssz_decode_file(&post_file)?)
} else {
None
};
Ok(Self {
path: path.into(),
metadata,
pre,
post,
_phantom: PhantomData,
})
}
}
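Both this loader and the operations loader below use the same "optional metadata" pattern: read `meta.yaml` when it exists, otherwise fall back to `Metadata::default()`. A small sketch of that fallback using only the standard library (the stubbed body just records the path; `yaml_decode_file` is the real helper):

```rust
use std::path::Path;

#[derive(Debug, Default)]
struct Metadata {
    description: Option<String>,
}

fn load_metadata(dir: &Path) -> Metadata {
    let meta_path = dir.join("meta.yaml");
    if meta_path.is_file() {
        // Real code: `yaml_decode_file(&meta_path)?`. Stubbed out here.
        Metadata {
            description: Some(format!("loaded from {}", meta_path.display())),
        }
    } else {
        // A missing file is not an error; the case simply has no metadata.
        Metadata::default()
    }
}

fn main() {
    let meta = load_metadata(Path::new("/nonexistent/test/case"));
    assert!(meta.description.is_none());
}
```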
impl<E: EthSpec, T: EpochTransition<E>> Case for EpochProcessing<E, T> {
fn description(&self) -> String {
self.metadata
.description
.clone()
.unwrap_or_else(String::new)
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let mut state = self.pre.clone();
let mut expected = self.post.clone();
let spec = &E::default_spec();
let mut result = (|| {
// Processing requires the epoch cache.
state.build_all_caches(spec)?;
T::run(&mut state, spec).map(|_| state)
})();
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,37 +0,0 @@
use super::*;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_epoch_processing::process_crosslinks;
use types::{BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct EpochProcessingCrosslinks<E: EthSpec> {
pub description: String,
pub pre: BeaconState<E>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for EpochProcessingCrosslinks<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for EpochProcessingCrosslinks<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let mut state = self.pre.clone();
let mut expected = self.post.clone();
// Processing requires the epoch cache.
state.build_all_caches(&E::default_spec()).unwrap();
let mut result = process_crosslinks(&mut state, &E::default_spec()).map(|_| state);
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,41 +0,0 @@
use super::*;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_epoch_processing::process_final_updates;
use types::{BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct EpochProcessingFinalUpdates<E: EthSpec> {
pub description: String,
pub pre: BeaconState<E>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for EpochProcessingFinalUpdates<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for EpochProcessingFinalUpdates<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let mut state = self.pre.clone();
let mut expected = self.post.clone();
let spec = &E::default_spec();
let mut result = (|| {
// Processing requires the epoch cache.
state.build_all_caches(spec)?;
process_final_updates(&mut state, spec).map(|_| state)
})();
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,46 +0,0 @@
use super::*;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_epoch_processing::{
process_justification_and_finalization, validator_statuses::ValidatorStatuses,
};
use types::{BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct EpochProcessingJustificationAndFinalization<E: EthSpec> {
pub description: String,
pub pre: BeaconState<E>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for EpochProcessingJustificationAndFinalization<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for EpochProcessingJustificationAndFinalization<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let mut state = self.pre.clone();
let mut expected = self.post.clone();
let spec = &E::default_spec();
// Processing requires the epoch cache.
state.build_all_caches(spec).unwrap();
let mut result = (|| {
let mut validator_statuses = ValidatorStatuses::new(&state, spec)?;
validator_statuses.process_attestations(&state, spec)?;
process_justification_and_finalization(&mut state, &validator_statuses.total_balances)
.map(|_| state)
})();
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,38 +0,0 @@
use super::*;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_epoch_processing::registry_updates::process_registry_updates;
use types::{BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct EpochProcessingRegistryUpdates<E: EthSpec> {
pub description: String,
pub pre: BeaconState<E>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for EpochProcessingRegistryUpdates<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for EpochProcessingRegistryUpdates<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let mut state = self.pre.clone();
let mut expected = self.post.clone();
let spec = &E::default_spec();
// Processing requires the epoch cache.
state.build_all_caches(spec).unwrap();
let mut result = process_registry_updates(&mut state, spec).map(|_| state);
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,50 +0,0 @@
use super::*;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_epoch_processing::{
process_slashings::process_slashings, validator_statuses::ValidatorStatuses,
};
use types::{BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct EpochProcessingSlashings<E: EthSpec> {
pub description: String,
pub pre: BeaconState<E>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for EpochProcessingSlashings<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for EpochProcessingSlashings<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let mut state = self.pre.clone();
let mut expected = self.post.clone();
let spec = &E::default_spec();
let mut result = (|| {
// Processing requires the epoch cache.
state.build_all_caches(spec)?;
let mut validator_statuses = ValidatorStatuses::new(&state, spec)?;
validator_statuses.process_attestations(&state, spec)?;
process_slashings(
&mut state,
validator_statuses.total_balances.current_epoch,
spec,
)
.map(|_| state)
})();
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,34 +1,51 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::decode::{ssz_decode_file, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::initialize_beacon_state_from_eth1;
use std::path::PathBuf;
use types::{BeaconState, Deposit, EthSpec, Hash256};
#[derive(Debug, Clone, Deserialize)]
struct Metadata {
deposits_count: usize,
}
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct GenesisInitialization<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub path: PathBuf,
pub eth1_block_hash: Hash256,
pub eth1_timestamp: u64,
pub deposits: Vec<Deposit>,
pub state: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for GenesisInitialization<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
impl<E: EthSpec> LoadCase for GenesisInitialization<E> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
let eth1_block_hash = ssz_decode_file(&path.join("eth1_block_hash.ssz"))?;
let eth1_timestamp = yaml_decode_file(&path.join("eth1_timestamp.yaml"))?;
let meta: Metadata = yaml_decode_file(&path.join("meta.yaml"))?;
let deposits: Vec<Deposit> = (0..meta.deposits_count)
.map(|i| {
let filename = format!("deposits_{}.ssz", i);
ssz_decode_file(&path.join(filename))
})
.collect::<Result<_, _>>()?;
let state = ssz_decode_file(&path.join("state.ssz"))?;
Ok(Self {
path: path.into(),
eth1_block_hash,
eth1_timestamp,
deposits,
state: Some(state),
})
}
}
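Loading `deposits_{}.ssz` files relies on collecting an iterator of `Result`s into a single `Result<Vec<_>, _>`, so the first missing or corrupt file aborts the whole case load. A small illustration of that collect behaviour (the `load` function is a stand-in for `ssz_decode_file`):

```rust
// Stand-in loader: pretend only deposits_0.ssz and deposits_1.ssz exist.
fn load(i: usize) -> Result<String, String> {
    if i < 2 {
        Ok(format!("deposits_{}.ssz", i))
    } else {
        Err(format!("missing file: deposits_{}.ssz", i))
    }
}

fn main() {
    // All files present: collect yields Ok with every decoded item, in order.
    let ok: Result<Vec<_>, _> = (0..2).map(load).collect();
    assert_eq!(ok.unwrap().len(), 2);

    // One failure is enough to turn the whole collection into an Err.
    let err: Result<Vec<_>, String> = (0..3).map(load).collect();
    assert!(err.is_err());
}
```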
impl<E: EthSpec> Case for GenesisInitialization<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
let spec = &E::default_spec();
let mut result = initialize_beacon_state_from_eth1(

View File

@ -1,31 +1,28 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::decode::{ssz_decode_file, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::is_valid_genesis_state;
use std::path::Path;
use types::{BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct GenesisValidity<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub genesis: BeaconState<E>,
pub is_valid: bool,
}
impl<E: EthSpec> YamlDecode for GenesisValidity<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
impl<E: EthSpec> LoadCase for GenesisValidity<E> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
let genesis = ssz_decode_file(&path.join("genesis.ssz"))?;
let is_valid = yaml_decode_file(&path.join("is_valid.yaml"))?;
Ok(Self { genesis, is_valid })
}
}
impl<E: EthSpec> Case for GenesisValidity<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
let spec = &E::default_spec();
let is_valid = is_valid_genesis_state(&self.genesis, spec);

View File

@ -0,0 +1,194 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::decode::{ssz_decode_file, yaml_decode_file};
use crate::type_name::TypeName;
use serde_derive::Deserialize;
use ssz::Decode;
use state_processing::per_block_processing::{
errors::BlockProcessingError, process_attestations, process_attester_slashings,
process_block_header, process_deposits, process_exits, process_proposer_slashings,
process_transfers, VerifySignatures,
};
use std::fmt::Debug;
use std::path::Path;
use types::{
Attestation, AttesterSlashing, BeaconBlock, BeaconState, ChainSpec, Deposit, EthSpec,
ProposerSlashing, Transfer, VoluntaryExit,
};
#[derive(Debug, Clone, Default, Deserialize)]
struct Metadata {
description: Option<String>,
bls_setting: Option<BlsSetting>,
}
#[derive(Debug, Clone)]
pub struct Operations<E: EthSpec, O: Operation<E>> {
metadata: Metadata,
pub pre: BeaconState<E>,
pub operation: O,
pub post: Option<BeaconState<E>>,
}
pub trait Operation<E: EthSpec>: Decode + TypeName + Debug + Sync {
fn handler_name() -> String {
Self::name().to_lowercase()
}
fn filename() -> String {
format!("{}.ssz", Self::handler_name())
}
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError>;
}
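The default `handler_name`/`filename` methods mean most operations only implement `apply_to`, while types whose CamelCase name does not match the spec-test directory (e.g. `AttesterSlashing`) override `handler_name`. A stripped-down sketch of that convention, detached from `EthSpec` and SSZ, where the `name()` method stands in for the `TypeName` derive:

```rust
trait Operation {
    // Stand-in for the `TypeName` trait used in the real harness.
    fn name() -> String;

    fn handler_name() -> String {
        Self::name().to_lowercase()
    }

    fn filename() -> String {
        format!("{}.ssz", Self::handler_name())
    }
}

struct Deposit;
impl Operation for Deposit {
    fn name() -> String {
        "Deposit".into()
    }
}

struct AttesterSlashing;
impl Operation for AttesterSlashing {
    fn name() -> String {
        "AttesterSlashing".into()
    }
    // Override so the filename matches the snake_case test directory layout.
    fn handler_name() -> String {
        "attester_slashing".into()
    }
}

fn main() {
    assert_eq!(Deposit::filename(), "deposit.ssz");
    assert_eq!(AttesterSlashing::filename(), "attester_slashing.ssz");
}
```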
impl<E: EthSpec> Operation<E> for Attestation<E> {
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
process_attestations(state, &[self.clone()], VerifySignatures::True, spec)
}
}
impl<E: EthSpec> Operation<E> for AttesterSlashing<E> {
fn handler_name() -> String {
"attester_slashing".into()
}
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
process_attester_slashings(state, &[self.clone()], VerifySignatures::True, spec)
}
}
impl<E: EthSpec> Operation<E> for Deposit {
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
process_deposits(state, &[self.clone()], spec)
}
}
impl<E: EthSpec> Operation<E> for ProposerSlashing {
fn handler_name() -> String {
"proposer_slashing".into()
}
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
process_proposer_slashings(state, &[self.clone()], VerifySignatures::True, spec)
}
}
impl<E: EthSpec> Operation<E> for Transfer {
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
process_transfers(state, &[self.clone()], VerifySignatures::True, spec)
}
}
impl<E: EthSpec> Operation<E> for VoluntaryExit {
fn handler_name() -> String {
"voluntary_exit".into()
}
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
process_exits(state, &[self.clone()], VerifySignatures::True, spec)
}
}
impl<E: EthSpec> Operation<E> for BeaconBlock<E> {
fn handler_name() -> String {
"block_header".into()
}
fn filename() -> String {
"block.ssz".into()
}
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
Ok(process_block_header(
state,
self,
None,
VerifySignatures::True,
spec,
)?)
}
}
impl<E: EthSpec, O: Operation<E>> LoadCase for Operations<E, O> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
let metadata_path = path.join("meta.yaml");
let metadata: Metadata = if metadata_path.is_file() {
yaml_decode_file(&metadata_path)?
} else {
Metadata::default()
};
let pre = ssz_decode_file(&path.join("pre.ssz"))?;
let operation = ssz_decode_file(&path.join(O::filename()))?;
let post_filename = path.join("post.ssz");
let post = if post_filename.is_file() {
Some(ssz_decode_file(&post_filename)?)
} else {
None
};
Ok(Self {
metadata,
pre,
operation,
post,
})
}
}
impl<E: EthSpec, O: Operation<E>> Case for Operations<E, O> {
fn description(&self) -> String {
self.metadata
.description
.clone()
.unwrap_or_else(String::new)
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.metadata.bls_setting.unwrap_or_default().check()?;
let spec = &E::default_spec();
let mut state = self.pre.clone();
let mut expected = self.post.clone();
// Processing requires the epoch cache.
state.build_all_caches(spec).unwrap();
let mut result = self.operation.apply_to(&mut state, spec).map(|()| state);
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,47 +0,0 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_block_processing::{process_attestations, VerifySignatures};
use types::{Attestation, BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct OperationsAttestation<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub pre: BeaconState<E>,
pub attestation: Attestation<E>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for OperationsAttestation<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(&yaml).unwrap())
}
}
impl<E: EthSpec> Case for OperationsAttestation<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let spec = &E::default_spec();
self.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let attestation = self.attestation.clone();
let mut expected = self.post.clone();
// Processing requires the epoch cache.
state.build_all_caches(spec).unwrap();
let result = process_attestations(&mut state, &[attestation], VerifySignatures::True, spec);
let mut result = result.and_then(|_| Ok(state));
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,52 +0,0 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_block_processing::{process_attester_slashings, VerifySignatures};
use types::{AttesterSlashing, BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
pub struct OperationsAttesterSlashing<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
#[serde(bound = "E: EthSpec")]
pub pre: BeaconState<E>,
#[serde(bound = "E: EthSpec")]
pub attester_slashing: AttesterSlashing<E>,
#[serde(bound = "E: EthSpec")]
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for OperationsAttesterSlashing<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for OperationsAttesterSlashing<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let attester_slashing = self.attester_slashing.clone();
let mut expected = self.post.clone();
// Processing requires the epoch cache.
state.build_all_caches(&E::default_spec()).unwrap();
let result = process_attester_slashings(
&mut state,
&[attester_slashing],
VerifySignatures::True,
&E::default_spec(),
);
let mut result = result.and_then(|_| Ok(state));
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,46 +0,0 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_block_processing::{process_block_header, VerifySignatures};
use types::{BeaconBlock, BeaconState, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct OperationsBlockHeader<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub pre: BeaconState<E>,
pub block: BeaconBlock<E>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for OperationsBlockHeader<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for OperationsBlockHeader<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
let spec = &E::default_spec();
self.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let mut expected = self.post.clone();
// Processing requires the epoch cache.
state.build_all_caches(spec).unwrap();
let mut result =
process_block_header(&mut state, &self.block, None, VerifySignatures::True, spec)
.map(|_| state);
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,42 +0,0 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_block_processing::process_deposits;
use types::{BeaconState, Deposit, EthSpec};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct OperationsDeposit<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub pre: BeaconState<E>,
pub deposit: Deposit,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for OperationsDeposit<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for OperationsDeposit<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let deposit = self.deposit.clone();
let mut expected = self.post.clone();
let result = process_deposits(&mut state, &[deposit], &E::default_spec());
let mut result = result.and_then(|_| Ok(state));
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,50 +0,0 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_block_processing::{process_exits, VerifySignatures};
use types::{BeaconState, EthSpec, VoluntaryExit};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct OperationsExit<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub pre: BeaconState<E>,
pub voluntary_exit: VoluntaryExit,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for OperationsExit<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for OperationsExit<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let exit = self.voluntary_exit.clone();
let mut expected = self.post.clone();
// Exit processing requires the epoch cache.
state.build_all_caches(&E::default_spec()).unwrap();
let result = process_exits(
&mut state,
&[exit],
VerifySignatures::True,
&E::default_spec(),
);
let mut result = result.and_then(|_| Ok(state));
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,50 +0,0 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_block_processing::{process_proposer_slashings, VerifySignatures};
use types::{BeaconState, EthSpec, ProposerSlashing};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct OperationsProposerSlashing<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub pre: BeaconState<E>,
pub proposer_slashing: ProposerSlashing,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for OperationsProposerSlashing<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for OperationsProposerSlashing<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let proposer_slashing = self.proposer_slashing.clone();
let mut expected = self.post.clone();
// Processing requires the epoch cache.
state.build_all_caches(&E::default_spec()).unwrap();
let result = process_proposer_slashings(
&mut state,
&[proposer_slashing],
VerifySignatures::True,
&E::default_spec(),
);
let mut result = result.and_then(|_| Ok(state));
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,47 +0,0 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use serde_derive::Deserialize;
use state_processing::per_block_processing::{process_transfers, VerifySignatures};
use types::{BeaconState, EthSpec, Transfer};
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct OperationsTransfer<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub pre: BeaconState<E>,
pub transfer: Transfer,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for OperationsTransfer<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
}
}
impl<E: EthSpec> Case for OperationsTransfer<E> {
fn description(&self) -> String {
self.description.clone()
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let transfer = self.transfer.clone();
let mut expected = self.post.clone();
// Transfer processing requires the epoch cache.
state.build_all_caches(&E::default_spec()).unwrap();
let spec = E::default_spec();
let result = process_transfers(&mut state, &[transfer], VerifySignatures::True, &spec);
let mut result = result.and_then(|_| Ok(state));
compare_beacon_state_results_without_caches(&mut result, &mut expected)
}
}

View File

@ -1,35 +1,65 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::decode::{ssz_decode_file, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::{
per_block_processing, per_slot_processing, BlockProcessingError, BlockSignatureStrategy,
};
use types::{BeaconBlock, BeaconState, EthSpec, RelativeEpoch};
#[derive(Debug, Clone, Deserialize)]
pub struct Metadata {
pub description: Option<String>,
pub bls_setting: Option<BlsSetting>,
pub blocks_count: usize,
}
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct SanityBlocks<E: EthSpec> {
pub description: String,
pub bls_setting: Option<BlsSetting>,
pub metadata: Metadata,
pub pre: BeaconState<E>,
pub blocks: Vec<BeaconBlock<E>>,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for SanityBlocks<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
impl<E: EthSpec> LoadCase for SanityBlocks<E> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
let metadata: Metadata = yaml_decode_file(&path.join("meta.yaml"))?;
let pre = ssz_decode_file(&path.join("pre.ssz"))?;
let blocks: Vec<BeaconBlock<E>> = (0..metadata.blocks_count)
.map(|i| {
let filename = format!("blocks_{}.ssz", i);
ssz_decode_file(&path.join(filename))
})
.collect::<Result<_, _>>()?;
let post_file = path.join("post.ssz");
let post = if post_file.is_file() {
Some(ssz_decode_file(&post_file)?)
} else {
None
};
Ok(Self {
metadata,
pre,
blocks,
post,
})
}
}
impl<E: EthSpec> Case for SanityBlocks<E> {
fn description(&self) -> String {
self.description.clone()
self.metadata
.description
.clone()
.unwrap_or_else(String::new)
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.bls_setting.unwrap_or_default().check()?;
self.metadata.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let mut expected = self.post.clone();

View File

@ -1,30 +1,63 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::decode::{ssz_decode_file, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::per_slot_processing;
use types::{BeaconState, EthSpec};
#[derive(Debug, Clone, Default, Deserialize)]
pub struct Metadata {
pub description: Option<String>,
pub bls_setting: Option<BlsSetting>,
}
#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct SanitySlots<E: EthSpec> {
pub description: String,
pub metadata: Metadata,
pub pre: BeaconState<E>,
pub slots: usize,
pub slots: u64,
pub post: Option<BeaconState<E>>,
}
impl<E: EthSpec> YamlDecode for SanitySlots<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
impl<E: EthSpec> LoadCase for SanitySlots<E> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
let metadata_path = path.join("meta.yaml");
let metadata: Metadata = if metadata_path.is_file() {
yaml_decode_file(&metadata_path)?
} else {
Metadata::default()
};
let pre = ssz_decode_file(&path.join("pre.ssz"))?;
let slots: u64 = yaml_decode_file(&path.join("slots.yaml"))?;
let post_file = path.join("post.ssz");
let post = if post_file.is_file() {
Some(ssz_decode_file(&post_file)?)
} else {
None
};
Ok(Self {
metadata,
pre,
slots,
post,
})
}
}
impl<E: EthSpec> Case for SanitySlots<E> {
fn description(&self) -> String {
self.description.clone()
self.metadata
.description
.clone()
.unwrap_or_else(String::new)
}
fn result(&self, _case_index: usize) -> Result<(), Error> {
self.metadata.bls_setting.unwrap_or_default().check()?;
let mut state = self.pre.clone();
let mut expected = self.post.clone();
let spec = &E::default_spec();

View File

@ -1,5 +1,6 @@
use super::*;
use crate::case_result::compare_result;
use crate::decode::yaml_decode_file;
use serde_derive::Deserialize;
use std::marker::PhantomData;
use swap_or_not_shuffle::{get_permutated_index, shuffle_list};
@ -8,21 +9,21 @@ use swap_or_not_shuffle::{get_permutated_index, shuffle_list};
pub struct Shuffling<T> {
pub seed: String,
pub count: usize,
pub shuffled: Vec<usize>,
pub mapping: Vec<usize>,
#[serde(skip)]
_phantom: PhantomData<T>,
}
impl<T> YamlDecode for Shuffling<T> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
impl<T: EthSpec> LoadCase for Shuffling<T> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
yaml_decode_file(&path.join("mapping.yaml"))
}
}
impl<T: EthSpec> Case for Shuffling<T> {
fn result(&self, _case_index: usize) -> Result<(), Error> {
if self.count == 0 {
compare_result::<_, Error>(&Ok(vec![]), &Some(self.shuffled.clone()))?;
compare_result::<_, Error>(&Ok(vec![]), &Some(self.mapping.clone()))?;
} else {
let spec = T::default_spec();
let seed = hex::decode(&self.seed[2..])
@ -34,12 +35,12 @@ impl<T: EthSpec> Case for Shuffling<T> {
get_permutated_index(i, self.count, &seed, spec.shuffle_round_count).unwrap()
})
.collect();
compare_result::<_, Error>(&Ok(shuffling), &Some(self.shuffled.clone()))?;
compare_result::<_, Error>(&Ok(shuffling), &Some(self.mapping.clone()))?;
// Test "shuffle_list"
let input: Vec<usize> = (0..self.count).collect();
let shuffling = shuffle_list(input, spec.shuffle_round_count, &seed, false).unwrap();
compare_result::<_, Error>(&Ok(shuffling), &Some(self.shuffled.clone()))?;
compare_result::<_, Error>(&Ok(shuffling), &Some(self.mapping.clone()))?;
}
Ok(())

View File

@ -1,68 +1,270 @@
#![allow(non_snake_case)]
use super::*;
use crate::case_result::compare_result;
use ethereum_types::{U128, U256};
use crate::cases::common::{SszStaticType, TestU128, TestU256};
use crate::cases::ssz_static::{check_serialization, check_tree_hash};
use crate::decode::yaml_decode_file;
use serde_derive::Deserialize;
use ssz::Decode;
use std::fmt::Debug;
use ssz_derive::{Decode, Encode};
use std::fs;
use std::path::{Path, PathBuf};
use tree_hash_derive::TreeHash;
use types::typenum::*;
use types::{BitList, BitVector, FixedVector, VariableList};
#[derive(Debug, Clone, Deserialize)]
pub struct SszGeneric {
#[serde(alias = "type")]
pub type_name: String,
pub valid: bool,
pub value: Option<String>,
pub ssz: Option<String>,
struct Metadata {
root: String,
signing_root: Option<String>,
}
impl YamlDecode for SszGeneric {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
Ok(serde_yaml::from_str(yaml).unwrap())
#[derive(Debug, Clone)]
pub struct SszGeneric {
path: PathBuf,
handler_name: String,
case_name: String,
}
impl LoadCase for SszGeneric {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
let components = path
.components()
.map(|c| c.as_os_str().to_string_lossy().into_owned())
.rev()
.collect::<Vec<_>>();
// Test case name is last
let case_name = components[0].clone();
// Handler name is third last, before suite name and case name
let handler_name = components[2].clone();
Ok(Self {
path: path.into(),
handler_name,
case_name,
})
}
}
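// Illustrative example (names hypothetical, not from this diff): a case directory such as
// .../ssz_generic/basic_vector/valid/vec_uint8_32_max yields handler_name == "basic_vector"
// and case_name == "vec_uint8_32_max"; `result` below then splits the case name into
// elem_ty == "uint8" and length == "32".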
macro_rules! type_dispatch {
($function:ident,
($($arg:expr),*),
$base_ty:tt,
<$($param_ty:ty),*>,
[ $value:expr => primitive_type ] $($rest:tt)*) => {
match $value {
"bool" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* bool>, $($rest)*),
"uint8" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* u8>, $($rest)*),
"uint16" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* u16>, $($rest)*),
"uint32" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* u32>, $($rest)*),
"uint64" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* u64>, $($rest)*),
"uint128" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* TestU128>, $($rest)*),
"uint256" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* TestU256>, $($rest)*),
_ => Err(Error::FailedToParseTest(format!("unsupported: {}", $value))),
}
};
($function:ident,
($($arg:expr),*),
$base_ty:tt,
<$($param_ty:ty),*>,
[ $value:expr => typenum ] $($rest:tt)*) => {
match $value {
// Map the decimal string onto the corresponding typenum unsigned integer type.
"0" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U0>, $($rest)*),
"1" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U1>, $($rest)*),
"2" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U2>, $($rest)*),
"3" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U3>, $($rest)*),
"4" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U4>, $($rest)*),
"5" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U5>, $($rest)*),
"6" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U6>, $($rest)*),
"7" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U7>, $($rest)*),
"8" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U8>, $($rest)*),
"9" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U9>, $($rest)*),
"16" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U16>, $($rest)*),
"31" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U31>, $($rest)*),
"32" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U32>, $($rest)*),
"64" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U64>, $($rest)*),
"128" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U128>, $($rest)*),
"256" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U256>, $($rest)*),
"512" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U512>, $($rest)*),
"513" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U513>, $($rest)*),
"1024" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U1024>, $($rest)*),
"2048" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U2048>, $($rest)*),
"4096" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U4096>, $($rest)*),
"8192" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* U8192>, $($rest)*),
_ => Err(Error::FailedToParseTest(format!("unsupported: {}", $value))),
}
};
($function:ident,
($($arg:expr),*),
$base_ty:tt,
<$($param_ty:ty),*>,
[ $value:expr => test_container ] $($rest:tt)*) => {
match $value {
"SingleFieldTestStruct" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* SingleFieldTestStruct>, $($rest)*),
"SmallTestStruct" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* SmallTestStruct>, $($rest)*),
"FixedTestStruct" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* FixedTestStruct>, $($rest)*),
"VarTestStruct" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* VarTestStruct>, $($rest)*),
"BitsStruct" => type_dispatch!($function, ($($arg),*), $base_ty, <$($param_ty,)* BitsStruct>, $($rest)*),
// TODO: enable ComplexTestStruct
"ComplexTestStruct" => Err(Error::SkippedKnownFailure),
_ => Err(Error::FailedToParseTest(format!("unsupported: {}", $value))),
}
};
// No base type: apply type params to function
($function:ident, ($($arg:expr),*), _, <$($param_ty:ty),*>,) => {
$function::<$($param_ty),*>($($arg),*)
};
($function:ident, ($($arg:expr),*), $base_type_name:ident, <$($param_ty:ty),*>,) => {
$function::<$base_type_name<$($param_ty),*>>($($arg),*)
}
}
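// For illustration (not part of this diff): with elem_ty == "uint8" and length == "32", a call
//     type_dispatch!(ssz_generic_test, (&self.path), FixedVector, <>,
//                    [elem_ty => primitive_type] [length => typenum])
// recurses through the rules above and finally expands to
//     ssz_generic_test::<FixedVector<u8, U32>>(&self.path)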
impl Case for SszGeneric {
fn result(&self, _case_index: usize) -> Result<(), Error> {
if let Some(ssz) = &self.ssz {
match self.type_name.as_ref() {
"uint8" => ssz_generic_test::<u8>(self.valid, ssz, &self.value),
"uint16" => ssz_generic_test::<u16>(self.valid, ssz, &self.value),
"uint32" => ssz_generic_test::<u32>(self.valid, ssz, &self.value),
"uint64" => ssz_generic_test::<u64>(self.valid, ssz, &self.value),
"uint128" => ssz_generic_test::<U128>(self.valid, ssz, &self.value),
"uint256" => ssz_generic_test::<U256>(self.valid, ssz, &self.value),
_ => Err(Error::FailedToParseTest(format!(
"Unknown type: {}",
self.type_name
))),
let parts = self.case_name.split('_').collect::<Vec<_>>();
match self.handler_name.as_str() {
"basic_vector" => {
let elem_ty = parts[1];
let length = parts[2];
type_dispatch!(
ssz_generic_test,
(&self.path),
FixedVector,
<>,
[elem_ty => primitive_type]
[length => typenum]
)?;
}
} else {
// Skip tests that do not have an ssz field.
//
// See: https://github.com/ethereum/eth2.0-specs/issues/1079
Ok(())
"bitlist" => {
let mut limit = parts[1];
// The test format is inconsistent: some case names carry no numeric limit, so pretend it is 32 (arbitrary)
// https://github.com/ethereum/eth2.0-spec-tests
if limit == "no" {
limit = "32";
}
type_dispatch!(
ssz_generic_test,
(&self.path),
BitList,
<>,
[limit => typenum]
)?;
}
"bitvector" => {
let length = parts[1];
type_dispatch!(
ssz_generic_test,
(&self.path),
BitVector,
<>,
[length => typenum]
)?;
}
"boolean" => {
ssz_generic_test::<bool>(&self.path)?;
}
"uints" => {
let type_name = "uint".to_owned() + parts[1];
type_dispatch!(
ssz_generic_test,
(&self.path),
_,
<>,
[type_name.as_str() => primitive_type]
)?;
}
"containers" => {
let type_name = parts[0];
type_dispatch!(
ssz_generic_test,
(&self.path),
_,
<>,
[type_name => test_container]
)?;
}
_ => panic!("unsupported handler: {}", self.handler_name),
}
Ok(())
}
}
/// Execute a `ssz_generic` test case.
fn ssz_generic_test<T>(should_be_ok: bool, ssz: &str, value: &Option<String>) -> Result<(), Error>
where
T: Decode + YamlDecode + Debug + PartialEq<T>,
{
let ssz = hex::decode(&ssz[2..]).map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))?;
// We do not cater for the scenario where the test is valid but we are not passed any SSZ.
if should_be_ok && value.is_none() {
panic!("Unexpected test input. Cannot pass without value.")
}
let expected = if let Some(string) = value {
Some(T::yaml_decode(string)?)
fn ssz_generic_test<T: SszStaticType>(path: &Path) -> Result<(), Error> {
let meta_path = path.join("meta.yaml");
let meta: Option<Metadata> = if meta_path.is_file() {
Some(yaml_decode_file(&meta_path)?)
} else {
None
};
let decoded = T::from_ssz_bytes(&ssz);
let serialized = fs::read(&path.join("serialized.ssz")).expect("serialized.ssz exists");
compare_result(&decoded, &expected)
let value_path = path.join("value.yaml");
let value: Option<T> = if value_path.is_file() {
Some(yaml_decode_file(&value_path)?)
} else {
None
};
// Valid
// TODO: signing root (annoying because of traits)
if let Some(value) = value {
check_serialization(&value, &serialized)?;
if let Some(ref meta) = meta {
check_tree_hash(&meta.root, value.tree_hash_root())?;
}
}
// Invalid
else if let Ok(decoded) = T::from_ssz_bytes(&serialized) {
    return Err(Error::DidntFail(format!(
        "Decoded invalid bytes into: {:?}",
        decoded
    )));
}
Ok(())
}
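// In summary, each `ssz_generic` case directory is expected to contain `serialized.ssz`, plus
// `value.yaml` (and optionally `meta.yaml` with the expected root) for valid cases; invalid
// cases omit `value.yaml` and must merely fail to decode.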
// Containers for SSZ generic tests
#[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)]
struct SingleFieldTestStruct {
A: u8,
}
#[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)]
struct SmallTestStruct {
A: u16,
B: u16,
}
#[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)]
struct FixedTestStruct {
A: u8,
B: u64,
C: u32,
}
#[derive(Debug, Clone, Default, PartialEq, Decode, Encode, TreeHash, Deserialize)]
struct VarTestStruct {
A: u16,
B: VariableList<u16, U1024>,
C: u8,
}
#[derive(Debug, Clone, PartialEq, Decode, Encode, TreeHash, Deserialize)]
struct BitsStruct {
A: BitList<U5>,
B: BitVector<U2>,
C: BitVector<U1>,
D: BitList<U6>,
E: BitVector<U8>,
}

View File

@ -1,127 +1,100 @@
use super::*;
use crate::case_result::compare_result;
use crate::cases::common::SszStaticType;
use crate::decode::yaml_decode_file;
use serde_derive::Deserialize;
use ssz::{Decode, Encode};
use std::fmt::Debug;
use std::marker::PhantomData;
use tree_hash::TreeHash;
use types::{
test_utils::TestRandom, Attestation, AttestationData, AttestationDataAndCustodyBit,
AttesterSlashing, BeaconBlock, BeaconBlockBody, BeaconBlockHeader, BeaconState, Checkpoint,
CompactCommittee, Crosslink, Deposit, DepositData, Eth1Data, EthSpec, Fork, Hash256,
HistoricalBatch, IndexedAttestation, PendingAttestation, ProposerSlashing, Transfer, Validator,
VoluntaryExit,
};
// Enum variant names are used by Serde when deserializing the test YAML
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone, Deserialize)]
pub enum SszStatic<E>
where
E: EthSpec,
{
Fork(SszStaticInner<Fork, E>),
Crosslink(SszStaticInner<Crosslink, E>),
Checkpoint(SszStaticInner<Checkpoint, E>),
CompactCommittee(SszStaticInner<CompactCommittee<E>, E>),
Eth1Data(SszStaticInner<Eth1Data, E>),
AttestationData(SszStaticInner<AttestationData, E>),
AttestationDataAndCustodyBit(SszStaticInner<AttestationDataAndCustodyBit, E>),
IndexedAttestation(SszStaticInner<IndexedAttestation<E>, E>),
DepositData(SszStaticInner<DepositData, E>),
BeaconBlockHeader(SszStaticInner<BeaconBlockHeader, E>),
Validator(SszStaticInner<Validator, E>),
PendingAttestation(SszStaticInner<PendingAttestation<E>, E>),
HistoricalBatch(SszStaticInner<HistoricalBatch<E>, E>),
ProposerSlashing(SszStaticInner<ProposerSlashing, E>),
AttesterSlashing(SszStaticInner<AttesterSlashing<E>, E>),
Attestation(SszStaticInner<Attestation<E>, E>),
Deposit(SszStaticInner<Deposit, E>),
VoluntaryExit(SszStaticInner<VoluntaryExit, E>),
Transfer(SszStaticInner<Transfer, E>),
BeaconBlockBody(SszStaticInner<BeaconBlockBody<E>, E>),
BeaconBlock(SszStaticInner<BeaconBlock<E>, E>),
BeaconState(SszStaticInner<BeaconState<E>, E>),
}
use std::fs;
use tree_hash::SignedRoot;
use types::Hash256;
#[derive(Debug, Clone, Deserialize)]
pub struct SszStaticInner<T, E>
where
E: EthSpec,
{
pub value: T,
pub serialized: String,
pub root: String,
#[serde(skip, default)]
_phantom: PhantomData<E>,
struct SszStaticRoots {
root: String,
signing_root: Option<String>,
}
impl<E: EthSpec + serde::de::DeserializeOwned> YamlDecode for SszStatic<E> {
fn yaml_decode(yaml: &str) -> Result<Self, Error> {
serde_yaml::from_str(yaml).map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))
#[derive(Debug, Clone)]
pub struct SszStatic<T> {
roots: SszStaticRoots,
serialized: Vec<u8>,
value: T,
}
#[derive(Debug, Clone)]
pub struct SszStaticSR<T> {
roots: SszStaticRoots,
serialized: Vec<u8>,
value: T,
}
fn load_from_dir<T: SszStaticType>(path: &Path) -> Result<(SszStaticRoots, Vec<u8>, T), Error> {
let roots = yaml_decode_file(&path.join("roots.yaml"))?;
let serialized = fs::read(&path.join("serialized.ssz")).expect("serialized.ssz exists");
let value = yaml_decode_file(&path.join("value.yaml"))?;
Ok((roots, serialized, value))
}
impl<T: SszStaticType> LoadCase for SszStatic<T> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
load_from_dir(path).map(|(roots, serialized, value)| Self {
roots,
serialized,
value,
})
}
}
impl<E: EthSpec> Case for SszStatic<E> {
fn result(&self, _case_index: usize) -> Result<(), Error> {
use self::SszStatic::*;
match *self {
Fork(ref val) => ssz_static_test(val),
Crosslink(ref val) => ssz_static_test(val),
Checkpoint(ref val) => ssz_static_test(val),
CompactCommittee(ref val) => ssz_static_test(val),
Eth1Data(ref val) => ssz_static_test(val),
AttestationData(ref val) => ssz_static_test(val),
AttestationDataAndCustodyBit(ref val) => ssz_static_test(val),
IndexedAttestation(ref val) => ssz_static_test(val),
DepositData(ref val) => ssz_static_test(val),
BeaconBlockHeader(ref val) => ssz_static_test(val),
Validator(ref val) => ssz_static_test(val),
PendingAttestation(ref val) => ssz_static_test(val),
HistoricalBatch(ref val) => ssz_static_test(val),
ProposerSlashing(ref val) => ssz_static_test(val),
AttesterSlashing(ref val) => ssz_static_test(val),
Attestation(ref val) => ssz_static_test(val),
Deposit(ref val) => ssz_static_test(val),
VoluntaryExit(ref val) => ssz_static_test(val),
Transfer(ref val) => ssz_static_test(val),
BeaconBlockBody(ref val) => ssz_static_test(val),
BeaconBlock(ref val) => ssz_static_test(val),
BeaconState(ref val) => ssz_static_test(val),
}
impl<T: SszStaticType + SignedRoot> LoadCase for SszStaticSR<T> {
fn load_from_dir(path: &Path) -> Result<Self, Error> {
load_from_dir(path).map(|(roots, serialized, value)| Self {
roots,
serialized,
value,
})
}
}
fn ssz_static_test<T, E: EthSpec>(tc: &SszStaticInner<T, E>) -> Result<(), Error>
where
T: Clone
+ Decode
+ Debug
+ Encode
+ PartialEq<T>
+ serde::de::DeserializeOwned
+ TreeHash
+ TestRandom,
{
// Verify we can decode SSZ in the same way we can decode YAML.
let ssz = hex::decode(&tc.serialized[2..])
.map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))?;
let expected = tc.value.clone();
let decode_result = T::from_ssz_bytes(&ssz);
compare_result(&decode_result, &Some(expected))?;
pub fn check_serialization<T: SszStaticType>(value: &T, serialized: &[u8]) -> Result<(), Error> {
// Check serialization
let serialized_result = value.as_ssz_bytes();
compare_result::<Vec<u8>, Error>(&Ok(serialized_result), &Some(serialized.to_vec()))?;
// Verify we can encode the result back into the original SSZ bytes.
let decoded = decode_result.unwrap();
let encoded_result = decoded.as_ssz_bytes();
compare_result::<Vec<u8>, Error>(&Ok(encoded_result), &Some(ssz))?;
// Verify the TreeHash root of the decoded struct matches the test.
let expected_root =
&hex::decode(&tc.root[2..]).map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))?;
let expected_root = Hash256::from_slice(&expected_root);
let tree_hash_root = Hash256::from_slice(&decoded.tree_hash_root());
compare_result::<Hash256, Error>(&Ok(tree_hash_root), &Some(expected_root))?;
// Check deserialization
let deserialized_result = T::from_ssz_bytes(serialized);
compare_result(&deserialized_result, &Some(value.clone()))?;
Ok(())
}
pub fn check_tree_hash(expected_str: &str, actual_root: Vec<u8>) -> Result<(), Error> {
let expected_root = hex::decode(&expected_str[2..])
.map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))?;
let expected_root = Hash256::from_slice(&expected_root);
let tree_hash_root = Hash256::from_slice(&actual_root);
compare_result::<Hash256, Error>(&Ok(tree_hash_root), &Some(expected_root))
}
impl<T: SszStaticType> Case for SszStatic<T> {
fn result(&self, _case_index: usize) -> Result<(), Error> {
check_serialization(&self.value, &self.serialized)?;
check_tree_hash(&self.roots.root, self.value.tree_hash_root())?;
Ok(())
}
}
impl<T: SszStaticType + SignedRoot> Case for SszStaticSR<T> {
fn result(&self, _case_index: usize) -> Result<(), Error> {
check_serialization(&self.value, &self.serialized)?;
check_tree_hash(&self.roots.root, self.value.tree_hash_root())?;
check_tree_hash(
&self
.roots
.signing_root
.as_ref()
.expect("signed root exists"),
self.value.signed_root(),
)?;
Ok(())
}
}

View File

@ -0,0 +1,31 @@
use super::*;
use std::fs;
use std::path::Path;
pub fn yaml_decode<T: serde::de::DeserializeOwned>(string: &str) -> Result<T, Error> {
serde_yaml::from_str(string).map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))
}
pub fn yaml_decode_file<T: serde::de::DeserializeOwned>(path: &Path) -> Result<T, Error> {
fs::read_to_string(path)
.map_err(|e| {
Error::FailedToParseTest(format!("Unable to load {}: {:?}", path.display(), e))
})
.and_then(|s| yaml_decode(&s))
}
pub fn ssz_decode_file<T: ssz::Decode>(path: &Path) -> Result<T, Error> {
fs::read(path)
.map_err(|e| {
Error::FailedToParseTest(format!("Unable to load {}: {:?}", path.display(), e))
})
.and_then(|s| {
T::from_ssz_bytes(&s).map_err(|e| {
Error::FailedToParseTest(format!(
"Unable to parse SSZ at {}: {:?}",
path.display(),
e
))
})
})
}
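// Typical usage from a `LoadCase` impl (illustrative):
//     let pre: BeaconState<E> = ssz_decode_file(&path.join("pre.ssz"))?;
//     let meta: Metadata = yaml_decode_file(&path.join("meta.yaml"))?;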

View File

@ -1,253 +0,0 @@
use crate::case_result::CaseResult;
use crate::cases::*;
use crate::doc_header::DocHeader;
use crate::error::Error;
use crate::yaml_decode::{yaml_split_header_and_cases, YamlDecode};
use crate::EfTest;
use serde_derive::Deserialize;
use std::{fs::File, io::prelude::*, path::PathBuf};
use types::{MainnetEthSpec, MinimalEthSpec};
#[derive(Debug, Deserialize)]
pub struct Doc {
pub header_yaml: String,
pub cases_yaml: String,
pub path: PathBuf,
}
impl Doc {
fn from_path(path: PathBuf) -> Self {
let mut file = File::open(path.clone()).unwrap();
let mut yaml = String::new();
file.read_to_string(&mut yaml).unwrap();
let (header_yaml, cases_yaml) = yaml_split_header_and_cases(yaml.clone());
Self {
header_yaml,
cases_yaml,
path,
}
}
pub fn test_results(&self) -> Vec<CaseResult> {
let header: DocHeader = serde_yaml::from_str(&self.header_yaml.as_str()).unwrap();
match (
header.runner.as_ref(),
header.handler.as_ref(),
header.config.as_ref(),
) {
("ssz", "uint", _) => run_test::<SszGeneric>(self),
("ssz", "static", "minimal") => run_test::<SszStatic<MinimalEthSpec>>(self),
("ssz", "static", "mainnet") => run_test::<SszStatic<MainnetEthSpec>>(self),
("sanity", "slots", "minimal") => run_test::<SanitySlots<MinimalEthSpec>>(self),
// FIXME: skipped due to compact committees issue
("sanity", "slots", "mainnet") => vec![], // run_test::<SanitySlots<MainnetEthSpec>>(self),
("sanity", "blocks", "minimal") => run_test::<SanityBlocks<MinimalEthSpec>>(self),
// FIXME: skipped due to compact committees issue
("sanity", "blocks", "mainnet") => vec![], // run_test::<SanityBlocks<MainnetEthSpec>>(self),
("shuffling", "core", "minimal") => run_test::<Shuffling<MinimalEthSpec>>(self),
("shuffling", "core", "mainnet") => run_test::<Shuffling<MainnetEthSpec>>(self),
("bls", "aggregate_pubkeys", "mainnet") => run_test::<BlsAggregatePubkeys>(self),
("bls", "aggregate_sigs", "mainnet") => run_test::<BlsAggregateSigs>(self),
("bls", "msg_hash_compressed", "mainnet") => run_test::<BlsG2Compressed>(self),
// Note: this test fails due to a difference in our internal representations. It does
// not affect verification or external representation.
//
// It is skipped.
("bls", "msg_hash_uncompressed", "mainnet") => vec![],
("bls", "priv_to_pub", "mainnet") => run_test::<BlsPrivToPub>(self),
("bls", "sign_msg", "mainnet") => run_test::<BlsSign>(self),
("operations", "deposit", "mainnet") => {
run_test::<OperationsDeposit<MainnetEthSpec>>(self)
}
("operations", "deposit", "minimal") => {
run_test::<OperationsDeposit<MinimalEthSpec>>(self)
}
("operations", "transfer", "mainnet") => {
run_test::<OperationsTransfer<MainnetEthSpec>>(self)
}
("operations", "transfer", "minimal") => {
run_test::<OperationsTransfer<MinimalEthSpec>>(self)
}
("operations", "voluntary_exit", "mainnet") => {
run_test::<OperationsExit<MainnetEthSpec>>(self)
}
("operations", "voluntary_exit", "minimal") => {
run_test::<OperationsExit<MinimalEthSpec>>(self)
}
("operations", "proposer_slashing", "mainnet") => {
run_test::<OperationsProposerSlashing<MainnetEthSpec>>(self)
}
("operations", "proposer_slashing", "minimal") => {
run_test::<OperationsProposerSlashing<MinimalEthSpec>>(self)
}
("operations", "attester_slashing", "mainnet") => {
run_test::<OperationsAttesterSlashing<MainnetEthSpec>>(self)
}
("operations", "attester_slashing", "minimal") => {
run_test::<OperationsAttesterSlashing<MinimalEthSpec>>(self)
}
("operations", "attestation", "mainnet") => {
run_test::<OperationsAttestation<MainnetEthSpec>>(self)
}
("operations", "attestation", "minimal") => {
run_test::<OperationsAttestation<MinimalEthSpec>>(self)
}
("operations", "block_header", "mainnet") => {
run_test::<OperationsBlockHeader<MainnetEthSpec>>(self)
}
("operations", "block_header", "minimal") => {
run_test::<OperationsBlockHeader<MinimalEthSpec>>(self)
}
("epoch_processing", "crosslinks", "minimal") => {
run_test::<EpochProcessingCrosslinks<MinimalEthSpec>>(self)
}
("epoch_processing", "crosslinks", "mainnet") => {
run_test::<EpochProcessingCrosslinks<MainnetEthSpec>>(self)
}
("epoch_processing", "registry_updates", "minimal") => {
run_test::<EpochProcessingRegistryUpdates<MinimalEthSpec>>(self)
}
("epoch_processing", "registry_updates", "mainnet") => {
run_test::<EpochProcessingRegistryUpdates<MainnetEthSpec>>(self)
}
("epoch_processing", "justification_and_finalization", "minimal") => {
run_test::<EpochProcessingJustificationAndFinalization<MinimalEthSpec>>(self)
}
("epoch_processing", "justification_and_finalization", "mainnet") => {
run_test::<EpochProcessingJustificationAndFinalization<MainnetEthSpec>>(self)
}
("epoch_processing", "slashings", "minimal") => {
run_test::<EpochProcessingSlashings<MinimalEthSpec>>(self)
}
("epoch_processing", "slashings", "mainnet") => {
run_test::<EpochProcessingSlashings<MainnetEthSpec>>(self)
}
("epoch_processing", "final_updates", "minimal") => {
run_test::<EpochProcessingFinalUpdates<MinimalEthSpec>>(self)
}
("epoch_processing", "final_updates", "mainnet") => {
vec![]
// FIXME: skipped due to compact committees issue
// run_test::<EpochProcessingFinalUpdates<MainnetEthSpec>>(self)
}
("genesis", "initialization", "minimal") => {
run_test::<GenesisInitialization<MinimalEthSpec>>(self)
}
("genesis", "initialization", "mainnet") => {
run_test::<GenesisInitialization<MainnetEthSpec>>(self)
}
("genesis", "validity", "minimal") => run_test::<GenesisValidity<MinimalEthSpec>>(self),
("genesis", "validity", "mainnet") => run_test::<GenesisValidity<MainnetEthSpec>>(self),
(runner, handler, config) => panic!(
"No implementation for runner: \"{}\", handler: \"{}\", config: \"{}\"",
runner, handler, config
),
}
}
pub fn assert_tests_pass(path: PathBuf) {
let doc = Self::from_path(path);
let results = doc.test_results();
let (failed, skipped_bls, skipped_known_failures) = categorize_results(&results);
if failed.len() + skipped_known_failures.len() > 0 {
print_results(
&doc,
&failed,
&skipped_bls,
&skipped_known_failures,
&results,
);
if !failed.is_empty() {
panic!("Tests failed (see above)");
}
} else {
println!("Passed {} tests in {:?}", results.len(), doc.path);
}
}
}
pub fn run_test<T>(doc: &Doc) -> Vec<CaseResult>
where
Cases<T>: EfTest + YamlDecode,
{
// Pass only the "test_cases" YAML string to `yaml_decode`.
let test_cases: Cases<T> = Cases::yaml_decode(&doc.cases_yaml).unwrap();
test_cases.test_results()
}
pub fn categorize_results(
results: &[CaseResult],
) -> (Vec<&CaseResult>, Vec<&CaseResult>, Vec<&CaseResult>) {
let mut failed = vec![];
let mut skipped_bls = vec![];
let mut skipped_known_failures = vec![];
for case in results {
match case.result.as_ref().err() {
Some(Error::SkippedBls) => skipped_bls.push(case),
Some(Error::SkippedKnownFailure) => skipped_known_failures.push(case),
Some(_) => failed.push(case),
None => (),
}
}
(failed, skipped_bls, skipped_known_failures)
}
pub fn print_results(
doc: &Doc,
failed: &[&CaseResult],
skipped_bls: &[&CaseResult],
skipped_known_failures: &[&CaseResult],
results: &[CaseResult],
) {
let header: DocHeader = serde_yaml::from_str(&doc.header_yaml).unwrap();
println!("--------------------------------------------------");
println!(
"Test {}",
if failed.is_empty() {
"Result"
} else {
"Failure"
}
);
println!("Title: {}", header.title);
println!("File: {:?}", doc.path);
println!(
"{} tests, {} failed, {} skipped (known failure), {} skipped (bls), {} passed. (See below for errors)",
results.len(),
failed.len(),
skipped_known_failures.len(),
skipped_bls.len(),
results.len() - skipped_bls.len() - skipped_known_failures.len() - failed.len()
);
println!();
for case in skipped_known_failures {
println!("-------");
println!(
"case[{}] ({}) skipped because it's a known failure",
case.case_index, case.desc,
);
}
for failure in failed {
let error = failure.result.clone().unwrap_err();
println!("-------");
println!(
"case[{}] ({}) failed with {}:",
failure.case_index,
failure.desc,
error.name()
);
println!("{}", error.message());
}
println!();
}

View File

@ -1,12 +0,0 @@
use serde_derive::Deserialize;
#[derive(Debug, Deserialize)]
pub struct DocHeader {
pub title: String,
pub summary: String,
pub forks_timeline: String,
pub forks: Vec<String>,
pub config: String,
pub runner: String,
pub handler: String,
}

View File

@ -0,0 +1,292 @@
use crate::cases::{self, Case, Cases, EpochTransition, LoadCase, Operation};
use crate::type_name;
use crate::type_name::TypeName;
use std::fs;
use std::marker::PhantomData;
use std::path::PathBuf;
use tree_hash::SignedRoot;
use types::EthSpec;
pub trait Handler {
type Case: Case + LoadCase;
fn config_name() -> &'static str {
"general"
}
fn fork_name() -> &'static str {
"phase0"
}
fn runner_name() -> &'static str;
fn handler_name() -> String;
fn run() {
let handler_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("eth2.0-spec-tests")
.join("tests")
.join(Self::config_name())
.join(Self::fork_name())
.join(Self::runner_name())
.join(Self::handler_name());
// Iterate through test suites
let test_cases = fs::read_dir(&handler_path)
.expect("handler dir exists")
.flat_map(|entry| {
entry
.ok()
.filter(|e| e.file_type().map(|ty| ty.is_dir()).unwrap_or(false))
})
.flat_map(|suite| fs::read_dir(suite.path()).expect("suite dir exists"))
.flat_map(Result::ok)
.map(|test_case_dir| {
let path = test_case_dir.path();
let case = Self::Case::load_from_dir(&path).expect("test should load");
(path, case)
})
.collect();
let results = Cases { test_cases }.test_results();
let name = format!("{}/{}", Self::runner_name(), Self::handler_name());
crate::results::assert_tests_pass(&name, &handler_path, &results);
}
}
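// For example, a handler with the default config_name() == "general" and fork_name() == "phase0",
// plus runner_name() == "bls" and handler_name() == "aggregate_pubkeys", loads its cases from
// eth2.0-spec-tests/tests/general/phase0/bls/aggregate_pubkeys/<suite>/<case>/.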
macro_rules! bls_handler {
($runner_name: ident, $case_name:ident, $handler_name:expr) => {
pub struct $runner_name;
impl Handler for $runner_name {
type Case = cases::$case_name;
fn runner_name() -> &'static str {
"bls"
}
fn handler_name() -> String {
$handler_name.into()
}
}
};
}
bls_handler!(
BlsAggregatePubkeysHandler,
BlsAggregatePubkeys,
"aggregate_pubkeys"
);
bls_handler!(BlsAggregateSigsHandler, BlsAggregateSigs, "aggregate_sigs");
bls_handler!(
BlsG2CompressedHandler,
BlsG2Compressed,
"msg_hash_compressed"
);
bls_handler!(BlsPrivToPubHandler, BlsPrivToPub, "priv_to_pub");
bls_handler!(BlsSignMsgHandler, BlsSign, "sign_msg");
/// Handler for SSZ types that do not implement `SignedRoot`.
pub struct SszStaticHandler<T, E>(PhantomData<(T, E)>);
/// Handler for SSZ types that do implement `SignedRoot`.
pub struct SszStaticSRHandler<T, E>(PhantomData<(T, E)>);
impl<T, E> Handler for SszStaticHandler<T, E>
where
T: cases::SszStaticType + TypeName,
E: TypeName,
{
type Case = cases::SszStatic<T>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"ssz_static"
}
fn handler_name() -> String {
T::name().into()
}
}
impl<T, E> Handler for SszStaticSRHandler<T, E>
where
T: cases::SszStaticType + SignedRoot + TypeName,
E: TypeName,
{
type Case = cases::SszStaticSR<T>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"ssz_static"
}
fn handler_name() -> String {
T::name().into()
}
}
pub struct ShufflingHandler<E>(PhantomData<E>);
impl<E: EthSpec + TypeName> Handler for ShufflingHandler<E> {
type Case = cases::Shuffling<E>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"shuffling"
}
fn handler_name() -> String {
"core".into()
}
}
pub struct SanityBlocksHandler<E>(PhantomData<E>);
impl<E: EthSpec + TypeName> Handler for SanityBlocksHandler<E> {
type Case = cases::SanityBlocks<E>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"sanity"
}
fn handler_name() -> String {
"blocks".into()
}
}
pub struct SanitySlotsHandler<E>(PhantomData<E>);
impl<E: EthSpec + TypeName> Handler for SanitySlotsHandler<E> {
type Case = cases::SanitySlots<E>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"sanity"
}
fn handler_name() -> String {
"slots".into()
}
}
pub struct EpochProcessingHandler<E, T>(PhantomData<(E, T)>);
impl<E: EthSpec + TypeName, T: EpochTransition<E>> Handler for EpochProcessingHandler<E, T> {
type Case = cases::EpochProcessing<E, T>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"epoch_processing"
}
fn handler_name() -> String {
T::name().into()
}
}
pub struct GenesisValidityHandler<E>(PhantomData<E>);
impl<E: EthSpec + TypeName> Handler for GenesisValidityHandler<E> {
type Case = cases::GenesisValidity<E>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"genesis"
}
fn handler_name() -> String {
"validity".into()
}
}
pub struct GenesisInitializationHandler<E>(PhantomData<E>);
impl<E: EthSpec + TypeName> Handler for GenesisInitializationHandler<E> {
type Case = cases::GenesisInitialization<E>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"genesis"
}
fn handler_name() -> String {
"initialization".into()
}
}
pub struct OperationsHandler<E, O>(PhantomData<(E, O)>);
impl<E: EthSpec + TypeName, O: Operation<E>> Handler for OperationsHandler<E, O> {
type Case = cases::Operations<E, O>;
fn config_name() -> &'static str {
E::name()
}
fn runner_name() -> &'static str {
"operations"
}
fn handler_name() -> String {
O::handler_name()
}
}
pub struct SszGenericHandler<H>(PhantomData<H>);
impl<H: TypeName> Handler for SszGenericHandler<H> {
type Case = cases::SszGeneric;
fn config_name() -> &'static str {
"general"
}
fn runner_name() -> &'static str {
"ssz_generic"
}
fn handler_name() -> String {
H::name().into()
}
}
// Supported SSZ generic handlers
pub struct BasicVector;
type_name!(BasicVector, "basic_vector");
pub struct Bitlist;
type_name!(Bitlist, "bitlist");
pub struct Bitvector;
type_name!(Bitvector, "bitvector");
pub struct Boolean;
type_name!(Boolean, "boolean");
pub struct Uints;
type_name!(Uints, "uints");
pub struct Containers;
type_name!(Containers, "containers");

View File

@ -2,21 +2,17 @@ use types::EthSpec;
pub use case_result::CaseResult;
pub use cases::Case;
pub use doc::Doc;
pub use cases::{
Crosslinks, FinalUpdates, JustificationAndFinalization, RegistryUpdates, Slashings,
};
pub use error::Error;
pub use yaml_decode::YamlDecode;
pub use handler::*;
mod bls_setting;
mod case_result;
mod cases;
mod doc;
mod doc_header;
mod decode;
mod error;
mod yaml_decode;
/// Implemented by objects that can return the results of some test(s) adhering to the
/// Ethereum Foundation testing format.
pub trait EfTest {
/// Returns the results of executing one or more tests.
fn test_results(&self) -> Vec<CaseResult>;
}
mod handler;
mod results;
mod type_name;

View File

@ -0,0 +1,92 @@
use crate::case_result::CaseResult;
use crate::error::Error;
use std::path::Path;
pub fn assert_tests_pass(handler_name: &str, path: &Path, results: &[CaseResult]) {
let (failed, skipped_bls, skipped_known_failures) = categorize_results(results);
if failed.len() + skipped_known_failures.len() > 0 {
print_results(
handler_name,
&failed,
&skipped_bls,
&skipped_known_failures,
&results,
);
if !failed.is_empty() {
panic!("Tests failed (see above)");
}
} else {
println!("Passed {} tests in {}", results.len(), path.display());
}
}
pub fn categorize_results(
results: &[CaseResult],
) -> (Vec<&CaseResult>, Vec<&CaseResult>, Vec<&CaseResult>) {
let mut failed = vec![];
let mut skipped_bls = vec![];
let mut skipped_known_failures = vec![];
for case in results {
match case.result.as_ref().err() {
Some(Error::SkippedBls) => skipped_bls.push(case),
Some(Error::SkippedKnownFailure) => skipped_known_failures.push(case),
Some(_) => failed.push(case),
None => (),
}
}
(failed, skipped_bls, skipped_known_failures)
}
pub fn print_results(
handler_name: &str,
failed: &[&CaseResult],
skipped_bls: &[&CaseResult],
skipped_known_failures: &[&CaseResult],
results: &[CaseResult],
) {
println!("--------------------------------------------------");
println!(
"Test {}",
if failed.is_empty() {
"Result"
} else {
"Failure"
}
);
println!("Title: {}", handler_name);
println!(
"{} tests, {} failed, {} skipped (known failure), {} skipped (bls), {} passed. (See below for errors)",
results.len(),
failed.len(),
skipped_known_failures.len(),
skipped_bls.len(),
results.len() - skipped_bls.len() - skipped_known_failures.len() - failed.len()
);
println!();
for case in skipped_known_failures {
println!("-------");
println!(
"case ({}) from {} skipped because it's a known failure",
case.desc,
case.path.display()
);
}
for failure in failed {
let error = failure.result.clone().unwrap_err();
println!("-------");
println!(
"case {} ({}) from {} failed with {}:",
failure.case_index,
failure.desc,
failure.path.display(),
error.name()
);
println!("{}", error.message());
}
println!();
}

View File

@ -0,0 +1,60 @@
//! Mapping from types to canonical string identifiers used in testing.
use types::*;
pub trait TypeName {
fn name() -> &'static str;
}
#[macro_export]
macro_rules! type_name {
($typ:ident) => {
type_name!($typ, stringify!($typ));
};
($typ:ident, $name:expr) => {
impl TypeName for $typ {
fn name() -> &'static str {
$name
}
}
};
}
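// For example, `type_name!(Checkpoint)` implements `TypeName` for `Checkpoint` with `name()`
// returning "Checkpoint", while `type_name!(MinimalEthSpec, "minimal")` overrides the string
// to "minimal".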
#[macro_export]
macro_rules! type_name_generic {
($typ:ident) => {
type_name_generic!($typ, stringify!($typ));
};
($typ:ident, $name:expr) => {
impl<E: EthSpec> TypeName for $typ<E> {
fn name() -> &'static str {
$name
}
}
};
}
type_name!(MinimalEthSpec, "minimal");
type_name!(MainnetEthSpec, "mainnet");
type_name_generic!(Attestation);
type_name!(AttestationData);
type_name!(AttestationDataAndCustodyBit);
type_name_generic!(AttesterSlashing);
type_name_generic!(BeaconBlock);
type_name_generic!(BeaconBlockBody);
type_name!(BeaconBlockHeader);
type_name_generic!(BeaconState);
type_name!(Checkpoint);
type_name_generic!(CompactCommittee);
type_name!(Crosslink);
type_name!(Deposit);
type_name!(DepositData);
type_name!(Eth1Data);
type_name!(Fork);
type_name_generic!(HistoricalBatch);
type_name_generic!(IndexedAttestation);
type_name_generic!(PendingAttestation);
type_name!(ProposerSlashing);
type_name!(Transfer);
type_name!(Validator);
type_name!(VoluntaryExit);

View File

@ -1,59 +0,0 @@
use super::*;
use ethereum_types::{U128, U256};
use types::Fork;
mod utils;
pub use utils::*;
pub trait YamlDecode: Sized {
/// Decode an object from the test specification YAML.
fn yaml_decode(string: &str) -> Result<Self, Error>;
}
/// Basic types can generally be decoded with the `parse` fn if they implement `str::FromStr`.
macro_rules! impl_via_parse {
($ty: ty) => {
impl YamlDecode for $ty {
fn yaml_decode(string: &str) -> Result<Self, Error> {
string
.parse::<Self>()
.map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))
}
}
};
}
impl_via_parse!(u8);
impl_via_parse!(u16);
impl_via_parse!(u32);
impl_via_parse!(u64);
/// Some `ethereum-types` methods have a `str::FromStr` implementation that expects `0x`-prefixed
/// hex, so we use `from_dec_str` instead.
macro_rules! impl_via_from_dec_str {
($ty: ty) => {
impl YamlDecode for $ty {
fn yaml_decode(string: &str) -> Result<Self, Error> {
Self::from_dec_str(string).map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))
}
}
};
}
impl_via_from_dec_str!(U128);
impl_via_from_dec_str!(U256);
/// Types that already implement `serde::Deserialize` can be decoded using `serde_yaml`.
macro_rules! impl_via_serde_yaml {
($ty: ty) => {
impl YamlDecode for $ty {
fn yaml_decode(string: &str) -> Result<Self, Error> {
serde_yaml::from_str(string)
.map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))
}
}
};
}
impl_via_serde_yaml!(Fork);

View File

@ -1,10 +0,0 @@
pub fn yaml_split_header_and_cases(mut yaml: String) -> (String, String) {
let test_cases_start = yaml.find("\ntest_cases:\n").unwrap();
// + 1 to skip the \n we used for matching.
let mut test_cases = yaml.split_off(test_cases_start + 1);
let end_of_first_line = test_cases.find('\n').unwrap();
let test_cases = test_cases.split_off(end_of_first_line + 1);
(yaml, test_cases)
}

View File

@ -1,225 +1,214 @@
use ef_tests::*;
use rayon::prelude::*;
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
fn yaml_files_in_test_dir(dir: &Path) -> Vec<PathBuf> {
let base_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
.join("eth2.0-spec-tests")
.join("tests")
.join(dir);
assert!(
base_path.exists(),
format!(
"Unable to locate {:?}. Did you init git submodules?",
base_path
)
);
let mut paths: Vec<PathBuf> = WalkDir::new(base_path)
.into_iter()
.filter_map(|e| e.ok())
.filter_map(|entry| {
if entry.file_type().is_file() {
match entry.file_name().to_str() {
Some(f) if f.ends_with(".yaml") => Some(entry.path().to_path_buf()),
Some(f) if f.ends_with(".yml") => Some(entry.path().to_path_buf()),
_ => None,
}
} else {
None
}
})
.collect();
// Reverse the file order. Assuming files come in lexicographical order, executing tests in
// reverse means we get the "minimal" tests before the "mainnet" tests. This makes life easier
// for debugging.
paths.reverse();
paths
}
#[test]
#[cfg(feature = "fake_crypto")]
fn ssz_generic() {
yaml_files_in_test_dir(&Path::new("ssz_generic"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
}
#[test]
#[cfg(feature = "fake_crypto")]
fn ssz_static() {
yaml_files_in_test_dir(&Path::new("ssz_static"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
}
use types::*;
#[test]
fn shuffling() {
yaml_files_in_test_dir(&Path::new("shuffling").join("core"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
ShufflingHandler::<MinimalEthSpec>::run();
ShufflingHandler::<MainnetEthSpec>::run();
}
#[test]
fn operations_deposit() {
yaml_files_in_test_dir(&Path::new("operations").join("deposit"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
OperationsHandler::<MinimalEthSpec, Deposit>::run();
OperationsHandler::<MainnetEthSpec, Deposit>::run();
}
#[test]
fn operations_transfer() {
yaml_files_in_test_dir(&Path::new("operations").join("transfer"))
.into_par_iter()
.rev()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
OperationsHandler::<MinimalEthSpec, Transfer>::run();
// Note: there are no transfer tests for mainnet
}
#[test]
fn operations_exit() {
yaml_files_in_test_dir(&Path::new("operations").join("voluntary_exit"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
OperationsHandler::<MinimalEthSpec, VoluntaryExit>::run();
OperationsHandler::<MainnetEthSpec, VoluntaryExit>::run();
}
#[test]
fn operations_proposer_slashing() {
yaml_files_in_test_dir(&Path::new("operations").join("proposer_slashing"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
OperationsHandler::<MinimalEthSpec, ProposerSlashing>::run();
OperationsHandler::<MainnetEthSpec, ProposerSlashing>::run();
}
#[test]
fn operations_attester_slashing() {
yaml_files_in_test_dir(&Path::new("operations").join("attester_slashing"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
OperationsHandler::<MinimalEthSpec, AttesterSlashing<_>>::run();
OperationsHandler::<MainnetEthSpec, AttesterSlashing<_>>::run();
}
#[test]
fn operations_attestation() {
yaml_files_in_test_dir(&Path::new("operations").join("attestation"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
OperationsHandler::<MinimalEthSpec, Attestation<_>>::run();
OperationsHandler::<MainnetEthSpec, Attestation<_>>::run();
}
#[test]
fn operations_block_header() {
yaml_files_in_test_dir(&Path::new("operations").join("block_header"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
OperationsHandler::<MinimalEthSpec, BeaconBlock<_>>::run();
OperationsHandler::<MainnetEthSpec, BeaconBlock<_>>::run();
}
#[test]
fn sanity_blocks() {
yaml_files_in_test_dir(&Path::new("sanity").join("blocks"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
SanityBlocksHandler::<MinimalEthSpec>::run();
SanityBlocksHandler::<MainnetEthSpec>::run();
}
#[test]
fn sanity_slots() {
yaml_files_in_test_dir(&Path::new("sanity").join("slots"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
SanitySlotsHandler::<MinimalEthSpec>::run();
SanitySlotsHandler::<MainnetEthSpec>::run();
}
#[test]
#[cfg(not(feature = "fake_crypto"))]
fn bls() {
yaml_files_in_test_dir(&Path::new("bls"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
fn bls_aggregate_pubkeys() {
BlsAggregatePubkeysHandler::run();
}
#[test]
#[cfg(not(feature = "fake_crypto"))]
fn bls_aggregate_sigs() {
BlsAggregateSigsHandler::run();
}
#[test]
#[cfg(not(feature = "fake_crypto"))]
fn bls_msg_hash_g2_compressed() {
BlsG2CompressedHandler::run();
}
#[test]
#[cfg(not(feature = "fake_crypto"))]
fn bls_priv_to_pub() {
BlsPrivToPubHandler::run();
}
#[test]
#[cfg(not(feature = "fake_crypto"))]
fn bls_sign_msg() {
BlsSignMsgHandler::run();
}
#[cfg(feature = "fake_crypto")]
macro_rules! ssz_static_test {
// Signed-root
($test_name:ident, $typ:ident$(<$generics:tt>)?, SR) => {
ssz_static_test!($test_name, SszStaticSRHandler, $typ$(<$generics>)?);
};
// Non-signed root
($test_name:ident, $typ:ident$(<$generics:tt>)?) => {
ssz_static_test!($test_name, SszStaticHandler, $typ$(<$generics>)?);
};
// Generic
($test_name:ident, $handler:ident, $typ:ident<_>) => {
ssz_static_test!(
$test_name, $handler, {
($typ<MinimalEthSpec>, MinimalEthSpec),
($typ<MainnetEthSpec>, MainnetEthSpec)
}
);
};
// Non-generic
($test_name:ident, $handler:ident, $typ:ident) => {
ssz_static_test!(
$test_name, $handler, {
($typ, MinimalEthSpec),
($typ, MainnetEthSpec)
}
);
};
// Base case
($test_name:ident, $handler:ident, { $(($typ:ty, $spec:ident)),+ }) => {
#[test]
fn $test_name() {
$(
$handler::<$typ, $spec>::run();
)+
}
};
}
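// For example, `ssz_static_test!(checkpoint, Checkpoint)` expands to a `#[test] fn checkpoint()`
// that runs `SszStaticHandler::<Checkpoint, MinimalEthSpec>::run()` and
// `SszStaticHandler::<Checkpoint, MainnetEthSpec>::run()`; the `SR` form selects
// `SszStaticSRHandler` for types implementing `SignedRoot`.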
#[cfg(feature = "fake_crypto")]
mod ssz_static {
use ef_tests::{Handler, SszStaticHandler, SszStaticSRHandler};
use types::*;
ssz_static_test!(attestation, Attestation<_>, SR);
ssz_static_test!(attestation_data, AttestationData);
ssz_static_test!(
attestation_data_and_custody_bit,
AttestationDataAndCustodyBit
);
ssz_static_test!(attester_slashing, AttesterSlashing<_>);
ssz_static_test!(beacon_block, BeaconBlock<_>, SR);
ssz_static_test!(beacon_block_body, BeaconBlockBody<_>);
ssz_static_test!(beacon_block_header, BeaconBlockHeader, SR);
ssz_static_test!(beacon_state, BeaconState<_>);
ssz_static_test!(checkpoint, Checkpoint);
ssz_static_test!(compact_committee, CompactCommittee<_>);
ssz_static_test!(crosslink, Crosslink);
ssz_static_test!(deposit, Deposit);
ssz_static_test!(deposit_data, DepositData, SR);
ssz_static_test!(eth1_data, Eth1Data);
ssz_static_test!(fork, Fork);
ssz_static_test!(historical_batch, HistoricalBatch<_>);
ssz_static_test!(indexed_attestation, IndexedAttestation<_>, SR);
ssz_static_test!(pending_attestation, PendingAttestation<_>);
ssz_static_test!(proposer_slashing, ProposerSlashing);
ssz_static_test!(transfer, Transfer, SR);
ssz_static_test!(validator, Validator);
ssz_static_test!(voluntary_exit, VoluntaryExit, SR);
}
#[test]
fn ssz_generic() {
SszGenericHandler::<BasicVector>::run();
SszGenericHandler::<Bitlist>::run();
SszGenericHandler::<Bitvector>::run();
SszGenericHandler::<Boolean>::run();
SszGenericHandler::<Uints>::run();
SszGenericHandler::<Containers>::run();
}
#[test]
fn epoch_processing_justification_and_finalization() {
yaml_files_in_test_dir(&Path::new("epoch_processing").join("justification_and_finalization"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
EpochProcessingHandler::<MinimalEthSpec, JustificationAndFinalization>::run();
EpochProcessingHandler::<MainnetEthSpec, JustificationAndFinalization>::run();
}
#[test]
fn epoch_processing_crosslinks() {
yaml_files_in_test_dir(&Path::new("epoch_processing").join("crosslinks"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
EpochProcessingHandler::<MinimalEthSpec, Crosslinks>::run();
EpochProcessingHandler::<MainnetEthSpec, Crosslinks>::run();
}
#[test]
fn epoch_processing_registry_updates() {
yaml_files_in_test_dir(&Path::new("epoch_processing").join("registry_updates"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
EpochProcessingHandler::<MinimalEthSpec, RegistryUpdates>::run();
EpochProcessingHandler::<MainnetEthSpec, RegistryUpdates>::run();
}
#[test]
fn epoch_processing_slashings() {
yaml_files_in_test_dir(&Path::new("epoch_processing").join("slashings"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
EpochProcessingHandler::<MinimalEthSpec, Slashings>::run();
EpochProcessingHandler::<MainnetEthSpec, Slashings>::run();
}
#[test]
fn epoch_processing_final_updates() {
yaml_files_in_test_dir(&Path::new("epoch_processing").join("final_updates"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
EpochProcessingHandler::<MinimalEthSpec, FinalUpdates>::run();
EpochProcessingHandler::<MainnetEthSpec, FinalUpdates>::run();
}
#[test]
fn genesis_initialization() {
yaml_files_in_test_dir(&Path::new("genesis").join("initialization"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
GenesisInitializationHandler::<MinimalEthSpec>::run();
}
#[test]
fn genesis_validity() {
yaml_files_in_test_dir(&Path::new("genesis").join("validity"))
.into_par_iter()
.for_each(|file| {
Doc::assert_tests_pass(file);
});
GenesisValidityHandler::<MinimalEthSpec>::run();
// Note: there are no genesis validity tests for mainnet
}