Shuffling and sanity tests
parent 23a308e595
commit 81cafdc804
@@ -3,7 +3,6 @@ use crate::{Epoch, Hash256};
use serde_derive::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
use test_random_derive::TestRandom;
use tree_hash::TreeHash;
use tree_hash_derive::TreeHash;

/// Casper FFG checkpoint, used in attestations.
@@ -5,7 +5,7 @@ authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"

[dependencies]
milagro_bls = { git = "https://github.com/sigp/milagro_bls", tag = "v0.9.0" }
milagro_bls = { git = "https://github.com/michaelsproul/milagro_bls", branch = "little-endian" }
eth2_hashing = { path = "../eth2_hashing" }
hex = "0.3"
rand = "^0.5"
@@ -1,6 +1,7 @@
use super::*;
use compare_fields::{CompareFields, Comparison, FieldComparison};
use std::fmt::Debug;
use std::path::PathBuf;
use types::BeaconState;

pub const MAX_VALUE_STRING_LEN: usize = 500;

@@ -9,6 +10,7 @@ pub const MAX_VALUE_STRING_LEN: usize = 500;
pub struct CaseResult {
    pub case_index: usize,
    pub desc: String,
    pub path: PathBuf,
    pub result: Result<(), Error>,
}

@@ -17,6 +19,7 @@ impl CaseResult {
        CaseResult {
            case_index,
            desc: case.description(),
            path: case.path().into(),
            result,
        }
    }
@@ -67,6 +67,11 @@ pub trait Case: Debug {
        "no description".to_string()
    }

    /// Path to the directory for this test case.
    fn path(&self) -> &Path {
        Path::new("")
    }

    /// Execute a test and return the result.
    ///
    /// `case_index` reports the index of the case in the set of test cases. It is not strictly
@@ -76,19 +81,13 @@ pub trait Case: Debug {

pub trait BlsCase: serde::de::DeserializeOwned {}

impl<T> YamlDecode for T
where
    T: BlsCase,
{
impl<T: BlsCase> YamlDecode for T {
    fn yaml_decode(string: &str) -> Result<Self, Error> {
        serde_yaml::from_str(string).map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))
    }
}

impl<T> LoadCase for T
where
    T: BlsCase,
{
impl<T: BlsCase> LoadCase for T {
    fn load_from_dir(path: &Path) -> Result<Self, Error> {
        Self::yaml_decode_file(&path.join("data.yaml"))
    }

@@ -111,37 +110,3 @@ where
        .collect()
    }
}

// FIXME(michael): delete this
impl<T: YamlDecode> YamlDecode for Cases<T> {
    /// Decodes a YAML list of test cases
    fn yaml_decode(yaml: &str) -> Result<Self, Error> {
        let mut p = 0;
        let mut elems: Vec<&str> = yaml
            .match_indices("\n- ")
            // Skip the `\n` used for matching a new line
            .map(|(i, _)| i + 1)
            .map(|i| {
                let yaml_element = &yaml[p..i];
                p = i;

                yaml_element
            })
            .collect();

        elems.push(&yaml[p..]);

        let test_cases = elems
            .iter()
            .map(|s| {
                // Remove the `- ` prefix.
                let s = &s[2..];
                // Remove a single level of indenting.
                s.replace("\n ", "\n")
            })
            .map(|s| T::yaml_decode(&s.to_string()).unwrap())
            .collect();

        Ok(Self { test_cases })
    }
}
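For reference, the yaml_decode removed above splits the document at every top-level "\n- " marker and later strips the "- " prefix and one level of indent from each piece. A small standalone illustration of just the splitting step, not taken from the commit, with a made-up two-element YAML list:

fn main() {
    let yaml = "- a: 1\n  b: 2\n- a: 3\n  b: 4";

    let mut p = 0;
    let mut elems: Vec<&str> = yaml
        .match_indices("\n- ")
        // Advance past the `\n` only, so the next slice still starts with `- `.
        .map(|(i, _)| i + 1)
        .map(|i| {
            let element = &yaml[p..i];
            p = i;
            element
        })
        .collect();
    // The final element runs to the end of the string.
    elems.push(&yaml[p..]);

    assert_eq!(elems, vec!["- a: 1\n  b: 2\n", "- a: 3\n  b: 4"]);
}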
@@ -41,14 +41,9 @@ impl Case for BlsG2Compressed {
    }
}

// Converts a vector to u64 (from big endian)
// Converts a vector to u64 (from little endian)
fn bytes_to_u64(array: &[u8]) -> u64 {
    let mut result: u64 = 0;
    for (i, value) in array.iter().rev().enumerate() {
        if i == 8 {
            break;
        }
        result += u64::pow(2, i as u32 * 8) * u64::from(*value);
    }
    result
    let mut bytes = [0u8; 8];
    bytes.copy_from_slice(array);
    u64::from_le_bytes(bytes)
}
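Note that the two helpers are not equivalent: the removed loop walked the slice back to front, which is a big-endian reading, while u64::from_le_bytes treats byte 0 as least significant (and requires exactly 8 bytes, since copy_from_slice panics on a length mismatch). A minimal standalone sketch, not part of this commit, with arbitrary byte values:

fn main() {
    let bytes = [0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02];

    // New behaviour: byte 0 is least significant (little-endian).
    let le = u64::from_le_bytes(bytes);
    assert_eq!(le, 0x0200_0000_0000_0001);

    // Old behaviour: iterate in reverse, so the *last* byte is least
    // significant, i.e. a big-endian reading of the same slice.
    let mut be: u64 = 0;
    for (i, value) in bytes.iter().rev().enumerate() {
        if i == 8 {
            break;
        }
        be += u64::pow(2, i as u32 * 8) * u64::from(*value);
    }
    assert_eq!(be, 0x0100_0000_0000_0002);
}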
@@ -41,16 +41,11 @@ impl Case for BlsSign {
    }
}

// Converts a vector to u64 (from big endian)
// Converts a vector to u64 (from little endian)
fn bytes_to_u64(array: &[u8]) -> u64 {
    let mut result: u64 = 0;
    for (i, value) in array.iter().rev().enumerate() {
        if i == 8 {
            break;
        }
        result += u64::pow(2, i as u32 * 8) * u64::from(*value);
    }
    result
    let mut bytes = [0u8; 8];
    bytes.copy_from_slice(array);
    u64::from_le_bytes(bytes)
}

// Increase the size of an array to 48 bytes
@@ -1,35 +1,72 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::yaml_decode::{ssz_decode_file, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::{
    per_block_processing, per_slot_processing, BlockInvalid, BlockProcessingError,
};
use std::path::PathBuf;
use types::{BeaconBlock, BeaconState, EthSpec, RelativeEpoch};

#[derive(Debug, Clone, Deserialize)]
pub struct Metadata {
    pub description: Option<String>,
    pub bls_setting: Option<BlsSetting>,
    pub blocks_count: usize,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct SanityBlocks<E: EthSpec> {
    pub description: String,
    pub bls_setting: Option<BlsSetting>,
    pub path: PathBuf,
    pub metadata: Metadata,
    pub pre: BeaconState<E>,
    pub blocks: Vec<BeaconBlock<E>>,
    pub post: Option<BeaconState<E>>,
}

impl<E: EthSpec> YamlDecode for SanityBlocks<E> {
    fn yaml_decode(yaml: &str) -> Result<Self, Error> {
        Ok(serde_yaml::from_str(yaml).unwrap())
impl<E: EthSpec> LoadCase for SanityBlocks<E> {
    fn load_from_dir(path: &Path) -> Result<Self, Error> {
        let metadata: Metadata = yaml_decode_file(&path.join("meta.yaml"))?;
        let pre = ssz_decode_file(&path.join("pre.ssz"))?;
        let blocks: Vec<BeaconBlock<E>> = (0..metadata.blocks_count)
            .map(|i| {
                let filename = format!("blocks_{}.ssz", i);
                ssz_decode_file(&path.join(filename))
            })
            .collect::<Result<_, _>>()?;
        let post_file = path.join("post.ssz");
        let post = if post_file.is_file() {
            Some(ssz_decode_file(&post_file)?)
        } else {
            None
        };

        Ok(Self {
            path: path.into(),
            metadata,
            pre,
            blocks,
            post,
        })
    }
}

impl<E: EthSpec> Case for SanityBlocks<E> {
    fn description(&self) -> String {
        self.description.clone()
        self.metadata
            .description
            .clone()
            .unwrap_or_else(String::new)
    }

    fn path(&self) -> &Path {
        &self.path
    }

    fn result(&self, _case_index: usize) -> Result<(), Error> {
        self.bls_setting.unwrap_or_default().check()?;
        self.metadata.bls_setting.unwrap_or_default().check()?;

        let mut state = self.pre.clone();
        let mut expected = self.post.clone();
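The blocks field above relies on collecting an iterator of Result values into Result<Vec<_>, _>, so a single failed SSZ decode aborts the whole case load. A small illustration of that idiom, not taken from the commit; the error string is made up:

fn main() {
    // All items Ok: the whole collection succeeds.
    let ok: Result<Vec<u32>, String> = (0..3u32).map(Ok).collect();
    assert_eq!(ok, Ok(vec![0, 1, 2]));

    // One Err short-circuits and becomes the overall result.
    let err: Result<Vec<u32>, String> = (0..3u32)
        .map(|i| {
            if i == 1 {
                Err("failed to decode blocks_1.ssz".to_string())
            } else {
                Ok(i)
            }
        })
        .collect();
    assert_eq!(err, Err("failed to decode blocks_1.ssz".to_string()));
}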
@@ -1,30 +1,70 @@
use super::*;
use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::yaml_decode::{ssz_decode_file, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::per_slot_processing;
use std::path::PathBuf;
use types::{BeaconState, EthSpec};

#[derive(Debug, Clone, Default, Deserialize)]
pub struct Metadata {
    pub description: Option<String>,
    pub bls_setting: Option<BlsSetting>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(bound = "E: EthSpec")]
pub struct SanitySlots<E: EthSpec> {
    pub description: String,
    pub path: PathBuf,
    pub metadata: Metadata,
    pub pre: BeaconState<E>,
    pub slots: usize,
    pub slots: u64,
    pub post: Option<BeaconState<E>>,
}

impl<E: EthSpec> YamlDecode for SanitySlots<E> {
    fn yaml_decode(yaml: &str) -> Result<Self, Error> {
        Ok(serde_yaml::from_str(yaml).unwrap())
impl<E: EthSpec> LoadCase for SanitySlots<E> {
    fn load_from_dir(path: &Path) -> Result<Self, Error> {
        let metadata_path = path.join("meta.yaml");
        let metadata: Metadata = if metadata_path.is_file() {
            yaml_decode_file(&path.join("meta.yaml"))?
        } else {
            Metadata::default()
        };
        let pre = ssz_decode_file(&path.join("pre.ssz"))?;
        let slots: u64 = yaml_decode_file(&path.join("slots.yaml"))?;
        let post_file = path.join("post.ssz");
        let post = if post_file.is_file() {
            Some(ssz_decode_file(&post_file)?)
        } else {
            None
        };

        Ok(Self {
            path: path.into(),
            metadata,
            pre,
            slots,
            post,
        })
    }
}

impl<E: EthSpec> Case for SanitySlots<E> {
    fn description(&self) -> String {
        self.description.clone()
        self.metadata
            .description
            .clone()
            .unwrap_or_else(String::new)
    }

    fn path(&self) -> &Path {
        &self.path
    }

    fn result(&self, _case_index: usize) -> Result<(), Error> {
        self.metadata.bls_setting.unwrap_or_default().check()?;

        let mut state = self.pre.clone();
        let mut expected = self.post.clone();
        let spec = &E::default_spec();
@@ -8,7 +8,7 @@ use swap_or_not_shuffle::{get_permutated_index, shuffle_list};
pub struct Shuffling<T> {
    pub seed: String,
    pub count: usize,
    pub shuffled: Vec<usize>,
    pub mapping: Vec<usize>,
    #[serde(skip)]
    _phantom: PhantomData<T>,
}

@@ -19,10 +19,16 @@ impl<T> YamlDecode for Shuffling<T> {
    }
}

impl<T: EthSpec> LoadCase for Shuffling<T> {
    fn load_from_dir(path: &Path) -> Result<Self, Error> {
        Self::yaml_decode_file(&path.join("mapping.yaml"))
    }
}

impl<T: EthSpec> Case for Shuffling<T> {
    fn result(&self, _case_index: usize) -> Result<(), Error> {
        if self.count == 0 {
            compare_result::<_, Error>(&Ok(vec![]), &Some(self.shuffled.clone()))?;
            compare_result::<_, Error>(&Ok(vec![]), &Some(self.mapping.clone()))?;
        } else {
            let spec = T::default_spec();
            let seed = hex::decode(&self.seed[2..])

@@ -34,12 +40,12 @@ impl<T: EthSpec> Case for Shuffling<T> {
                    get_permutated_index(i, self.count, &seed, spec.shuffle_round_count).unwrap()
                })
                .collect();
            compare_result::<_, Error>(&Ok(shuffling), &Some(self.shuffled.clone()))?;
            compare_result::<_, Error>(&Ok(shuffling), &Some(self.mapping.clone()))?;

            // Test "shuffle_list"
            let input: Vec<usize> = (0..self.count).collect();
            let shuffling = shuffle_list(input, spec.shuffle_round_count, &seed, false).unwrap();
            compare_result::<_, Error>(&Ok(shuffling), &Some(self.shuffled.clone()))?;
            compare_result::<_, Error>(&Ok(shuffling), &Some(self.mapping.clone()))?;
        }

        Ok(())
@@ -1,274 +0,0 @@
use crate::case_result::CaseResult;
use crate::cases::*;
use crate::doc_header::DocHeader;
use crate::error::Error;
use crate::yaml_decode::YamlDecode;
use crate::EfTest;
use serde_derive::Deserialize;
use std::{
    fs::File,
    io::prelude::*,
    path::{Path, PathBuf},
};
use types::{MainnetEthSpec, MinimalEthSpec};

#[derive(Debug, Deserialize)]
pub struct Doc {
    pub header_yaml: String,
    pub cases_yaml: String,
    pub path: PathBuf,
}

impl Doc {
    fn from_path(path: PathBuf) -> Self {
        let mut file = File::open(path.clone()).unwrap();

        let mut cases_yaml = String::new();
        file.read_to_string(&mut cases_yaml).unwrap();

        Self {
            cases_yaml,
            path,
            header_yaml: String::new(),
        }
    }

    pub fn test_results(&self) -> Vec<CaseResult> {
        let header: DocHeader = serde_yaml::from_str(&self.header_yaml.as_str()).unwrap();

        match (
            header.runner.as_ref(),
            header.handler.as_ref(),
            header.config.as_ref(),
        ) {
            ("ssz", "uint", _) => run_test::<SszGeneric>(self),
            ("sanity", "slots", "minimal") => run_test::<SanitySlots<MinimalEthSpec>>(self),
            // FIXME: skipped due to compact committees issue
            ("sanity", "slots", "mainnet") => vec![], // run_test::<SanitySlots<MainnetEthSpec>>(self),
            ("sanity", "blocks", "minimal") => run_test::<SanityBlocks<MinimalEthSpec>>(self),
            // FIXME: skipped due to compact committees issue
            ("sanity", "blocks", "mainnet") => vec![], // run_test::<SanityBlocks<MainnetEthSpec>>(self),
            ("shuffling", "core", "minimal") => run_test::<Shuffling<MinimalEthSpec>>(self),
            ("shuffling", "core", "mainnet") => run_test::<Shuffling<MainnetEthSpec>>(self),
            ("bls", "aggregate_pubkeys", "mainnet") => run_test::<BlsAggregatePubkeys>(self),
            ("bls", "aggregate_sigs", "mainnet") => run_test::<BlsAggregateSigs>(self),
            ("bls", "msg_hash_compressed", "mainnet") => run_test::<BlsG2Compressed>(self),
            // Note this test fails due to a difference in our internal representations. It does
            // not affect verification or external representation.
            //
            // It is skipped.
            ("bls", "msg_hash_uncompressed", "mainnet") => vec![],
            ("bls", "priv_to_pub", "mainnet") => run_test::<BlsPrivToPub>(self),
            ("bls", "sign_msg", "mainnet") => run_test::<BlsSign>(self),
            ("operations", "deposit", "mainnet") => {
                run_test::<OperationsDeposit<MainnetEthSpec>>(self)
            }
            ("operations", "deposit", "minimal") => {
                run_test::<OperationsDeposit<MinimalEthSpec>>(self)
            }
            ("operations", "transfer", "mainnet") => {
                run_test::<OperationsTransfer<MainnetEthSpec>>(self)
            }
            ("operations", "transfer", "minimal") => {
                run_test::<OperationsTransfer<MinimalEthSpec>>(self)
            }
            ("operations", "voluntary_exit", "mainnet") => {
                run_test::<OperationsExit<MainnetEthSpec>>(self)
            }
            ("operations", "voluntary_exit", "minimal") => {
                run_test::<OperationsExit<MinimalEthSpec>>(self)
            }
            ("operations", "proposer_slashing", "mainnet") => {
                run_test::<OperationsProposerSlashing<MainnetEthSpec>>(self)
            }
            ("operations", "proposer_slashing", "minimal") => {
                run_test::<OperationsProposerSlashing<MinimalEthSpec>>(self)
            }
            ("operations", "attester_slashing", "mainnet") => {
                run_test::<OperationsAttesterSlashing<MainnetEthSpec>>(self)
            }
            ("operations", "attester_slashing", "minimal") => {
                run_test::<OperationsAttesterSlashing<MinimalEthSpec>>(self)
            }
            ("operations", "attestation", "mainnet") => {
                run_test::<OperationsAttestation<MainnetEthSpec>>(self)
            }
            ("operations", "attestation", "minimal") => {
                run_test::<OperationsAttestation<MinimalEthSpec>>(self)
            }
            ("operations", "block_header", "mainnet") => {
                run_test::<OperationsBlockHeader<MainnetEthSpec>>(self)
            }
            ("operations", "block_header", "minimal") => {
                run_test::<OperationsBlockHeader<MinimalEthSpec>>(self)
            }
            ("epoch_processing", "crosslinks", "minimal") => {
                run_test::<EpochProcessingCrosslinks<MinimalEthSpec>>(self)
            }
            ("epoch_processing", "crosslinks", "mainnet") => {
                run_test::<EpochProcessingCrosslinks<MainnetEthSpec>>(self)
            }
            ("epoch_processing", "registry_updates", "minimal") => {
                run_test::<EpochProcessingRegistryUpdates<MinimalEthSpec>>(self)
            }
            ("epoch_processing", "registry_updates", "mainnet") => {
                run_test::<EpochProcessingRegistryUpdates<MainnetEthSpec>>(self)
            }
            ("epoch_processing", "justification_and_finalization", "minimal") => {
                run_test::<EpochProcessingJustificationAndFinalization<MinimalEthSpec>>(self)
            }
            ("epoch_processing", "justification_and_finalization", "mainnet") => {
                run_test::<EpochProcessingJustificationAndFinalization<MainnetEthSpec>>(self)
            }
            ("epoch_processing", "slashings", "minimal") => {
                run_test::<EpochProcessingSlashings<MinimalEthSpec>>(self)
            }
            ("epoch_processing", "slashings", "mainnet") => {
                run_test::<EpochProcessingSlashings<MainnetEthSpec>>(self)
            }
            ("epoch_processing", "final_updates", "minimal") => {
                run_test::<EpochProcessingFinalUpdates<MinimalEthSpec>>(self)
            }
            ("epoch_processing", "final_updates", "mainnet") => {
                vec![]
                // FIXME: skipped due to compact committees issue
                // run_test::<EpochProcessingFinalUpdates<MainnetEthSpec>>(self)
            }
            ("genesis", "initialization", "minimal") => {
                run_test::<GenesisInitialization<MinimalEthSpec>>(self)
            }
            ("genesis", "initialization", "mainnet") => {
                run_test::<GenesisInitialization<MainnetEthSpec>>(self)
            }
            ("genesis", "validity", "minimal") => run_test::<GenesisValidity<MinimalEthSpec>>(self),
            ("genesis", "validity", "mainnet") => run_test::<GenesisValidity<MainnetEthSpec>>(self),
            (runner, handler, config) => panic!(
                "No implementation for runner: \"{}\", handler: \"{}\", config: \"{}\"",
                runner, handler, config
            ),
        }
    }

    pub fn assert_tests_pass(path: PathBuf) {
        let doc = Self::from_path(path);
        let results = doc.test_results();

        let (failed, skipped_bls, skipped_known_failures) = categorize_results(&results);

        if failed.len() + skipped_known_failures.len() > 0 {
            print_results(
                &doc,
                &failed,
                &skipped_bls,
                &skipped_known_failures,
                &results,
            );
            if !failed.is_empty() {
                panic!("Tests failed (see above)");
            }
        } else {
            println!("Passed {} tests in {:?}", results.len(), doc.path);
        }
    }
}

pub fn assert_tests_pass(path: &Path, results: &[CaseResult]) {
    let doc = Doc {
        header_yaml: String::new(),
        cases_yaml: String::new(),
        path: path.into(),
    };

    let (failed, skipped_bls, skipped_known_failures) = categorize_results(results);

    if failed.len() + skipped_known_failures.len() > 0 {
        print_results(
            &doc,
            &failed,
            &skipped_bls,
            &skipped_known_failures,
            &results,
        );
        if !failed.is_empty() {
            panic!("Tests failed (see above)");
        }
    } else {
        println!("Passed {} tests in {}", results.len(), path.display());
    }
}

pub fn run_test<T>(_: &Doc) -> Vec<CaseResult>
where
    Cases<T>: EfTest + YamlDecode,
{
    panic!("FIXME(michael): delete this")
}

pub fn categorize_results(
    results: &[CaseResult],
) -> (Vec<&CaseResult>, Vec<&CaseResult>, Vec<&CaseResult>) {
    let mut failed = vec![];
    let mut skipped_bls = vec![];
    let mut skipped_known_failures = vec![];

    for case in results {
        match case.result.as_ref().err() {
            Some(Error::SkippedBls) => skipped_bls.push(case),
            Some(Error::SkippedKnownFailure) => skipped_known_failures.push(case),
            Some(_) => failed.push(case),
            None => (),
        }
    }

    (failed, skipped_bls, skipped_known_failures)
}

pub fn print_results(
    doc: &Doc,
    failed: &[&CaseResult],
    skipped_bls: &[&CaseResult],
    skipped_known_failures: &[&CaseResult],
    results: &[CaseResult],
) {
    println!("--------------------------------------------------");
    println!(
        "Test {}",
        if failed.is_empty() {
            "Result"
        } else {
            "Failure"
        }
    );
    println!("Title: TODO");
    println!("File: {:?}", doc.path);
    println!(
        "{} tests, {} failed, {} skipped (known failure), {} skipped (bls), {} passed. (See below for errors)",
        results.len(),
        failed.len(),
        skipped_known_failures.len(),
        skipped_bls.len(),
        results.len() - skipped_bls.len() - skipped_known_failures.len() - failed.len()
    );
    println!();

    for case in skipped_known_failures {
        println!("-------");
        println!(
            "case[{}] ({}) skipped because it's a known failure",
            case.case_index, case.desc,
        );
    }
    for failure in failed {
        let error = failure.result.clone().unwrap_err();

        println!("-------");
        println!(
            "case[{}] ({}) failed with {}:",
            failure.case_index,
            failure.desc,
            error.name()
        );
        println!("{}", error.message());
    }
    println!();
}
@@ -1,12 +0,0 @@
use serde_derive::Deserialize;

#[derive(Debug, Deserialize)]
pub struct DocHeader {
    pub title: String,
    pub summary: String,
    pub forks_timeline: String,
    pub forks: Vec<String>,
    pub config: String,
    pub runner: String,
    pub handler: String,
}
@@ -5,6 +5,7 @@ use std::fs;
use std::marker::PhantomData;
use std::path::PathBuf;
use tree_hash::SignedRoot;
use types::EthSpec;

pub trait Handler {
    type Case: Case + LoadCase;

@@ -47,7 +48,8 @@ pub trait Handler {

        let results = Cases { test_cases }.test_results();

        crate::doc::assert_tests_pass(&handler_path, &results);
        let name = format!("{}/{}", Self::runner_name(), Self::handler_name());
        crate::results::assert_tests_pass(&name, &handler_path, &results);
    }
}

@@ -128,3 +130,57 @@ where
        T::name()
    }
}

pub struct ShufflingHandler<E>(PhantomData<E>);

impl<E: EthSpec + TypeName> Handler for ShufflingHandler<E> {
    type Case = cases::Shuffling<E>;

    fn config_name() -> &'static str {
        E::name()
    }

    fn runner_name() -> &'static str {
        "shuffling"
    }

    fn handler_name() -> &'static str {
        "core"
    }
}

pub struct SanityBlocksHandler<E>(PhantomData<E>);

impl<E: EthSpec + TypeName> Handler for SanityBlocksHandler<E> {
    type Case = cases::SanityBlocks<E>;

    fn config_name() -> &'static str {
        E::name()
    }

    fn runner_name() -> &'static str {
        "sanity"
    }

    fn handler_name() -> &'static str {
        "blocks"
    }
}

pub struct SanitySlotsHandler<E>(PhantomData<E>);

impl<E: EthSpec + TypeName> Handler for SanitySlotsHandler<E> {
    type Case = cases::SanitySlots<E>;

    fn config_name() -> &'static str {
        E::name()
    }

    fn runner_name() -> &'static str {
        "sanity"
    }

    fn handler_name() -> &'static str {
        "slots"
    }
}
@@ -2,7 +2,6 @@ use types::EthSpec;

pub use case_result::CaseResult;
pub use cases::Case;
pub use doc::Doc;
pub use error::Error;
pub use handler::*;
pub use yaml_decode::YamlDecode;

@@ -10,10 +9,9 @@ pub use yaml_decode::YamlDecode;
mod bls_setting;
mod case_result;
mod cases;
mod doc;
mod doc_header;
mod error;
mod handler;
mod results;
mod type_name;
mod yaml_decode;
tests/ef_tests/src/results.rs (new file, 91 lines)

@@ -0,0 +1,91 @@
use crate::case_result::CaseResult;
use crate::error::Error;
use std::path::Path;

pub fn assert_tests_pass(handler_name: &str, path: &Path, results: &[CaseResult]) {
    let (failed, skipped_bls, skipped_known_failures) = categorize_results(results);

    if failed.len() + skipped_known_failures.len() > 0 {
        print_results(
            handler_name,
            &failed,
            &skipped_bls,
            &skipped_known_failures,
            &results,
        );
        if !failed.is_empty() {
            panic!("Tests failed (see above)");
        }
    } else {
        println!("Passed {} tests in {}", results.len(), path.display());
    }
}

pub fn categorize_results(
    results: &[CaseResult],
) -> (Vec<&CaseResult>, Vec<&CaseResult>, Vec<&CaseResult>) {
    let mut failed = vec![];
    let mut skipped_bls = vec![];
    let mut skipped_known_failures = vec![];

    for case in results {
        match case.result.as_ref().err() {
            Some(Error::SkippedBls) => skipped_bls.push(case),
            Some(Error::SkippedKnownFailure) => skipped_known_failures.push(case),
            Some(_) => failed.push(case),
            None => (),
        }
    }

    (failed, skipped_bls, skipped_known_failures)
}

pub fn print_results(
    handler_name: &str,
    failed: &[&CaseResult],
    skipped_bls: &[&CaseResult],
    skipped_known_failures: &[&CaseResult],
    results: &[CaseResult],
) {
    println!("--------------------------------------------------");
    println!(
        "Test {}",
        if failed.is_empty() {
            "Result"
        } else {
            "Failure"
        }
    );
    println!("Title: {}", handler_name);
    println!(
        "{} tests, {} failed, {} skipped (known failure), {} skipped (bls), {} passed. (See below for errors)",
        results.len(),
        failed.len(),
        skipped_known_failures.len(),
        skipped_bls.len(),
        results.len() - skipped_bls.len() - skipped_known_failures.len() - failed.len()
    );
    println!();

    for case in skipped_known_failures {
        println!("-------");
        println!(
            "case ({}) from {} skipped because it's a known failure",
            case.desc,
            case.path.display()
        );
    }
    for failure in failed {
        let error = failure.result.clone().unwrap_err();

        println!("-------");
        println!(
            "case ({}) from {} failed with {}:",
            failure.desc,
            failure.path.display(),
            error.name()
        );
        println!("{}", error.message());
    }
    println!();
}
@@ -4,6 +4,34 @@ use std::fs;
use std::path::Path;
use types::Fork;

pub fn yaml_decode<T: serde::de::DeserializeOwned>(string: &str) -> Result<T, Error> {
    serde_yaml::from_str(string).map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))
}

pub fn yaml_decode_file<T: serde::de::DeserializeOwned>(path: &Path) -> Result<T, Error> {
    fs::read_to_string(path)
        .map_err(|e| {
            Error::FailedToParseTest(format!("Unable to load {}: {:?}", path.display(), e))
        })
        .and_then(|s| yaml_decode(&s))
}

pub fn ssz_decode_file<T: ssz::Decode>(path: &Path) -> Result<T, Error> {
    fs::read(path)
        .map_err(|e| {
            Error::FailedToParseTest(format!("Unable to load {}: {:?}", path.display(), e))
        })
        .and_then(|s| {
            T::from_ssz_bytes(&s).map_err(|e| {
                Error::FailedToParseTest(format!(
                    "Unable to parse SSZ at {}: {:?}",
                    path.display(),
                    e
                ))
            })
        })
}

pub trait YamlDecode: Sized {
    /// Decode an object from the test specification YAML.
    fn yaml_decode(string: &str) -> Result<Self, Error>;
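A usage sketch for the two file helpers above, not taken from the commit. It assumes the code lives inside the ef_tests crate where Error, yaml_decode_file and ssz_decode_file are in scope, and it hand-loads the pieces of a sanity/slots case the way SanitySlots::load_from_dir does; the case directory path is hypothetical.

use std::path::Path;
use types::{BeaconState, MinimalEthSpec};

fn load_slots_case_by_hand() -> Result<(), Error> {
    let case_dir = Path::new("eth2.0-spec-tests/tests/sanity/slots/some_case");

    // slots.yaml holds a bare integer; pre.ssz holds an SSZ-encoded state.
    let slots: u64 = yaml_decode_file(&case_dir.join("slots.yaml"))?;
    let _pre: BeaconState<MinimalEthSpec> = ssz_decode_file(&case_dir.join("pre.ssz"))?;

    println!("case advances the pre-state by {} slots", slots);
    Ok(())
}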
@@ -48,6 +48,7 @@ fn yaml_files_in_test_dir(dir: &Path) -> Vec<PathBuf> {
    paths
}

/*
#[test]
#[cfg(feature = "fake_crypto")]
fn ssz_generic() {

@@ -58,6 +59,7 @@ fn ssz_generic() {
    });
}


#[test]
#[cfg(feature = "fake_crypto")]
fn ssz_static() {

@@ -67,16 +69,15 @@ fn ssz_static() {
        Doc::assert_tests_pass(file);
    });
}
*/

#[test]
fn shuffling() {
    yaml_files_in_test_dir(&Path::new("shuffling").join("core"))
        .into_par_iter()
        .for_each(|file| {
            Doc::assert_tests_pass(file);
        });
    ShufflingHandler::<MinimalEthSpec>::run();
    ShufflingHandler::<MainnetEthSpec>::run();
}

/*
#[test]
fn operations_deposit() {
    yaml_files_in_test_dir(&Path::new("operations").join("deposit"))

@@ -140,25 +141,21 @@ fn operations_block_header() {
        Doc::assert_tests_pass(file);
    });
}
*/

#[test]
fn sanity_blocks() {
    yaml_files_in_test_dir(&Path::new("sanity").join("blocks"))
        .into_par_iter()
        .for_each(|file| {
            Doc::assert_tests_pass(file);
        });
    SanityBlocksHandler::<MinimalEthSpec>::run();
    SanityBlocksHandler::<MainnetEthSpec>::run();
}

#[test]
fn sanity_slots() {
    yaml_files_in_test_dir(&Path::new("sanity").join("slots"))
        .into_par_iter()
        .for_each(|file| {
            Doc::assert_tests_pass(file);
        });
    SanitySlotsHandler::<MinimalEthSpec>::run();
    SanitySlotsHandler::<MainnetEthSpec>::run();
}

/*
#[test]
#[cfg(not(feature = "fake_crypto"))]
fn bls() {

@@ -168,6 +165,7 @@ fn bls() {
        Doc::assert_tests_pass(file);
    });
}
*/

#[test]
#[cfg(not(feature = "fake_crypto"))]

@@ -264,6 +262,7 @@ ssz_static_test!(ssz_static_transfer, Transfer, SR);
ssz_static_test!(ssz_static_validator, Validator);
ssz_static_test!(ssz_static_voluntary_exit, VoluntaryExit, SR);

/*
#[test]
fn epoch_processing_justification_and_finalization() {
    yaml_files_in_test_dir(&Path::new("epoch_processing").join("justification_and_finalization"))

@@ -326,3 +325,4 @@ fn genesis_validity() {
        Doc::assert_tests_pass(file);
    });
}
*/