Update spec tests to v1.1.0-beta.4 (#2548)
## Proposed Changes
Bump the spec tests to v1.1.0-beta.4, including the new randomised tests (which all pass 🎉).
Commit a844ce5ba9 (parent 00a7ef0036).

## Diff
```diff
@@ -1,4 +1,4 @@
-testing/ef_tests/eth2.0-spec-tests
+testing/ef_tests/consensus-spec-tests
 target/
 *.data
 *.tar.gz
```
`.github/workflows/test-suite.yml`:

```diff
@@ -98,7 +98,7 @@ jobs:
     - uses: actions/checkout@v1
     - name: Get latest version of stable Rust
       run: rustup update stable
-    - name: Run eth2.0-spec-tests with blst, milagro and fake_crypto
+    - name: Run consensus-spec-tests with blst, milagro and fake_crypto
       run: make test-ef
   dockerfile-ubuntu:
     name: dockerfile-ubuntu
```
`Makefile`:

```diff
@@ -105,7 +105,7 @@ run-ef-tests:
 	cargo test --release --manifest-path=$(EF_TESTS)/Cargo.toml --features "ef_tests"
 	cargo test --release --manifest-path=$(EF_TESTS)/Cargo.toml --features "ef_tests,fake_crypto"
 	cargo test --release --manifest-path=$(EF_TESTS)/Cargo.toml --features "ef_tests,milagro"
-	./$(EF_TESTS)/check_all_files_accessed.py $(EF_TESTS)/.accessed_file_log.txt $(EF_TESTS)/eth2.0-spec-tests
+	./$(EF_TESTS)/check_all_files_accessed.py $(EF_TESTS)/.accessed_file_log.txt $(EF_TESTS)/consensus-spec-tests
 
 # Run the tests in the `beacon_chain` crate.
 test-beacon-chain: test-beacon-chain-base test-beacon-chain-altair
```
```diff
@@ -36,15 +36,15 @@ you can run them locally and avoid CI failures:
 
 _The lighthouse test suite is quite extensive, running the whole suite may take 30+ minutes._
 
-### Ethereum 2.0 Spec Tests
+### Consensus Spec Tests
 
 The
-[ethereum/eth2.0-spec-tests](https://github.com/ethereum/eth2.0-spec-tests/)
+[ethereum/consensus-spec-tests](https://github.com/ethereum/consensus-spec-tests/)
 repository contains a large set of tests that verify Lighthouse behaviour
 against the Ethereum Foundation specifications.
 
 These tests are quite large (100's of MB) so they're only downloaded if you run
-`$ make test-ef` (or anything that run it). You may want to avoid
+`$ make test-ef` (or anything that runs it). You may want to avoid
 downloading these tests if you're on a slow or metered Internet connection. CI
 will require them to pass, though.
 
```
```diff
@@ -492,6 +492,7 @@ impl ChainSpec {
         Self {
             max_committees_per_slot: 4,
             target_committee_size: 4,
+            churn_limit_quotient: 32,
             shuffle_round_count: 10,
             min_genesis_active_validator_count: 64,
             min_genesis_time: 1578009600,
```
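For context on the new `churn_limit_quotient` field (this hunk appears to be the minimal-preset `ChainSpec` constructor): in the consensus spec this constant divides the active validator count to produce the per-epoch validator churn limit. A hedged sketch using spec-style names, not Lighthouse's internal API:

```rust
/// Rough sketch of the spec's `get_validator_churn_limit`, shown only to
/// illustrate what `churn_limit_quotient: 32` (the minimal-preset value)
/// controls; this is not code from this PR.
fn validator_churn_limit(
    active_validator_count: u64,
    min_per_epoch_churn_limit: u64, // 4 in both presets
    churn_limit_quotient: u64,      // 32 for minimal, 65536 for mainnet
) -> u64 {
    std::cmp::max(
        min_per_epoch_churn_limit,
        active_validator_count / churn_limit_quotient,
    )
}
```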
```diff
@@ -1,4 +1,8 @@
-use crate::{generic_public_key::GenericPublicKey, Error};
+use crate::{
+    generic_public_key::{GenericPublicKey, TPublicKey},
+    Error,
+};
+use std::fmt::{self, Debug};
 use std::marker::PhantomData;
 
 /// Implemented on some struct from a BLS library so it may be used internally in this crate.
@@ -35,3 +39,13 @@ where
         })
     }
 }
+
+impl<Pub, AggPub> Debug for GenericAggregatePublicKey<Pub, AggPub>
+where
+    AggPub: TAggregatePublicKey<Pub>,
+    Pub: TPublicKey,
+{
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{:?}", self.to_public_key())
+    }
+}
```
```diff
@@ -194,6 +194,20 @@ where
         }
     }
 
+    /// Wrapper for `fast_aggregate_verify` accepting `G2_POINT_AT_INFINITY` signature when
+    /// `pubkeys` is empty.
+    pub fn eth_fast_aggregate_verify(
+        &self,
+        msg: Hash256,
+        pubkeys: &[&GenericPublicKey<Pub>],
+    ) -> bool {
+        if pubkeys.is_empty() && self.is_infinity() {
+            true
+        } else {
+            self.fast_aggregate_verify(msg, pubkeys)
+        }
+    }
+
     /// Verify that `self` represents an aggregate signature where all `pubkeys` have signed their
     /// corresponding message in `msgs`.
     ///
```
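A rough caller-side sketch of the new wrapper, assuming the concrete `AggregateSignature`/`PublicKey` aliases that the `bls` crate exposes over the generic types above: the only behavioural difference from `fast_aggregate_verify` is that an empty pubkey list paired with the G2 point-at-infinity signature verifies as `true`, matching the spec's `eth_fast_aggregate_verify`.

```rust
use bls::{AggregateSignature, PublicKey};
use types::Hash256;

// Hypothetical helper for illustration only; not part of this PR.
fn check_aggregate(
    signature: &AggregateSignature,
    signing_root: Hash256,
    participant_pubkeys: &[&PublicKey],
) -> bool {
    // With no participants, only the infinity signature is accepted;
    // otherwise this defers to the plain `fast_aggregate_verify`.
    signature.eth_fast_aggregate_verify(signing_root, participant_pubkeys)
}
```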
`testing/ef_tests/.gitignore`:

```diff
@@ -1,2 +1,2 @@
-/eth2.0-spec-tests
+/consensus-spec-tests
 .accessed_file_log.txt
```
```diff
@@ -1,8 +1,8 @@
-TESTS_TAG := v1.1.0-beta.2
+TESTS_TAG := v1.1.0-beta.4
 TESTS = general minimal mainnet
 TARBALLS = $(patsubst %,%-$(TESTS_TAG).tar.gz,$(TESTS))
 
-REPO_NAME := eth2.0-spec-tests
+REPO_NAME := consensus-spec-tests
 OUTPUT_DIR := ./$(REPO_NAME)
 
 BASE_URL := https://github.com/ethereum/$(REPO_NAME)/releases/download/$(TESTS_TAG)
```
````diff
@@ -1,6 +1,6 @@
-# Ethereum 2.0 Specification Tests
+# Consensus Specification Tests
 
-This crate parses and executes the test vectors at [ethereum/eth2.0-spec-tests](https://github.com/ethereum/eth2.0-spec-tests).
+This crate parses and executes the test vectors at [ethereum/consensus-spec-tests](https://github.com/ethereum/consensus-spec-tests).
 
 Functionality is achieved only via the `$ cargo test --features ef_tests` command.
 
@@ -14,10 +14,10 @@ $ make
 ```
 
 _Note: this may download hundreds of MB of compressed archives from the
-[ethereum/eth2.0-spec-tests](https://github.com/ethereum/eth2.0-spec-tests/),
+[ethereum/consensus-spec-tests](https://github.com/ethereum/consensus-spec-tests/),
 which may expand into several GB of files._
 
-If successful, you should now have the extracted tests in `./eth2.0-spec-tests`.
+If successful, you should now have the extracted tests in `./consensus-spec-tests`.
 
 Run them with:
 
````
```diff
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 
 # The purpose of this script is to compare a list of file names that were accessed during testing
-# against all the file names in the eth2.0-spec-tests repository. It then checks to see which files
+# against all the file names in the consensus-spec-tests repository. It then checks to see which files
 # were not accessed and returns an error if any non-intentionally-ignored files are detected.
 #
 # The ultimate goal is to detect any accidentally-missed spec tests.
@@ -12,20 +12,13 @@ import sys
 # First argument should the path to a file which contains a list of accessed file names.
 accessed_files_filename = sys.argv[1]
 
-# Second argument should be the path to the eth2.0-spec-tests directory.
+# Second argument should be the path to the consensus-spec-tests directory.
 tests_dir_filename = sys.argv[2]
 
-# If any of the file names found in the eth2.0-spec-tests directory *starts with* one of the
+# If any of the file names found in the consensus-spec-tests directory *starts with* one of the
 # following strings, we will assume they are to be ignored (i.e., we are purposefully *not* running
 # the spec tests).
 excluded_paths = [
-    # Configs from future phases
-    "tests/mainnet/config/custody_game.yaml",
-    "tests/mainnet/config/sharding.yaml",
-    "tests/mainnet/config/merge.yaml",
-    "tests/minimal/config/custody_game.yaml",
-    "tests/minimal/config/sharding.yaml",
-    "tests/minimal/config/merge.yaml",
     # Merge tests
     "tests/minimal/merge",
     "tests/mainnet/merge",
@@ -53,7 +46,7 @@ excluded_paths = [
 ]
 
 def normalize_path(path):
-    return path.split("eth2.0-spec-tests/", )[1]
+    return path.split("consensus-spec-tests/", )[1]
 
 # Determine the list of filenames which were accessed during tests.
 passed = set()
```
```diff
@@ -6,6 +6,8 @@ use types::ForkName;
 
 mod bls_aggregate_sigs;
 mod bls_aggregate_verify;
+mod bls_eth_aggregate_pubkeys;
+mod bls_eth_fast_aggregate_verify;
 mod bls_fast_aggregate_verify;
 mod bls_sign_msg;
 mod bls_verify_msg;
@@ -25,6 +27,8 @@ mod transition;
 
 pub use bls_aggregate_sigs::*;
 pub use bls_aggregate_verify::*;
+pub use bls_eth_aggregate_pubkeys::*;
+pub use bls_eth_fast_aggregate_verify::*;
 pub use bls_fast_aggregate_verify::*;
 pub use bls_sign_msg::*;
 pub use bls_verify_msg::*;
```
```diff
@@ -13,6 +13,10 @@ pub struct BlsAggregateSigs {
 impl BlsCase for BlsAggregateSigs {}
 
 impl Case for BlsAggregateSigs {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Base
+    }
+
     fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         let mut aggregate_signature = AggregateSignature::infinity();
 
```
```diff
@@ -21,6 +21,10 @@ pub struct BlsAggregateVerify {
 impl BlsCase for BlsAggregateVerify {}
 
 impl Case for BlsAggregateVerify {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Base
+    }
+
     fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         let messages = self
             .input
```
`testing/ef_tests/src/cases/bls_eth_aggregate_pubkeys.rs` (new file):

```diff
@@ -0,0 +1,48 @@
+use super::*;
+use crate::case_result::compare_result;
+use crate::cases::common::BlsCase;
+use bls::{AggregatePublicKey, PublicKeyBytes};
+use serde_derive::Deserialize;
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct BlsEthAggregatePubkeys {
+    pub input: Vec<PublicKeyBytes>,
+    pub output: Option<PublicKeyBytes>,
+}
+
+impl BlsCase for BlsEthAggregatePubkeys {}
+
+impl Case for BlsEthAggregatePubkeys {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Altair
+    }
+
+    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
+        let pubkeys_result = self
+            .input
+            .iter()
+            .map(|pkb| pkb.decompress())
+            .collect::<Result<Vec<_>, _>>();
+
+        let pubkeys = match pubkeys_result {
+            Ok(pubkeys) => pubkeys,
+            Err(bls::Error::InvalidInfinityPublicKey | bls::Error::BlstError(_))
+                if self.output.is_none() =>
+            {
+                return Ok(());
+            }
+            #[cfg(feature = "milagro")]
+            Err(bls::Error::MilagroError(_)) if self.output.is_none() => {
+                return Ok(());
+            }
+            Err(e) => return Err(Error::FailedToParseTest(format!("{:?}", e))),
+        };
+
+        let aggregate_pubkey =
+            AggregatePublicKey::aggregate(&pubkeys).map(|agg| agg.to_public_key());
+
+        let expected = self.output.as_ref().map(|pk| pk.decompress().unwrap());
+
+        compare_result::<_, bls::Error>(&aggregate_pubkey, &expected)
+    }
+}
```
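The new case leans on `compare_result` to treat an absent `output` as "the operation must fail". A minimal sketch of the contract that usage assumes (an illustration, not the actual `case_result::compare_result` implementation):

```rust
use std::fmt::Debug;

/// Assumed semantics: `expected == None` means the operation under test must
/// error; `expected == Some(v)` means it must succeed and produce exactly `v`.
fn compare_result_sketch<T: PartialEq + Debug, E: Debug>(
    result: &Result<T, E>,
    expected: &Option<T>,
) -> Result<(), String> {
    match (result, expected) {
        (Ok(got), Some(want)) if got == want => Ok(()),
        (Err(_), None) => Ok(()),
        _ => Err(format!("mismatch: got {:?}, expected {:?}", result, expected)),
    }
}
```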
`testing/ef_tests/src/cases/bls_eth_fast_aggregate_verify.rs` (new file):

```diff
@@ -0,0 +1,61 @@
+use super::*;
+use crate::case_result::compare_result;
+use crate::cases::common::BlsCase;
+use bls::{AggregateSignature, PublicKeyBytes};
+use serde_derive::Deserialize;
+use std::convert::TryInto;
+use types::Hash256;
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct BlsEthFastAggregateVerifyInput {
+    pub pubkeys: Vec<PublicKeyBytes>,
+    #[serde(alias = "messages")]
+    pub message: String,
+    pub signature: String,
+}
+
+#[derive(Debug, Clone, Deserialize)]
+pub struct BlsEthFastAggregateVerify {
+    pub input: BlsEthFastAggregateVerifyInput,
+    pub output: bool,
+}
+
+impl BlsCase for BlsEthFastAggregateVerify {}
+
+impl Case for BlsEthFastAggregateVerify {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Altair
+    }
+
+    fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
+        let message = Hash256::from_slice(
+            &hex::decode(&self.input.message[2..])
+                .map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))?,
+        );
+
+        let pubkeys_result = self
+            .input
+            .pubkeys
+            .iter()
+            .map(|pkb| pkb.try_into())
+            .collect::<Result<Vec<_>, _>>();
+
+        let pubkeys = match pubkeys_result {
+            Ok(pubkeys) => pubkeys,
+            Err(bls::Error::InvalidInfinityPublicKey) if !self.output => {
+                return Ok(());
+            }
+            Err(e) => return Err(Error::FailedToParseTest(format!("{:?}", e))),
+        };
+
+        let pubkey_refs = pubkeys.iter().collect::<Vec<_>>();
+
+        let signature_ok = hex::decode(&self.input.signature[2..])
+            .ok()
+            .and_then(|bytes: Vec<u8>| AggregateSignature::deserialize(&bytes).ok())
+            .map(|signature| signature.eth_fast_aggregate_verify(message, &pubkey_refs))
+            .unwrap_or(false);
+
+        compare_result::<bool, ()>(&Ok(signature_ok), &Some(self.output))
+    }
+}
```
```diff
@@ -23,6 +23,10 @@ pub struct BlsFastAggregateVerify {
 impl BlsCase for BlsFastAggregateVerify {}
 
 impl Case for BlsFastAggregateVerify {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Base
+    }
+
     fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         let message = Hash256::from_slice(
             &hex::decode(&self.input.message[2..])
```
```diff
@@ -20,6 +20,10 @@ pub struct BlsSign {
 impl BlsCase for BlsSign {}
 
 impl Case for BlsSign {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Base
+    }
+
     fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         // Convert private_key and message to required types
         let sk = hex::decode(&self.input.privkey[2..])
```
```diff
@@ -22,6 +22,10 @@ pub struct BlsVerify {
 impl BlsCase for BlsVerify {}
 
 impl Case for BlsVerify {
+    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
+        fork_name == ForkName::Base
+    }
+
     fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
         let message = hex::decode(&self.input.message[2..])
             .map_err(|e| Error::FailedToParseTest(format!("{:?}", e)))?;
```
```diff
@@ -139,7 +139,6 @@ impl Case for SszGeneric {
                 let mut limit = parts[1];
 
                 // Test format is inconsistent, pretend the limit is 32 (arbitrary)
-                // https://github.com/ethereum/eth2.0-spec-tests
                 if limit == "no" {
                     limit = "32";
                 }
```
```diff
@@ -37,7 +37,7 @@ pub trait Handler {
         };
 
         let handler_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
-            .join("eth2.0-spec-tests")
+            .join("consensus-spec-tests")
             .join("tests")
             .join(Self::config_name())
             .join(fork_name_str)
@@ -82,10 +82,6 @@ macro_rules! bls_handler {
         impl Handler for $runner_name {
             type Case = cases::$case_name;
 
-            fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool {
-                fork_name == ForkName::Base
-            }
-
             fn runner_name() -> &'static str {
                 "bls"
             }
@@ -110,6 +106,16 @@ bls_handler!(
     BlsFastAggregateVerify,
     "fast_aggregate_verify"
 );
+bls_handler!(
+    BlsEthAggregatePubkeysHandler,
+    BlsEthAggregatePubkeys,
+    "eth_aggregate_pubkeys"
+);
+bls_handler!(
+    BlsEthFastAggregateVerifyHandler,
+    BlsEthFastAggregateVerify,
+    "eth_fast_aggregate_verify"
+);
 
 /// Handler for SSZ types.
 pub struct SszStaticHandler<T, E> {
@@ -258,8 +264,8 @@ impl<E: EthSpec + TypeName> Handler for SanityBlocksHandler<E> {
     }
 
     fn is_enabled_for_fork(&self, _fork_name: ForkName) -> bool {
-        // FIXME(altair): v1.1.0-alpha.3 doesn't mark the historical blocks test as
-        // requiring real crypto, so only run these tests with real crypto for now.
+        // NOTE: v1.1.0-beta.4 doesn't mark the historical blocks test as requiring real crypto, so
+        // only run these tests with real crypto for now.
         cfg!(not(feature = "fake_crypto"))
     }
 }
@@ -284,6 +290,26 @@ impl<E: EthSpec + TypeName> Handler for SanitySlotsHandler<E> {
     }
 }
 
+#[derive(Derivative)]
+#[derivative(Default(bound = ""))]
+pub struct RandomHandler<E>(PhantomData<E>);
+
+impl<E: EthSpec + TypeName> Handler for RandomHandler<E> {
+    type Case = cases::SanityBlocks<E>;
+
+    fn config_name() -> &'static str {
+        E::name()
+    }
+
+    fn runner_name() -> &'static str {
+        "random"
+    }
+
+    fn handler_name(&self) -> String {
+        "random".into()
+    }
+}
+
 #[derive(Derivative)]
 #[derivative(Default(bound = ""))]
 pub struct EpochProcessingHandler<E, T>(PhantomData<(E, T)>);
```
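Taken together with the per-case `is_enabled_for_fork` additions earlier in this diff, dropping the hard-coded `ForkName::Base` gate from `bls_handler!` moves fork selection onto each case, which is what lets the new Altair-only BLS cases run at all. A minimal sketch of that pattern with simplified stand-ins (not the real ef_tests traits):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum ForkName {
    Base,
    Altair,
}

trait Case {
    // By default a case runs on every fork; individual cases narrow this.
    fn is_enabled_for_fork(_fork_name: ForkName) -> bool {
        true
    }
}

// Pre-existing BLS cases stay pinned to the base fork...
struct BlsAggregateVerify;
impl Case for BlsAggregateVerify {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        fork_name == ForkName::Base
    }
}

// ...while the new `eth_*` cases opt in to Altair only.
struct BlsEthAggregatePubkeys;
impl Case for BlsEthAggregatePubkeys {
    fn is_enabled_for_fork(fork_name: ForkName) -> bool {
        fork_name == ForkName::Altair
    }
}

fn main() {
    assert!(BlsAggregateVerify::is_enabled_for_fork(ForkName::Base));
    assert!(BlsEthAggregatePubkeys::is_enabled_for_fork(ForkName::Altair));
    assert!(!BlsEthAggregatePubkeys::is_enabled_for_fork(ForkName::Base));
}
```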
```diff
@@ -82,6 +82,12 @@ fn sanity_slots() {
     SanitySlotsHandler::<MainnetEthSpec>::default().run();
 }
 
+#[test]
+fn random() {
+    RandomHandler::<MinimalEthSpec>::default().run();
+    RandomHandler::<MainnetEthSpec>::default().run();
+}
+
 #[test]
 #[cfg(not(feature = "fake_crypto"))]
 fn bls_aggregate() {
@@ -112,6 +118,18 @@ fn bls_fast_aggregate_verify() {
     BlsFastAggregateVerifyHandler::default().run();
 }
 
+#[test]
+#[cfg(not(feature = "fake_crypto"))]
+fn bls_eth_aggregate_pubkeys() {
+    BlsEthAggregatePubkeysHandler::default().run();
+}
+
+#[test]
+#[cfg(not(feature = "fake_crypto"))]
+fn bls_eth_fast_aggregate_verify() {
+    BlsEthFastAggregateVerifyHandler::default().run();
+}
+
 /// As for `ssz_static_test_no_run` (below), but also executes the function as a test.
 #[cfg(feature = "fake_crypto")]
 macro_rules! ssz_static_test {
@@ -177,7 +195,6 @@ mod ssz_static {
     ssz_static_test!(beacon_block_header, BeaconBlockHeader);
     ssz_static_test!(beacon_state, SszStaticTHCHandler, BeaconState<_>);
     ssz_static_test!(checkpoint, Checkpoint);
-    // FIXME(altair): add ContributionAndProof
     ssz_static_test!(deposit, Deposit);
     ssz_static_test!(deposit_data, DepositData);
     ssz_static_test!(deposit_message, DepositMessage);
@@ -197,10 +214,8 @@ mod ssz_static {
         SignedBeaconBlock<_>
     );
     ssz_static_test!(signed_beacon_block_header, SignedBeaconBlockHeader);
-    // FIXME(altair): add SignedContributionAndProof
    ssz_static_test!(signed_voluntary_exit, SignedVoluntaryExit);
    ssz_static_test!(signing_data, SigningData);
-    // FIXME(altair): add SyncCommitteeContribution/Signature/SigningData
    ssz_static_test!(validator, Validator);
    ssz_static_test!(voluntary_exit, VoluntaryExit);
 
```