Remove equivocating validators from fork choice (#3371)

## Issue Addressed

Closes https://github.com/sigp/lighthouse/issues/3241
Closes https://github.com/sigp/lighthouse/issues/3242

## Proposed Changes

* [x] Implement logic to remove equivocating validators from fork choice per https://github.com/ethereum/consensus-specs/pull/2845
* [x] Update tests to v1.2.0-rc.1. The new test which exercises `equivocating_indices` is passing.
* [x] Pull in some SSZ abstractions from the `tree-states` branch that make implementing Vec-compatible encoding for types like `BTreeSet` and `BTreeMap`.
* [x] Implement schema upgrades and downgrades for the database (new schema version is V11).
* [x] Apply attester slashings from blocks to fork choice

## Additional Info

* This PR doesn't need the `BTreeMap` impl, but `tree-states` does, and I don't think there's any harm in keeping it. But I could also be convinced to drop it.

Blocked on #3322.
This commit is contained in:
Michael Sproul 2022-07-28 09:43:41 +00:00
parent efb360cc6d
commit d04fde3ba9
25 changed files with 742 additions and 151 deletions

1
Cargo.lock generated
View File

@ -1726,6 +1726,7 @@ version = "0.4.1"
dependencies = [
"eth2_ssz_derive",
"ethereum-types 0.12.1",
"itertools",
"smallvec",
]

View File

@ -2095,11 +2095,20 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
)?)
}
/// Accept some attester slashing and queue it for inclusion in an appropriate block.
/// Accept a verified attester slashing and:
///
/// 1. Apply it to fork choice.
/// 2. Add it to the op pool.
pub fn import_attester_slashing(
&self,
attester_slashing: SigVerifiedOp<AttesterSlashing<T::EthSpec>>,
) {
// Add to fork choice.
self.canonical_head
.fork_choice_write_lock()
.on_attester_slashing(attester_slashing.as_inner());
// Add to the op pool (if we have the ability to propose blocks).
if self.eth1_chain.is_some() {
self.op_pool.insert_attester_slashing(
attester_slashing,
@ -2717,6 +2726,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
.process_valid_state(current_slot.epoch(T::EthSpec::slots_per_epoch()), &state);
let validator_monitor = self.validator_monitor.read();
// Register each attester slashing in the block with fork choice.
for attester_slashing in block.body().attester_slashings() {
fork_choice.on_attester_slashing(attester_slashing);
}
// Register each attestation in the block with the fork choice service.
for attestation in block.body().attestations() {
let _fork_choice_attestation_timer =

View File

@ -8,6 +8,7 @@ use crate::{metrics, BeaconSnapshot};
use derivative::Derivative;
use fork_choice::ForkChoiceStore;
use ssz_derive::{Decode, Encode};
use std::collections::BTreeSet;
use std::marker::PhantomData;
use std::sync::Arc;
use store::{Error as StoreError, HotColdDB, ItemStore};
@ -158,6 +159,7 @@ pub struct BeaconForkChoiceStore<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<
unrealized_justified_checkpoint: Checkpoint,
unrealized_finalized_checkpoint: Checkpoint,
proposer_boost_root: Hash256,
equivocating_indices: BTreeSet<u64>,
_phantom: PhantomData<E>,
}
@ -206,6 +208,7 @@ where
unrealized_justified_checkpoint: justified_checkpoint,
unrealized_finalized_checkpoint: finalized_checkpoint,
proposer_boost_root: Hash256::zero(),
equivocating_indices: BTreeSet::new(),
_phantom: PhantomData,
}
}
@ -223,6 +226,7 @@ where
unrealized_justified_checkpoint: self.unrealized_justified_checkpoint,
unrealized_finalized_checkpoint: self.unrealized_finalized_checkpoint,
proposer_boost_root: self.proposer_boost_root,
equivocating_indices: self.equivocating_indices.clone(),
}
}
@ -242,6 +246,7 @@ where
unrealized_justified_checkpoint: persisted.unrealized_justified_checkpoint,
unrealized_finalized_checkpoint: persisted.unrealized_finalized_checkpoint,
proposer_boost_root: persisted.proposer_boost_root,
equivocating_indices: persisted.equivocating_indices,
_phantom: PhantomData,
})
}
@ -350,30 +355,40 @@ where
/// Sets the proposer boost root used to weight the current proposer's block in fork choice.
fn set_proposer_boost_root(&mut self, proposer_boost_root: Hash256) {
    self.proposer_boost_root = proposer_boost_root;
}
/// Returns the set of validator indices known to have equivocated (attester-slashed).
fn equivocating_indices(&self) -> &BTreeSet<u64> {
    &self.equivocating_indices
}
/// Records additional equivocating validator indices; duplicates are absorbed by the set.
fn extend_equivocating_indices(&mut self, indices: impl IntoIterator<Item = u64>) {
    self.equivocating_indices.extend(indices);
}
}
/// A container which allows persisting the `BeaconForkChoiceStore` to the on-disk database.
#[superstruct(
variants(V1, V7, V8, V10),
variants(V1, V7, V8, V10, V11),
variant_attributes(derive(Encode, Decode)),
no_enum
)]
pub struct PersistedForkChoiceStore {
#[superstruct(only(V1, V7))]
pub balances_cache: BalancesCacheV1,
#[superstruct(only(V8, V10))]
#[superstruct(only(V8, V10, V11))]
pub balances_cache: BalancesCacheV8,
pub time: Slot,
pub finalized_checkpoint: Checkpoint,
pub justified_checkpoint: Checkpoint,
pub justified_balances: Vec<u64>,
pub best_justified_checkpoint: Checkpoint,
#[superstruct(only(V10))]
#[superstruct(only(V10, V11))]
pub unrealized_justified_checkpoint: Checkpoint,
#[superstruct(only(V10))]
#[superstruct(only(V10, V11))]
pub unrealized_finalized_checkpoint: Checkpoint,
#[superstruct(only(V7, V8, V10))]
#[superstruct(only(V7, V8, V10, V11))]
pub proposer_boost_root: Hash256,
#[superstruct(only(V11))]
pub equivocating_indices: BTreeSet<u64>,
}
pub type PersistedForkChoiceStore = PersistedForkChoiceStoreV10;
pub type PersistedForkChoiceStore = PersistedForkChoiceStoreV11;

View File

@ -1,6 +1,6 @@
use crate::beacon_fork_choice_store::{
PersistedForkChoiceStoreV1, PersistedForkChoiceStoreV10, PersistedForkChoiceStoreV7,
PersistedForkChoiceStoreV8,
PersistedForkChoiceStoreV1, PersistedForkChoiceStoreV10, PersistedForkChoiceStoreV11,
PersistedForkChoiceStoreV7, PersistedForkChoiceStoreV8,
};
use ssz::{Decode, Encode};
use ssz_derive::{Decode, Encode};
@ -8,10 +8,10 @@ use store::{DBColumn, Error, StoreItem};
use superstruct::superstruct;
// If adding a new version you should update this type alias and fix the breakages.
pub type PersistedForkChoice = PersistedForkChoiceV10;
pub type PersistedForkChoice = PersistedForkChoiceV11;
#[superstruct(
variants(V1, V7, V8, V10),
variants(V1, V7, V8, V10, V11),
variant_attributes(derive(Encode, Decode)),
no_enum
)]
@ -25,6 +25,8 @@ pub struct PersistedForkChoice {
pub fork_choice_store: PersistedForkChoiceStoreV8,
#[superstruct(only(V10))]
pub fork_choice_store: PersistedForkChoiceStoreV10,
#[superstruct(only(V11))]
pub fork_choice_store: PersistedForkChoiceStoreV11,
}
macro_rules! impl_store_item {
@ -49,3 +51,4 @@ impl_store_item!(PersistedForkChoiceV1);
impl_store_item!(PersistedForkChoiceV7);
impl_store_item!(PersistedForkChoiceV8);
impl_store_item!(PersistedForkChoiceV10);
impl_store_item!(PersistedForkChoiceV11);

View File

@ -1,5 +1,6 @@
//! Utilities for managing database schema changes.
mod migration_schema_v10;
mod migration_schema_v11;
mod migration_schema_v6;
mod migration_schema_v7;
mod migration_schema_v8;
@ -8,7 +9,8 @@ mod types;
use crate::beacon_chain::{BeaconChainTypes, FORK_CHOICE_DB_KEY};
use crate::persisted_fork_choice::{
PersistedForkChoiceV1, PersistedForkChoiceV10, PersistedForkChoiceV7, PersistedForkChoiceV8,
PersistedForkChoiceV1, PersistedForkChoiceV10, PersistedForkChoiceV11, PersistedForkChoiceV7,
PersistedForkChoiceV8,
};
use crate::types::ChainSpec;
use slog::{warn, Logger};
@ -36,6 +38,12 @@ pub fn migrate_schema<T: BeaconChainTypes>(
migrate_schema::<T>(db.clone(), datadir, from, next, log.clone(), spec)?;
migrate_schema::<T>(db, datadir, next, to, log, spec)
}
// Downgrade across multiple versions by recursively migrating one step at a time.
(_, _) if to.as_u64() + 1 < from.as_u64() => {
let next = SchemaVersion(from.as_u64() - 1);
migrate_schema::<T>(db.clone(), datadir, from, next, log.clone(), spec)?;
migrate_schema::<T>(db, datadir, next, to, log, spec)
}
//
// Migrations from before SchemaVersion(5) are deprecated.
@ -159,6 +167,35 @@ pub fn migrate_schema<T: BeaconChainTypes>(
Ok(())
}
// Upgrade from v10 to v11 adding support for equivocating indices to fork choice.
(SchemaVersion(10), SchemaVersion(11)) => {
let mut ops = vec![];
let fork_choice_opt = db.get_item::<PersistedForkChoiceV10>(&FORK_CHOICE_DB_KEY)?;
if let Some(fork_choice) = fork_choice_opt {
let updated_fork_choice = migration_schema_v11::update_fork_choice(fork_choice);
ops.push(updated_fork_choice.as_kv_store_op(FORK_CHOICE_DB_KEY));
}
db.store_schema_version_atomically(to, ops)?;
Ok(())
}
// Downgrade from v11 to v10 removing support for equivocating indices from fork choice.
(SchemaVersion(11), SchemaVersion(10)) => {
let mut ops = vec![];
let fork_choice_opt = db.get_item::<PersistedForkChoiceV11>(&FORK_CHOICE_DB_KEY)?;
if let Some(fork_choice) = fork_choice_opt {
let updated_fork_choice =
migration_schema_v11::downgrade_fork_choice(fork_choice, log);
ops.push(updated_fork_choice.as_kv_store_op(FORK_CHOICE_DB_KEY));
}
db.store_schema_version_atomically(to, ops)?;
Ok(())
}
// Anything else is an error.
(_, _) => Err(HotColdDBError::UnsupportedSchemaVersion {
target_version: to,

View File

@ -0,0 +1,77 @@
use crate::beacon_fork_choice_store::{PersistedForkChoiceStoreV10, PersistedForkChoiceStoreV11};
use crate::persisted_fork_choice::{PersistedForkChoiceV10, PersistedForkChoiceV11};
use slog::{warn, Logger};
use std::collections::BTreeSet;
/// Add the equivocating indices field.
/// Add the equivocating indices field.
///
/// Upgrades a persisted V10 fork choice record to V11 by carrying every existing field
/// across unchanged and initialising the new `equivocating_indices` set to empty (no
/// equivocations are known at upgrade time).
pub fn update_fork_choice(fork_choice_v10: PersistedForkChoiceV10) -> PersistedForkChoiceV11 {
    let v10_store = fork_choice_v10.fork_choice_store;
    PersistedForkChoiceV11 {
        fork_choice: fork_choice_v10.fork_choice,
        fork_choice_store: PersistedForkChoiceStoreV11 {
            balances_cache: v10_store.balances_cache,
            time: v10_store.time,
            finalized_checkpoint: v10_store.finalized_checkpoint,
            justified_checkpoint: v10_store.justified_checkpoint,
            justified_balances: v10_store.justified_balances,
            best_justified_checkpoint: v10_store.best_justified_checkpoint,
            unrealized_justified_checkpoint: v10_store.unrealized_justified_checkpoint,
            unrealized_finalized_checkpoint: v10_store.unrealized_finalized_checkpoint,
            proposer_boost_root: v10_store.proposer_boost_root,
            // New in V11: start with no known equivocating validators.
            equivocating_indices: BTreeSet::new(),
        },
    }
}
/// Remove the equivocating indices field.
///
/// Downgrades a persisted V11 fork choice record to V10. The V10 schema has nowhere to
/// record equivocations, so any slashed indices are discarded; a warning is logged so the
/// operator knows fork choice protection has been weakened.
pub fn downgrade_fork_choice(
    fork_choice_v11: PersistedForkChoiceV11,
    log: Logger,
) -> PersistedForkChoiceV10 {
    let v11_store = fork_choice_v11.fork_choice_store;

    // Surface the loss of slashing information to the operator.
    if !v11_store.equivocating_indices.is_empty() {
        warn!(
            log,
            "Deleting slashed validators from fork choice store";
            "count" => v11_store.equivocating_indices.len(),
            "message" => "this may make your node more susceptible to following the wrong chain",
        );
    }

    PersistedForkChoiceV10 {
        fork_choice: fork_choice_v11.fork_choice,
        fork_choice_store: PersistedForkChoiceStoreV10 {
            balances_cache: v11_store.balances_cache,
            time: v11_store.time,
            finalized_checkpoint: v11_store.finalized_checkpoint,
            justified_checkpoint: v11_store.justified_checkpoint,
            justified_balances: v11_store.justified_balances,
            best_justified_checkpoint: v11_store.best_justified_checkpoint,
            unrealized_justified_checkpoint: v11_store.unrealized_justified_checkpoint,
            unrealized_finalized_checkpoint: v11_store.unrealized_finalized_checkpoint,
            proposer_boost_root: v11_store.proposer_boost_root,
        },
    }
}

View File

@ -4,7 +4,7 @@ use ssz::{Decode, Encode};
use ssz_derive::{Decode, Encode};
use types::{Checkpoint, Hash256, Slot};
pub const CURRENT_SCHEMA_VERSION: SchemaVersion = SchemaVersion(10);
pub const CURRENT_SCHEMA_VERSION: SchemaVersion = SchemaVersion(11);
// All the keys that get stored under the `BeaconMeta` column.
//

View File

@ -1,19 +1,23 @@
use crate::{ForkChoiceStore, InvalidationOperation};
use proto_array::{Block as ProtoBlock, ExecutionStatus, ProtoArrayForkChoice};
use ssz_derive::{Decode, Encode};
use state_processing::per_epoch_processing;
use state_processing::{
per_block_processing::errors::AttesterSlashingValidationError, per_epoch_processing,
};
use std::cmp::Ordering;
use std::collections::BTreeSet;
use std::marker::PhantomData;
use std::time::Duration;
use types::{
consts::merge::INTERVALS_PER_SLOT, AttestationShufflingId, BeaconBlockRef, BeaconState,
BeaconStateError, ChainSpec, Checkpoint, Epoch, EthSpec, ExecPayload, ExecutionBlockHash,
Hash256, IndexedAttestation, RelativeEpoch, SignedBeaconBlock, Slot,
consts::merge::INTERVALS_PER_SLOT, AttestationShufflingId, AttesterSlashing, BeaconBlockRef,
BeaconState, BeaconStateError, ChainSpec, Checkpoint, Epoch, EthSpec, ExecPayload,
ExecutionBlockHash, Hash256, IndexedAttestation, RelativeEpoch, SignedBeaconBlock, Slot,
};
#[derive(Debug)]
pub enum Error<T> {
InvalidAttestation(InvalidAttestation),
InvalidAttesterSlashing(AttesterSlashingValidationError),
InvalidBlock(InvalidBlock),
ProtoArrayError(String),
InvalidProtoArrayBytes(String),
@ -63,6 +67,12 @@ impl<T> From<InvalidAttestation> for Error<T> {
}
}
impl<T> From<AttesterSlashingValidationError> for Error<T> {
fn from(e: AttesterSlashingValidationError) -> Self {
Error::InvalidAttesterSlashing(e)
}
}
impl<T> From<state_processing::EpochProcessingError> for Error<T> {
fn from(e: state_processing::EpochProcessingError) -> Self {
Error::UnrealizedVoteProcessing(e)
@ -413,26 +423,6 @@ where
Ok(fork_choice)
}
/*
/// Instantiates `Self` from some existing components.
///
/// This is useful if the existing components have been loaded from disk after a process
/// restart.
pub fn from_components(
fc_store: T,
proto_array: ProtoArrayForkChoice,
queued_attestations: Vec<QueuedAttestation>,
) -> Self {
Self {
fc_store,
proto_array,
queued_attestations,
forkchoice_update_parameters: None,
_phantom: PhantomData,
}
}
*/
/// Returns cached information that can be used to issue a `forkchoiceUpdated` message to an
/// execution engine.
///
@ -507,6 +497,7 @@ where
*store.finalized_checkpoint(),
store.justified_balances(),
store.proposer_boost_root(),
store.equivocating_indices(),
current_slot,
spec,
)?;
@ -1109,6 +1100,22 @@ where
Ok(())
}
/// Apply an attester slashing to fork choice.
///
/// We assume that the attester slashing provided to this function has already been verified.
pub fn on_attester_slashing(&mut self, slashing: &AttesterSlashing<E>) {
    // Only validators that attested in *both* conflicting attestations are provably
    // equivocating, so take the overlap of the two index lists.
    let att1_indices: BTreeSet<u64> = slashing
        .attestation_1
        .attesting_indices
        .iter()
        .copied()
        .collect();
    let equivocating = slashing
        .attestation_2
        .attesting_indices
        .iter()
        .copied()
        .filter(|index| att1_indices.contains(index));
    self.fc_store.extend_equivocating_indices(equivocating);
}
/// Call `on_tick` for all slots between `fc_store.get_current_slot()` and the provided
/// `current_slot`. Returns the value of `self.fc_store.get_current_slot`.
pub fn update_time(
@ -1325,8 +1332,6 @@ where
// If the parent block has execution enabled, always import the block.
//
// TODO(bellatrix): this condition has not yet been merged into the spec.
//
// See:
//
// https://github.com/ethereum/consensus-specs/pull/2844

View File

@ -1,3 +1,4 @@
use std::collections::BTreeSet;
use types::{BeaconBlockRef, BeaconState, Checkpoint, EthSpec, ExecPayload, Hash256, Slot};
/// Approximates the `Store` in "Ethereum 2.0 Phase 0 -- Beacon Chain Fork Choice":
@ -76,4 +77,10 @@ pub trait ForkChoiceStore<T: EthSpec>: Sized {
/// Sets the proposer boost root.
fn set_proposer_boost_root(&mut self, proposer_boost_root: Hash256);
/// Gets the equivocating indices.
fn equivocating_indices(&self) -> &BTreeSet<u64>;
/// Adds to the set of equivocating indices.
fn extend_equivocating_indices(&mut self, indices: impl IntoIterator<Item = u64>);
}

View File

@ -6,6 +6,7 @@ mod votes;
use crate::proto_array_fork_choice::{Block, ExecutionStatus, ProtoArrayForkChoice};
use crate::InvalidationOperation;
use serde_derive::{Deserialize, Serialize};
use std::collections::BTreeSet;
use types::{
AttestationShufflingId, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256,
MainnetEthSpec, Slot,
@ -88,6 +89,7 @@ impl ForkChoiceTestDefinition {
ExecutionStatus::Optimistic(ExecutionBlockHash::zero()),
)
.expect("should create fork choice struct");
let equivocating_indices = BTreeSet::new();
for (op_index, op) in self.operations.into_iter().enumerate() {
match op.clone() {
@ -103,6 +105,7 @@ impl ForkChoiceTestDefinition {
finalized_checkpoint,
&justified_state_balances,
Hash256::zero(),
&equivocating_indices,
Slot::new(0),
&spec,
)
@ -130,6 +133,7 @@ impl ForkChoiceTestDefinition {
finalized_checkpoint,
&justified_state_balances,
proposer_boost_root,
&equivocating_indices,
Slot::new(0),
&spec,
)
@ -154,6 +158,7 @@ impl ForkChoiceTestDefinition {
finalized_checkpoint,
&justified_state_balances,
Hash256::zero(),
&equivocating_indices,
Slot::new(0),
&spec,
);

View File

@ -4,7 +4,7 @@ use crate::ssz_container::SszContainer;
use serde_derive::{Deserialize, Serialize};
use ssz::{Decode, Encode};
use ssz_derive::{Decode, Encode};
use std::collections::HashMap;
use std::collections::{BTreeSet, HashMap};
use types::{
AttestationShufflingId, ChainSpec, Checkpoint, Epoch, EthSpec, ExecutionBlockHash, Hash256,
Slot,
@ -260,12 +260,14 @@ impl ProtoArrayForkChoice {
.map_err(|e| format!("process_block_error: {:?}", e))
}
#[allow(clippy::too_many_arguments)]
pub fn find_head<E: EthSpec>(
&mut self,
justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
justified_state_balances: &[u64],
proposer_boost_root: Hash256,
equivocating_indices: &BTreeSet<u64>,
current_slot: Slot,
spec: &ChainSpec,
) -> Result<Hash256, String> {
@ -278,6 +280,7 @@ impl ProtoArrayForkChoice {
&mut self.votes,
old_balances,
new_balances,
equivocating_indices,
)
.map_err(|e| format!("find_head compute_deltas failed: {:?}", e))?;
@ -439,6 +442,7 @@ fn compute_deltas(
votes: &mut ElasticList<VoteTracker>,
old_balances: &[u64],
new_balances: &[u64],
equivocating_indices: &BTreeSet<u64>,
) -> Result<Vec<i64>, Error> {
let mut deltas = vec![0_i64; indices.len()];
@ -449,6 +453,38 @@ fn compute_deltas(
continue;
}
// Handle newly slashed validators by deducting their weight from their current vote. We
// determine if they are newly slashed by checking whether their `vote.current_root` is
// non-zero. After applying the deduction a single time we set their `current_root` to zero
// and never update it again (thus preventing repeat deductions).
//
// Even if they make new attestations which are processed by `process_attestation` these
// will only update their `vote.next_root`.
if equivocating_indices.contains(&(val_index as u64)) {
// First time we've processed this slashing in fork choice:
//
// 1. Add a negative delta for their `current_root`.
// 2. Set their `current_root` (permanently) to zero.
if !vote.current_root.is_zero() {
let old_balance = old_balances.get(val_index).copied().unwrap_or(0);
if let Some(current_delta_index) = indices.get(&vote.current_root).copied() {
let delta = deltas
.get(current_delta_index)
.ok_or(Error::InvalidNodeDelta(current_delta_index))?
.checked_sub(old_balance as i64)
.ok_or(Error::DeltaOverflow(current_delta_index))?;
// Array access safe due to check on previous line.
deltas[current_delta_index] = delta;
}
vote.current_root = Hash256::zero();
}
// We've handled this slashed validator, continue without applying an ordinary delta.
continue;
}
// If the validator was not included in the _old_ balances (i.e., it did not exist yet)
// then say its balance was zero.
let old_balance = old_balances.get(val_index).copied().unwrap_or(0);
@ -605,6 +641,7 @@ mod test_compute_deltas {
let mut votes = ElasticList::default();
let mut old_balances = vec![];
let mut new_balances = vec![];
let equivocating_indices = BTreeSet::new();
for i in 0..validator_count {
indices.insert(hash_from_index(i), i);
@ -617,8 +654,14 @@ mod test_compute_deltas {
new_balances.push(0);
}
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(
deltas.len(),
@ -649,6 +692,7 @@ mod test_compute_deltas {
let mut votes = ElasticList::default();
let mut old_balances = vec![];
let mut new_balances = vec![];
let equivocating_indices = BTreeSet::new();
for i in 0..validator_count {
indices.insert(hash_from_index(i), i);
@ -661,8 +705,14 @@ mod test_compute_deltas {
new_balances.push(BALANCE);
}
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(
deltas.len(),
@ -700,6 +750,7 @@ mod test_compute_deltas {
let mut votes = ElasticList::default();
let mut old_balances = vec![];
let mut new_balances = vec![];
let equivocating_indices = BTreeSet::new();
for i in 0..validator_count {
indices.insert(hash_from_index(i), i);
@ -712,8 +763,14 @@ mod test_compute_deltas {
new_balances.push(BALANCE);
}
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(
deltas.len(),
@ -746,6 +803,7 @@ mod test_compute_deltas {
let mut votes = ElasticList::default();
let mut old_balances = vec![];
let mut new_balances = vec![];
let equivocating_indices = BTreeSet::new();
for i in 0..validator_count {
indices.insert(hash_from_index(i), i);
@ -758,8 +816,14 @@ mod test_compute_deltas {
new_balances.push(BALANCE);
}
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(
deltas.len(),
@ -797,6 +861,7 @@ mod test_compute_deltas {
let mut indices = HashMap::new();
let mut votes = ElasticList::default();
let equivocating_indices = BTreeSet::new();
// There is only one block.
indices.insert(hash_from_index(1), 0);
@ -819,8 +884,14 @@ mod test_compute_deltas {
next_epoch: Epoch::new(0),
});
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(deltas.len(), 1, "deltas should have expected length");
@ -849,6 +920,7 @@ mod test_compute_deltas {
let mut votes = ElasticList::default();
let mut old_balances = vec![];
let mut new_balances = vec![];
let equivocating_indices = BTreeSet::new();
for i in 0..validator_count {
indices.insert(hash_from_index(i), i);
@ -861,8 +933,14 @@ mod test_compute_deltas {
new_balances.push(NEW_BALANCE);
}
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(
deltas.len(),
@ -902,6 +980,7 @@ mod test_compute_deltas {
let mut indices = HashMap::new();
let mut votes = ElasticList::default();
let equivocating_indices = BTreeSet::new();
// There are two blocks.
indices.insert(hash_from_index(1), 0);
@ -921,8 +1000,14 @@ mod test_compute_deltas {
});
}
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(deltas.len(), 2, "deltas should have expected length");
@ -951,6 +1036,7 @@ mod test_compute_deltas {
let mut indices = HashMap::new();
let mut votes = ElasticList::default();
let equivocating_indices = BTreeSet::new();
// There are two blocks.
indices.insert(hash_from_index(1), 0);
@ -970,8 +1056,14 @@ mod test_compute_deltas {
});
}
let deltas = compute_deltas(&indices, &mut votes, &old_balances, &new_balances)
.expect("should compute deltas");
let deltas = compute_deltas(
&indices,
&mut votes,
&old_balances,
&new_balances,
&equivocating_indices,
)
.expect("should compute deltas");
assert_eq!(deltas.len(), 2, "deltas should have expected length");
@ -992,4 +1084,72 @@ mod test_compute_deltas {
);
}
}
#[test]
fn validator_equivocates() {
    const OLD_BALANCE: u64 = 42;
    const NEW_BALANCE: u64 = 43;

    // Two blocks in the proto-array.
    let mut indices = HashMap::new();
    indices.insert(hash_from_index(1), 0);
    indices.insert(hash_from_index(2), 1);

    // Two validators, each moving their vote from block 1 to block 2.
    let old_balances = vec![OLD_BALANCE; 2];
    let new_balances = vec![NEW_BALANCE; 2];
    let mut votes = ElasticList::default();
    for _ in 0..2 {
        votes.0.push(VoteTracker {
            current_root: hash_from_index(1),
            next_root: hash_from_index(2),
            next_epoch: Epoch::new(0),
        });
    }

    // Validator 0 is slashed.
    let equivocating_indices = BTreeSet::from_iter([0]);

    let deltas = compute_deltas(
        &indices,
        &mut votes,
        &old_balances,
        &new_balances,
        &equivocating_indices,
    )
    .expect("should compute deltas");

    assert_eq!(deltas.len(), 2, "deltas should have expected length");
    // Block 1 loses both validators' old weight: validator 1 moves its vote away and
    // validator 0's vote is deducted due to the slashing.
    assert_eq!(
        deltas[0],
        -2 * OLD_BALANCE as i64,
        "block 1 should have lost two old balances"
    );
    // Only the unslashed validator contributes weight to block 2.
    assert_eq!(
        deltas[1], NEW_BALANCE as i64,
        "block 2 should have gained one balance"
    );

    // Validator 0's current root should have been reset.
    assert_eq!(votes.0[0].current_root, Hash256::zero());
    assert_eq!(votes.0[0].next_root, hash_from_index(2));
    // Validator 1's current root should have been updated.
    assert_eq!(votes.0[1].current_root, hash_from_index(2));

    // Re-computing the deltas should be a no-op (no repeat deduction for the slashed validator).
    let deltas = compute_deltas(
        &indices,
        &mut votes,
        &new_balances,
        &new_balances,
        &equivocating_indices,
    )
    .expect("should compute deltas");
    assert_eq!(deltas, vec![0, 0]);
}
}

View File

@ -14,7 +14,8 @@ eth2_ssz_derive = "0.3.0"
[dependencies]
ethereum-types = "0.12.1"
smallvec = "1.6.1"
smallvec = { version = "1.6.1", features = ["const_generics"] }
itertools = "0.10.3"
[features]
arbitrary = ["ethereum-types/arbitrary"]

View File

@ -5,6 +5,7 @@ use std::cmp::Ordering;
type SmallVec8<T> = SmallVec<[T; 8]>;
pub mod impls;
pub mod try_from_iter;
/// Returned when SSZ decoding fails.
#[derive(Debug, PartialEq, Clone)]

View File

@ -1,7 +1,11 @@
use super::*;
use crate::decode::try_from_iter::{TryCollect, TryFromIter};
use core::num::NonZeroUsize;
use ethereum_types::{H160, H256, U128, U256};
use itertools::process_results;
use smallvec::SmallVec;
use std::collections::{BTreeMap, BTreeSet};
use std::iter::{self, FromIterator};
use std::sync::Arc;
macro_rules! impl_decodable_for_uint {
@ -380,14 +384,14 @@ macro_rules! impl_for_vec {
fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
if bytes.is_empty() {
Ok(vec![].into())
Ok(Self::from_iter(iter::empty()))
} else if T::is_ssz_fixed_len() {
bytes
.chunks(T::ssz_fixed_len())
.map(|chunk| T::from_ssz_bytes(chunk))
.map(T::from_ssz_bytes)
.collect()
} else {
decode_list_of_variable_length_items(bytes, $max_len).map(|vec| vec.into())
decode_list_of_variable_length_items(bytes, $max_len)
}
}
}
@ -395,26 +399,73 @@ macro_rules! impl_for_vec {
}
impl_for_vec!(Vec<T>, None);
impl_for_vec!(SmallVec<[T; 1]>, Some(1));
impl_for_vec!(SmallVec<[T; 2]>, Some(2));
impl_for_vec!(SmallVec<[T; 3]>, Some(3));
impl_for_vec!(SmallVec<[T; 4]>, Some(4));
impl_for_vec!(SmallVec<[T; 5]>, Some(5));
impl_for_vec!(SmallVec<[T; 6]>, Some(6));
impl_for_vec!(SmallVec<[T; 7]>, Some(7));
impl_for_vec!(SmallVec<[T; 8]>, Some(8));
impl_for_vec!(SmallVec<[T; 1]>, None);
impl_for_vec!(SmallVec<[T; 2]>, None);
impl_for_vec!(SmallVec<[T; 3]>, None);
impl_for_vec!(SmallVec<[T; 4]>, None);
impl_for_vec!(SmallVec<[T; 5]>, None);
impl_for_vec!(SmallVec<[T; 6]>, None);
impl_for_vec!(SmallVec<[T; 7]>, None);
impl_for_vec!(SmallVec<[T; 8]>, None);
/// SSZ-decode a `BTreeMap` as a variable-length list of `(K, V)` pairs.
impl<K, V> Decode for BTreeMap<K, V>
where
    K: Decode + Ord,
    V: Decode,
{
    fn is_ssz_fixed_len() -> bool {
        // Encoded as a list, which is always variable-length.
        false
    }

    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        // An empty byte string decodes to an empty map.
        if bytes.is_empty() {
            return Ok(Self::new());
        }
        if <(K, V)>::is_ssz_fixed_len() {
            // Fixed-size entries: decode each equally-sized chunk as one `(K, V)` pair.
            bytes
                .chunks(<(K, V)>::ssz_fixed_len())
                .map(<(K, V)>::from_ssz_bytes)
                .collect()
        } else {
            // Variable-size entries: delegate to the offset-based list decoder (no length cap).
            decode_list_of_variable_length_items(bytes, None)
        }
    }
}
/// SSZ-decode a `BTreeSet` as a variable-length list of `T`.
impl<T> Decode for BTreeSet<T>
where
    T: Decode + Ord,
{
    fn is_ssz_fixed_len() -> bool {
        // Encoded as a list, which is always variable-length.
        false
    }

    fn from_ssz_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
        // An empty byte string decodes to an empty set.
        if bytes.is_empty() {
            return Ok(Self::new());
        }
        if T::is_ssz_fixed_len() {
            // Fixed-size items: decode each equally-sized chunk as one element.
            bytes
                .chunks(T::ssz_fixed_len())
                .map(T::from_ssz_bytes)
                .collect()
        } else {
            // Variable-size items: delegate to the offset-based list decoder (no length cap).
            decode_list_of_variable_length_items(bytes, None)
        }
    }
}
/// Decodes `bytes` as if it were a list of variable-length items.
///
/// The `ssz::SszDecoder` can also perform this functionality, however it it significantly faster
/// as it is optimized to read same-typed items whilst `ssz::SszDecoder` supports reading items of
/// differing types.
pub fn decode_list_of_variable_length_items<T: Decode>(
/// The `ssz::SszDecoder` can also perform this functionality, however this function is
/// significantly faster as it is optimized to read same-typed items whilst `ssz::SszDecoder`
/// supports reading items of differing types.
pub fn decode_list_of_variable_length_items<T: Decode, Container: TryFromIter<T>>(
bytes: &[u8],
max_len: Option<usize>,
) -> Result<Vec<T>, DecodeError> {
) -> Result<Container, DecodeError> {
if bytes.is_empty() {
return Ok(vec![]);
return Container::try_from_iter(iter::empty()).map_err(|e| {
DecodeError::BytesInvalid(format!("Error trying to collect empty list: {:?}", e))
});
}
let first_offset = read_offset(bytes)?;
@ -433,35 +484,27 @@ pub fn decode_list_of_variable_length_items<T: Decode>(
)));
}
// Only initialize the vec with a capacity if a maximum length is provided.
//
// We assume that if a max length is provided then the application is able to handle an
// allocation of this size.
let mut values = if max_len.is_some() {
Vec::with_capacity(num_items)
} else {
vec![]
};
let mut offset = first_offset;
for i in 1..=num_items {
let slice_option = if i == num_items {
bytes.get(offset..)
} else {
let start = offset;
process_results(
(1..=num_items).map(|i| {
let slice_option = if i == num_items {
bytes.get(offset..)
} else {
let start = offset;
let next_offset = read_offset(&bytes[(i * BYTES_PER_LENGTH_OFFSET)..])?;
offset = sanitize_offset(next_offset, Some(offset), bytes.len(), Some(first_offset))?;
let next_offset = read_offset(&bytes[(i * BYTES_PER_LENGTH_OFFSET)..])?;
offset =
sanitize_offset(next_offset, Some(offset), bytes.len(), Some(first_offset))?;
bytes.get(start..offset)
};
bytes.get(start..offset)
};
let slice = slice_option.ok_or(DecodeError::OutOfBoundsByte { i: offset })?;
values.push(T::from_ssz_bytes(slice)?);
}
Ok(values)
let slice = slice_option.ok_or(DecodeError::OutOfBoundsByte { i: offset })?;
T::from_ssz_bytes(slice)
}),
|iter| iter.try_collect(),
)?
.map_err(|e| DecodeError::BytesInvalid(format!("Error collecting into container: {:?}", e)))
}
#[cfg(test)]

View File

@ -0,0 +1,96 @@
use smallvec::SmallVec;
use std::collections::{BTreeMap, BTreeSet};
use std::convert::Infallible;
use std::fmt::Debug;
/// Partial variant of `std::iter::FromIterator`.
///
/// This trait is implemented for types which can be constructed from an iterator of decoded SSZ
/// values, but which may refuse values once a length limit is reached.
pub trait TryFromIter<T>: Sized {
    /// Error returned when the container refuses the iterator's items (e.g. a length limit).
    type Error: Debug;
    /// Attempt to build `Self` by consuming `iter`, failing with `Self::Error` on refusal.
    fn try_from_iter<I>(iter: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>;
}
// It would be nice to be able to do a blanket impl, e.g.
//
// `impl TryFromIter<T> for C where C: FromIterator<T>`
//
// However this runs into trait coherence issues due to the type parameter `T` on `TryFromIter`.
//
// E.g. If we added an impl downstream for `List<T, N>` then another crate downstream of that
// could legally add an impl of `FromIterator<Local> for List<Local, N>` which would create
// two conflicting implementations for `List<Local, N>`. Hence the `List<T, N>` impl is disallowed
// by the compiler in the presence of the blanket impl. That's obviously annoying, so we opt to
// abandon the blanket impl in favour of impls for selected types.
impl<T> TryFromIter<T> for Vec<T> {
    type Error = Infallible;

    /// A `Vec` imposes no length limit, so collection can never fail.
    fn try_from_iter<I>(iter: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>,
    {
        Ok(iter.into_iter().collect())
    }
}
impl<T, const N: usize> TryFromIter<T> for SmallVec<[T; N]> {
    type Error = Infallible;

    /// A `SmallVec` accepts any number of values, so collection can never fail.
    fn try_from_iter<I>(iter: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>,
    {
        Ok(iter.into_iter().collect())
    }
}
impl<K, V> TryFromIter<(K, V)> for BTreeMap<K, V>
where
    K: Ord,
{
    type Error = Infallible;

    /// Build the map from key-value pairs. As with `FromIterator`, a later
    /// duplicate key overwrites an earlier one; collection never fails.
    fn try_from_iter<I>(iter: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = (K, V)>,
    {
        Ok(iter.into_iter().collect())
    }
}
impl<T> TryFromIter<T> for BTreeSet<T>
where
    T: Ord,
{
    type Error = Infallible;

    /// Build the set from the iterator's values; duplicates collapse as with
    /// `FromIterator` and collection never fails.
    fn try_from_iter<I>(iter: I) -> Result<Self, Self::Error>
    where
        I: IntoIterator<Item = T>,
    {
        Ok(iter.into_iter().collect())
    }
}
/// Partial variant of `collect`.
pub trait TryCollect: Iterator {
    /// Attempt to collect this iterator into `C`, where `C` may refuse values
    /// via its [`TryFromIter`] implementation.
    fn try_collect<C>(self) -> Result<C, C::Error>
    where
        C: TryFromIter<Self::Item>;
}
impl<I> TryCollect for I
where
I: Iterator,
{
fn try_collect<C>(self) -> Result<C, C::Error>
where
C: TryFromIter<Self::Item>,
{
C::try_from_iter(self)
}
}

View File

@ -2,6 +2,7 @@ use super::*;
use core::num::NonZeroUsize;
use ethereum_types::{H160, H256, U128, U256};
use smallvec::SmallVec;
use std::collections::{BTreeMap, BTreeSet};
use std::sync::Arc;
macro_rules! impl_encodable_for_uint {
@ -220,6 +221,65 @@ impl<T: Encode> Encode for Arc<T> {
}
}
// Encode transparently through references.
// Encode transparently through references.
//
// Every method delegates to the referent's impl, so a `&T` encodes
// byte-for-byte identically to the `T` it points at. The explicit `T::...`
// qualified calls are deliberate: a plain method call on `*self` could
// re-select this reference impl instead of the underlying one.
impl<'a, T: Encode> Encode for &'a T {
    fn is_ssz_fixed_len() -> bool {
        T::is_ssz_fixed_len()
    }

    fn ssz_fixed_len() -> usize {
        T::ssz_fixed_len()
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        T::ssz_append(self, buf)
    }

    fn ssz_bytes_len(&self) -> usize {
        T::ssz_bytes_len(self)
    }
}
/// Compute the encoded length of a vector-like sequence of `T`.
pub fn sequence_ssz_bytes_len<I, T>(iter: I) -> usize
where
    I: Iterator<Item = T> + ExactSizeIterator,
    T: Encode,
{
    // Capture the length first: the variable-length branch consumes the iterator.
    let length = iter.len();
    if <T as Encode>::is_ssz_fixed_len() {
        // Fixed-length items are packed back-to-back with no offset table.
        length * <T as Encode>::ssz_fixed_len()
    } else {
        // Variable-length items: the payload bytes plus one offset per item.
        let payload: usize = iter.map(|item| item.ssz_bytes_len()).sum();
        payload + BYTES_PER_LENGTH_OFFSET * length
    }
}
/// Encode a vector-like sequence of `T`.
pub fn sequence_ssz_append<I, T>(iter: I, buf: &mut Vec<u8>)
where
    I: Iterator<Item = T> + ExactSizeIterator,
    T: Encode,
{
    let length = iter.len();
    if T::is_ssz_fixed_len() {
        // Fixed-length items append directly; reserve their exact size up front.
        buf.reserve(T::ssz_fixed_len() * length);
        iter.for_each(|item| item.ssz_append(buf));
    } else {
        // Variable-length items go through an encoder that builds the offset table.
        let mut encoder = SszEncoder::container(buf, length * BYTES_PER_LENGTH_OFFSET);
        iter.for_each(|item| encoder.append(&item));
        encoder.finalize();
    }
}
macro_rules! impl_for_vec {
($type: ty) => {
impl<T: Encode> Encode for $type {
@ -228,32 +288,11 @@ macro_rules! impl_for_vec {
}
fn ssz_bytes_len(&self) -> usize {
if <T as Encode>::is_ssz_fixed_len() {
<T as Encode>::ssz_fixed_len() * self.len()
} else {
let mut len = self.iter().map(|item| item.ssz_bytes_len()).sum();
len += BYTES_PER_LENGTH_OFFSET * self.len();
len
}
sequence_ssz_bytes_len(self.iter())
}
fn ssz_append(&self, buf: &mut Vec<u8>) {
if T::is_ssz_fixed_len() {
buf.reserve(T::ssz_fixed_len() * self.len());
for item in self {
item.ssz_append(buf);
}
} else {
let mut encoder =
SszEncoder::container(buf, self.len() * BYTES_PER_LENGTH_OFFSET);
for item in self {
encoder.append(item);
}
encoder.finalize();
}
sequence_ssz_append(self.iter(), buf)
}
}
};
@ -269,6 +308,41 @@ impl_for_vec!(SmallVec<[T; 6]>);
impl_for_vec!(SmallVec<[T; 7]>);
impl_for_vec!(SmallVec<[T; 8]>);
// NOTE(review): SSZ itself has no map type; this encodes the map like a
// sequence of `(K, V)` entries. `BTreeMap::iter` yields entries in ascending
// key order, so the encoding is deterministic.
impl<K, V> Encode for BTreeMap<K, V>
where
    K: Encode + Ord,
    V: Encode,
{
    // The number of entries is not known from the type, so the encoding is
    // always variable-length.
    fn is_ssz_fixed_len() -> bool {
        false
    }

    fn ssz_bytes_len(&self) -> usize {
        sequence_ssz_bytes_len(self.iter())
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        sequence_ssz_append(self.iter(), buf)
    }
}
// NOTE(review): SSZ has no set type; this encodes the set like a sequence of
// its elements. `BTreeSet::iter` yields elements in ascending order, so the
// encoding is deterministic.
impl<T> Encode for BTreeSet<T>
where
    T: Encode + Ord,
{
    // The number of elements is not known from the type, so the encoding is
    // always variable-length.
    fn is_ssz_fixed_len() -> bool {
        false
    }

    fn ssz_bytes_len(&self) -> usize {
        sequence_ssz_bytes_len(self.iter())
    }

    fn ssz_append(&self, buf: &mut Vec<u8>) {
        sequence_ssz_append(self.iter(), buf)
    }
}
impl Encode for bool {
fn is_ssz_fixed_len() -> bool {
true

View File

@ -40,8 +40,8 @@ pub mod legacy;
mod union_selector;
pub use decode::{
impls::decode_list_of_variable_length_items, read_offset, split_union_bytes, Decode,
DecodeError, SszDecoder, SszDecoderBuilder,
impls::decode_list_of_variable_length_items, read_offset, split_union_bytes,
try_from_iter::TryFromIter, Decode, DecodeError, SszDecoder, SszDecoderBuilder,
};
pub use encode::{encode_length, Encode, SszEncoder};
pub use union_selector::UnionSelector;

View File

@ -4,6 +4,8 @@ use ssz_derive::{Decode, Encode};
mod round_trip {
use super::*;
use std::collections::BTreeMap;
use std::iter::FromIterator;
fn round_trip<T: Encode + Decode + std::fmt::Debug + PartialEq>(items: Vec<T>) {
for item in items {
@ -321,6 +323,52 @@ mod round_trip {
round_trip(vec);
}
#[test]
fn btree_map_fixed() {
    // Round-trip an empty map and a map with fixed-length keys and values.
    let populated: BTreeMap<u8, u16> =
        vec![(0u8, 0u16), (1, 2), (2, 4), (4, 6)].into_iter().collect();
    round_trip(vec![BTreeMap::new(), populated]);
}
#[test]
fn btree_map_variable_value() {
    // Maps with variable-length values exercise the offset-based encoding,
    // including an entry whose variable-length fields are all empty.
    let data = vec![
        BTreeMap::new(),
        BTreeMap::from_iter(vec![
            (
                0u64,
                ThreeVariableLen {
                    a: 1,
                    b: vec![3, 5, 7],
                    c: vec![],
                    d: vec![0, 0],
                },
            ),
            (
                1,
                ThreeVariableLen {
                    a: 99,
                    b: vec![1],
                    c: vec![2, 3, 4, 5, 6, 7, 8, 9, 10],
                    d: vec![4, 5, 6, 7, 8],
                },
            ),
            (
                2,
                ThreeVariableLen {
                    a: 0,
                    b: vec![],
                    c: vec![],
                    d: vec![],
                },
            ),
        ]),
    ];
    round_trip(data);
}
}
mod derive_macro {

View File

@ -255,7 +255,8 @@ where
})
.map(Into::into)
} else {
ssz::decode_list_of_variable_length_items(bytes, Some(max_len)).map(|vec| vec.into())
ssz::decode_list_of_variable_length_items(bytes, Some(max_len))
.map(|vec: Vec<_>| vec.into())
}
}
}

View File

@ -1,4 +1,4 @@
TESTS_TAG := v1.1.10
TESTS_TAG := v1.2.0-rc.1
TESTS = general minimal mainnet
TARBALLS = $(patsubst %,%-$(TESTS_TAG).tar.gz,$(TESTS))

View File

@ -33,6 +33,8 @@ excluded_paths = [
"tests/.*/.*/ssz_static/LightClientSnapshot",
# Merkle-proof tests for light clients
"tests/.*/.*/merkle/single_proof",
# Capella tests are disabled for now.
"tests/.*/capella",
# One of the EF researchers likes to pack the tarballs on a Mac
".*\.DS_Store.*"
]

View File

@ -276,7 +276,8 @@ impl<E: EthSpec, T: EpochTransition<E>> Case for EpochProcessing<E, T> {
&& T::name() != "inactivity_updates"
&& T::name() != "participation_flag_updates"
}
ForkName::Altair | ForkName::Merge => true, // TODO: revisit when tests are out
// No phase0 tests for Altair and later.
ForkName::Altair | ForkName::Merge => T::name() != "participation_record_updates",
}
}

View File

@ -52,13 +52,13 @@ pub struct Checks {
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged, deny_unknown_fields)]
pub enum Step<B, A, P, S> {
pub enum Step<B, A, AS, P> {
Tick { tick: u64 },
ValidBlock { block: B },
MaybeValidBlock { block: B, valid: bool },
Attestation { attestation: A },
AttesterSlashing { attester_slashing: AS },
PowBlock { pow_block: P },
AttesterSlashing { attester_slashing: S },
Checks { checks: Box<Checks> },
}
@ -75,12 +75,7 @@ pub struct ForkChoiceTest<E: EthSpec> {
pub anchor_state: BeaconState<E>,
pub anchor_block: BeaconBlock<E>,
#[allow(clippy::type_complexity)]
pub steps: Vec<Step<SignedBeaconBlock<E>, Attestation<E>, PowBlock, AttesterSlashing<E>>>,
}
/// Spec to be used for fork choice tests.
pub fn fork_choice_spec<E: EthSpec>(fork_name: ForkName) -> ChainSpec {
testing_spec::<E>(fork_name)
pub steps: Vec<Step<SignedBeaconBlock<E>, Attestation<E>, AttesterSlashing<E>, PowBlock>>,
}
impl<E: EthSpec> LoadCase for ForkChoiceTest<E> {
@ -92,7 +87,7 @@ impl<E: EthSpec> LoadCase for ForkChoiceTest<E> {
.to_str()
.expect("path must be valid OsStr")
.to_string();
let spec = &fork_choice_spec::<E>(fork_name);
let spec = &testing_spec::<E>(fork_name);
let steps: Vec<Step<String, String, String, String>> =
yaml_decode_file(&path.join("steps.yaml"))?;
// Resolve the object names in `steps.yaml` into actual decoded block/attestation objects.
@ -116,14 +111,14 @@ impl<E: EthSpec> LoadCase for ForkChoiceTest<E> {
ssz_decode_file(&path.join(format!("{}.ssz_snappy", attestation)))
.map(|attestation| Step::Attestation { attestation })
}
Step::PowBlock { pow_block } => {
ssz_decode_file(&path.join(format!("{}.ssz_snappy", pow_block)))
.map(|pow_block| Step::PowBlock { pow_block })
}
Step::AttesterSlashing { attester_slashing } => {
ssz_decode_file(&path.join(format!("{}.ssz_snappy", attester_slashing)))
.map(|attester_slashing| Step::AttesterSlashing { attester_slashing })
}
Step::PowBlock { pow_block } => {
ssz_decode_file(&path.join(format!("{}.ssz_snappy", pow_block)))
.map(|pow_block| Step::PowBlock { pow_block })
}
Step::Checks { checks } => Ok(Step::Checks { checks }),
})
.collect::<Result<_, _>>()?;
@ -159,15 +154,12 @@ impl<E: EthSpec> Case for ForkChoiceTest<E> {
}
fn result(&self, _case_index: usize, fork_name: ForkName) -> Result<(), Error> {
let tester = Tester::new(self, fork_choice_spec::<E>(fork_name))?;
let tester = Tester::new(self, testing_spec::<E>(fork_name))?;
// TODO(merge): re-enable this test before production.
// This test is skipped until we can do retrospective confirmations of the terminal
// block after an optimistic sync.
if self.description == "block_lookup_failed"
//TODO(sean): enable once we implement equivocation logic (https://github.com/sigp/lighthouse/issues/3241)
|| self.description == "discard_equivocations"
{
if self.description == "block_lookup_failed" {
return Err(Error::SkippedKnownFailure);
};
@ -179,11 +171,10 @@ impl<E: EthSpec> Case for ForkChoiceTest<E> {
tester.process_block(block.clone(), *valid)?
}
Step::Attestation { attestation } => tester.process_attestation(attestation)?,
Step::AttesterSlashing { attester_slashing } => {
tester.process_attester_slashing(attester_slashing)
}
Step::PowBlock { pow_block } => tester.process_pow_block(pow_block),
//TODO(sean): enable once we implement equivocation logic (https://github.com/sigp/lighthouse/issues/3241)
Step::AttesterSlashing {
attester_slashing: _,
} => (),
Step::Checks { checks } => {
let Checks {
head,
@ -443,6 +434,14 @@ impl<E: EthSpec> Tester<E> {
.map_err(|e| Error::InternalError(format!("attestation import failed with {:?}", e)))
}
/// Apply an attester slashing directly to fork choice via the write lock.
pub fn process_attester_slashing(&self, attester_slashing: &AttesterSlashing<E>) {
    let mut fork_choice = self.harness.chain.canonical_head.fork_choice_write_lock();
    fork_choice.on_attester_slashing(attester_slashing);
}
pub fn process_pow_block(&self, pow_block: &PowBlock) {
let el = self.harness.mock_execution_layer.as_ref().unwrap();

View File

@ -52,7 +52,7 @@ pub trait Handler {
.filter(|e| e.file_type().map(|ty| ty.is_dir()).unwrap_or(false))
};
let test_cases = fs::read_dir(&handler_path)
.expect("handler dir exists")
.unwrap_or_else(|e| panic!("handler dir {} exists: {:?}", handler_path.display(), e))
.filter_map(as_directory)
.flat_map(|suite| fs::read_dir(suite.path()).expect("suite dir exists"))
.filter_map(as_directory)

View File

@ -377,8 +377,9 @@ fn epoch_processing_participation_record_updates() {
#[test]
fn epoch_processing_sync_committee_updates() {
// There are presently no mainnet tests, see:
// https://github.com/ethereum/consensus-spec-tests/issues/29
EpochProcessingHandler::<MinimalEthSpec, SyncCommitteeUpdates>::default().run();
EpochProcessingHandler::<MainnetEthSpec, SyncCommitteeUpdates>::default().run();
}
#[test]