Merge branch 'master' into signature-scheme-update

Kirk Baird 2019-02-18 10:54:26 +11:00
commit 2c8aa17e0d
38 changed files with 2017 additions and 1280 deletions

View File

@ -9,9 +9,11 @@ members = [
"eth2/utils/boolean-bitfield",
"eth2/utils/hashing",
"eth2/utils/honey-badger-split",
"eth2/utils/int_to_bytes",
"eth2/utils/slot_clock",
"eth2/utils/ssz",
"eth2/utils/vec_shuffle",
"eth2/utils/swap_or_not_shuffle",
"eth2/utils/fisher_yates_shuffle",
"beacon_node",
"beacon_node/db",
"beacon_node/beacon_chain",

View File

@ -1,7 +1,8 @@
use crate::cached_beacon_state::CachedBeaconState;
use state_processing::validate_attestation_without_signature;
use std::collections::{HashMap, HashSet};
use types::{
beacon_state::CommitteesError, AggregateSignature, Attestation, AttestationData, BeaconState,
beacon_state::BeaconStateError, AggregateSignature, Attestation, AttestationData, BeaconState,
Bitfield, ChainSpec, FreeAttestation, Signature,
};
@ -76,12 +77,12 @@ impl AttestationAggregator {
/// - The signature is verified against that of the validator at `validator_index`.
pub fn process_free_attestation(
&mut self,
state: &BeaconState,
cached_state: &CachedBeaconState,
free_attestation: &FreeAttestation,
spec: &ChainSpec,
) -> Result<Outcome, CommitteesError> {
) -> Result<Outcome, BeaconStateError> {
let (slot, shard, committee_index) = some_or_invalid!(
state.attestation_slot_and_shard_for_validator(
cached_state.attestation_slot_and_shard_for_validator(
free_attestation.validator_index as usize,
spec,
)?,
@ -104,7 +105,8 @@ impl AttestationAggregator {
let signable_message = free_attestation.data.signable_message(PHASE_0_CUSTODY_BIT);
let validator_record = some_or_invalid!(
state
cached_state
.state
.validator_registry
.get(free_attestation.validator_index as usize),
Message::BadValidatorIndex

View File

@ -1,4 +1,5 @@
use crate::attestation_aggregator::{AttestationAggregator, Outcome as AggregationOutcome};
use crate::cached_beacon_state::CachedBeaconState;
use crate::checkpoint::CheckPoint;
use db::{
stores::{BeaconBlockStore, BeaconStateStore},
@ -14,7 +15,7 @@ use state_processing::{
};
use std::sync::Arc;
use types::{
beacon_state::CommitteesError,
beacon_state::BeaconStateError,
readers::{BeaconBlockReader, BeaconStateReader},
AttestationData, BeaconBlock, BeaconBlockBody, BeaconState, ChainSpec, Crosslink, Deposit,
Epoch, Eth1Data, FreeAttestation, Hash256, PublicKey, Signature, Slot,
@ -24,7 +25,7 @@ use types::{
pub enum Error {
InsufficientValidators,
BadRecentBlockRoots,
CommitteesError(CommitteesError),
BeaconStateError(BeaconStateError),
DBInconsistent(String),
DBError(String),
ForkChoiceError(ForkChoiceError),
@ -69,6 +70,7 @@ pub struct BeaconChain<T: ClientDB + Sized, U: SlotClock, F: ForkChoice> {
canonical_head: RwLock<CheckPoint>,
finalized_head: RwLock<CheckPoint>,
pub state: RwLock<BeaconState>,
pub cached_state: RwLock<CachedBeaconState>,
pub spec: ChainSpec,
pub fork_choice: RwLock<F>,
}
@ -99,7 +101,7 @@ where
initial_validator_deposits,
latest_eth1_data,
&spec,
);
)?;
let state_root = genesis_state.canonical_root();
state_store.put(&state_root, &ssz_encode(&genesis_state)[..])?;
@ -107,6 +109,11 @@ where
let block_root = genesis_block.canonical_root();
block_store.put(&block_root, &ssz_encode(&genesis_block)[..])?;
let cached_state = RwLock::new(CachedBeaconState::from_beacon_state(
genesis_state.clone(),
spec.clone(),
)?);
let finalized_head = RwLock::new(CheckPoint::new(
genesis_block.clone(),
block_root,
@ -127,6 +134,7 @@ where
slot_clock,
attestation_aggregator,
state: RwLock::new(genesis_state.clone()),
cached_state,
finalized_head,
canonical_head,
spec,
@ -252,7 +260,8 @@ where
///
/// Information is read from the present `beacon_state` shuffling, so only information from the
/// present and prior epoch is available.
pub fn block_proposer(&self, slot: Slot) -> Result<usize, CommitteesError> {
pub fn block_proposer(&self, slot: Slot) -> Result<usize, BeaconStateError> {
trace!("BeaconChain::block_proposer: slot: {}", slot);
let index = self
.state
.read()
@ -273,9 +282,13 @@ where
pub fn validator_attestion_slot_and_shard(
&self,
validator_index: usize,
) -> Result<Option<(Slot, u64)>, CommitteesError> {
) -> Result<Option<(Slot, u64)>, BeaconStateError> {
trace!(
"BeaconChain::validator_attestion_slot_and_shard: validator_index: {}",
validator_index
);
if let Some((slot, shard, _committee)) = self
.state
.cached_state
.read()
.attestation_slot_and_shard_for_validator(validator_index, &self.spec)?
{
@ -287,6 +300,7 @@ where
/// Produce an `AttestationData` that is valid for the present `slot` and given `shard`.
pub fn produce_attestation_data(&self, shard: u64) -> Result<AttestationData, Error> {
trace!("BeaconChain::produce_attestation_data: shard: {}", shard);
let justified_epoch = self.justified_epoch();
let justified_block_root = *self
.state
@ -332,9 +346,7 @@ where
let aggregation_outcome = self
.attestation_aggregator
.write()
.process_free_attestation(&self.state.read(), &free_attestation, &self.spec)?;
// TODO: Check this comment
//.map_err(|e| e.into())?;
.process_free_attestation(&self.cached_state.read(), &free_attestation, &self.spec)?;
// return if the attestation is invalid
if !aggregation_outcome.valid {
@ -489,6 +501,9 @@ where
);
// Update the local state variable.
*self.state.write() = state.clone();
// Update the cached state variable.
*self.cached_state.write() =
CachedBeaconState::from_beacon_state(state.clone(), self.spec.clone())?;
}
Ok(BlockProcessingOutcome::ValidBlock(ValidBlock::Processed))
@ -537,9 +552,15 @@ where
},
};
state
.per_block_processing_without_verifying_block_signature(&block, &self.spec)
.ok()?;
trace!("BeaconChain::produce_block: updating state for new block.",);
let result =
state.per_block_processing_without_verifying_block_signature(&block, &self.spec);
trace!(
"BeaconNode::produce_block: state processing result: {:?}",
result
);
result.ok()?;
let state_root = state.canonical_root();
@ -588,8 +609,8 @@ impl From<ForkChoiceError> for Error {
}
}
impl From<CommitteesError> for Error {
fn from(e: CommitteesError) -> Error {
Error::CommitteesError(e)
impl From<BeaconStateError> for Error {
fn from(e: BeaconStateError) -> Error {
Error::BeaconStateError(e)
}
}

View File

@ -0,0 +1,150 @@
use log::{debug, trace};
use std::collections::HashMap;
use types::{beacon_state::BeaconStateError, BeaconState, ChainSpec, Epoch, Slot};
pub const CACHE_PREVIOUS: bool = false;
pub const CACHE_CURRENT: bool = true;
pub const CACHE_NEXT: bool = false;
pub type CrosslinkCommittees = Vec<(Vec<usize>, u64)>;
pub type Shard = u64;
pub type CommitteeIndex = u64;
pub type AttestationDuty = (Slot, Shard, CommitteeIndex);
pub type AttestationDutyMap = HashMap<u64, AttestationDuty>;
// TODO: CachedBeaconState is presently duplicating `BeaconState` and `ChainSpec`. This is a
// massive memory waste; switch them to references.
pub struct CachedBeaconState {
pub state: BeaconState,
committees: Vec<Vec<CrosslinkCommittees>>,
attestation_duties: Vec<AttestationDutyMap>,
next_epoch: Epoch,
current_epoch: Epoch,
previous_epoch: Epoch,
spec: ChainSpec,
}
impl CachedBeaconState {
pub fn from_beacon_state(
state: BeaconState,
spec: ChainSpec,
) -> Result<Self, BeaconStateError> {
let current_epoch = state.current_epoch(&spec);
let previous_epoch = if current_epoch == spec.genesis_epoch {
current_epoch
} else {
current_epoch.saturating_sub(1_u64)
};
let next_epoch = state.next_epoch(&spec);
let mut committees: Vec<Vec<CrosslinkCommittees>> = Vec::with_capacity(3);
let mut attestation_duties: Vec<AttestationDutyMap> = Vec::with_capacity(3);
if CACHE_PREVIOUS {
debug!("from_beacon_state: building previous epoch cache.");
let cache = build_epoch_cache(&state, previous_epoch, &spec)?;
committees.push(cache.committees);
attestation_duties.push(cache.attestation_duty_map);
} else {
committees.push(vec![]);
attestation_duties.push(HashMap::new());
}
if CACHE_CURRENT {
debug!("from_beacon_state: building current epoch cache.");
let cache = build_epoch_cache(&state, current_epoch, &spec)?;
committees.push(cache.committees);
attestation_duties.push(cache.attestation_duty_map);
} else {
committees.push(vec![]);
attestation_duties.push(HashMap::new());
}
if CACHE_NEXT {
debug!("from_beacon_state: building next epoch cache.");
let cache = build_epoch_cache(&state, next_epoch, &spec)?;
committees.push(cache.committees);
attestation_duties.push(cache.attestation_duty_map);
} else {
committees.push(vec![]);
attestation_duties.push(HashMap::new());
}
Ok(Self {
state,
committees,
attestation_duties,
next_epoch,
current_epoch,
previous_epoch,
spec,
})
}
fn slot_to_cache_index(&self, slot: Slot) -> Option<usize> {
trace!("slot_to_cache_index: cache lookup");
match slot.epoch(self.spec.epoch_length) {
epoch if (epoch == self.previous_epoch) & CACHE_PREVIOUS => Some(0),
epoch if (epoch == self.current_epoch) & CACHE_CURRENT => Some(1),
epoch if (epoch == self.next_epoch) & CACHE_NEXT => Some(2),
_ => None,
}
}
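// With the flags above, `slot_to_cache_index` reserves index 0 for the
// previous epoch, 1 for the current epoch and 2 for the next epoch; under the
// current defaults only current-epoch slots resolve to a cache index, and all
// other slots fall through to `None`.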
/// Returns the `slot`, `shard` and `committee_index` for which a validator must produce an
/// attestation.
///
/// Cached method.
///
/// Spec v0.2.0
pub fn attestation_slot_and_shard_for_validator(
&self,
validator_index: usize,
_spec: &ChainSpec,
) -> Result<Option<(Slot, u64, u64)>, BeaconStateError> {
// Get the result for this epoch.
let cache_index = self
.slot_to_cache_index(self.state.slot)
.expect("Current epoch should always have a cache index.");
let duties = self.attestation_duties[cache_index]
.get(&(validator_index as u64))
.and_then(|tuple| Some(*tuple));
Ok(duties)
}
}
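// A hedged usage sketch of the cache above (the helper itself is hypothetical,
// but the signatures match `from_beacon_state` and
// `attestation_slot_and_shard_for_validator` as defined in this file):
fn _example_duty_lookup(
    state: BeaconState,
    spec: ChainSpec,
    validator_index: usize,
) -> Result<Option<AttestationDuty>, BeaconStateError> {
    // Build the per-epoch committee caches once...
    let cached = CachedBeaconState::from_beacon_state(state, spec.clone())?;
    // ...then resolve the validator's (slot, shard, committee_index) duty
    // without recomputing the shuffling.
    cached.attestation_slot_and_shard_for_validator(validator_index, &spec)
}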
struct EpochCacheResult {
committees: Vec<CrosslinkCommittees>,
attestation_duty_map: AttestationDutyMap,
}
fn build_epoch_cache(
state: &BeaconState,
epoch: Epoch,
spec: &ChainSpec,
) -> Result<EpochCacheResult, BeaconStateError> {
let mut epoch_committees: Vec<CrosslinkCommittees> =
Vec::with_capacity(spec.epoch_length as usize);
let mut attestation_duty_map: AttestationDutyMap = HashMap::new();
for slot in epoch.slot_iter(spec.epoch_length) {
let slot_committees = state.get_crosslink_committees_at_slot(slot, false, spec)?;
for (committee, shard) in slot_committees {
for (committee_index, validator_index) in committee.iter().enumerate() {
attestation_duty_map.insert(
*validator_index as u64,
(slot, shard, committee_index as u64),
);
}
}
epoch_committees.push(state.get_crosslink_committees_at_slot(slot, false, spec)?)
}
Ok(EpochCacheResult {
committees: epoch_committees,
attestation_duty_map,
})
}

View File

@ -1,5 +1,6 @@
mod attestation_aggregator;
mod beacon_chain;
mod cached_beacon_state;
mod checkpoint;
pub use self::beacon_chain::{BeaconChain, Error};

View File

@ -6,7 +6,7 @@ use db::{
stores::{BeaconBlockStore, BeaconStateStore},
MemoryDB,
};
use fork_choice::{optimised_lmd_ghost::OptimisedLMDGhost, slow_lmd_ghost::SlowLMDGhost}; // import all the algorithms
use fork_choice::OptimisedLMDGhost;
use log::debug;
use rayon::prelude::*;
use slot_clock::TestingSlotClock;
@ -128,7 +128,18 @@ impl BeaconChainHarness {
pub fn increment_beacon_chain_slot(&mut self) -> Slot {
let slot = self.beacon_chain.present_slot() + 1;
debug!("Incrementing BeaconChain slot to {}.", slot);
let nth_slot = slot
- slot
.epoch(self.spec.epoch_length)
.start_slot(self.spec.epoch_length);
let nth_epoch = slot.epoch(self.spec.epoch_length) - self.spec.genesis_epoch;
debug!(
"Advancing BeaconChain to slot {}, epoch {} (epoch height: {}, slot {} in epoch.).",
slot,
slot.epoch(self.spec.epoch_length),
nth_epoch,
nth_slot
);
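// For example, with `ChainSpec::few_validators()` (epoch_length == 8, genesis
// at slot 2^19), advancing to slot 2^19 + 9 logs epoch height 1 and slot 1
// within the epoch.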
self.beacon_chain.slot_clock.set_slot(slot.as_u64());
self.beacon_chain.advance_state(slot).unwrap();
@ -209,6 +220,7 @@ impl BeaconChainHarness {
self.increment_beacon_chain_slot();
// Produce a new block.
debug!("Producing block...");
let block = self.produce_block();
debug!("Submitting block for processing...");
self.beacon_chain.process_block(block).unwrap();

View File

@ -10,7 +10,7 @@ use block_producer::{BlockProducer, Error as BlockPollError};
use db::MemoryDB;
use direct_beacon_node::DirectBeaconNode;
use direct_duties::DirectDuties;
use fork_choice::{optimised_lmd_ghost::OptimisedLMDGhost, slow_lmd_ghost::SlowLMDGhost};
use fork_choice::OptimisedLMDGhost;
use local_signer::LocalSigner;
use slot_clock::TestingSlotClock;
use std::sync::Arc;

View File

@ -1,19 +1,14 @@
use env_logger::{Builder, Env};
use log::debug;
use test_harness::BeaconChainHarness;
use types::{ChainSpec, Slot};
use types::ChainSpec;
#[test]
#[ignore]
fn it_can_build_on_genesis_block() {
let mut spec = ChainSpec::foundation();
spec.genesis_slot = Slot::new(spec.epoch_length * 8);
Builder::from_env(Env::default().default_filter_or("info")).init();
/*
spec.shard_count = spec.shard_count / 8;
spec.target_committee_size = spec.target_committee_size / 8;
*/
let validator_count = 1000;
let spec = ChainSpec::few_validators();
let validator_count = 8;
let mut harness = BeaconChainHarness::new(spec, validator_count as usize);
@ -23,21 +18,22 @@ fn it_can_build_on_genesis_block() {
#[test]
#[ignore]
fn it_can_produce_past_first_epoch_boundary() {
Builder::from_env(Env::default().default_filter_or("debug")).init();
Builder::from_env(Env::default().default_filter_or("info")).init();
let validator_count = 100;
let spec = ChainSpec::few_validators();
let validator_count = 8;
debug!("Starting harness build...");
let mut harness = BeaconChainHarness::new(ChainSpec::foundation(), validator_count);
let mut harness = BeaconChainHarness::new(spec, validator_count);
debug!("Harness built, tests starting..");
let blocks = harness.spec.epoch_length * 3 + 1;
let blocks = harness.spec.epoch_length * 2 + 1;
for i in 0..blocks {
harness.advance_chain_with_block();
debug!("Produced block {}/{}.", i, blocks);
debug!("Produced block {}/{}.", i + 1, blocks);
}
let dump = harness.chain_dump().expect("Chain dump failed.");

View File

@ -5,6 +5,7 @@ authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
[dependencies]
slot_clock = { path = "../../eth2/utils/slot_clock" }
ssz = { path = "../../eth2/utils/ssz" }
types = { path = "../../eth2/types" }
int_to_bytes = { path = "../utils/int_to_bytes" }
slot_clock = { path = "../utils/slot_clock" }
ssz = { path = "../utils/ssz" }
types = { path = "../types" }

View File

@ -1,8 +1,8 @@
pub mod test_utils;
mod traits;
use int_to_bytes::int_to_bytes32;
use slot_clock::SlotClock;
use ssz::ssz_encode;
use std::sync::Arc;
use types::{BeaconBlock, ChainSpec, Slot};
@ -132,7 +132,7 @@ impl<T: SlotClock, U: BeaconNode, V: DutiesReader, W: Signer> BlockProducer<T, U
fn produce_block(&mut self, slot: Slot) -> Result<PollOutcome, Error> {
let randao_reveal = {
// TODO: add domain, etc to this message. Also ensure result matches `into_to_bytes32`.
let message = ssz_encode(&slot.epoch(self.spec.epoch_length));
let message = int_to_bytes32(slot.epoch(self.spec.epoch_length).as_u64());
match self
.signer

View File

@ -10,3 +10,9 @@ ssz = { path = "../utils/ssz" }
types = { path = "../types" }
fast-math = "0.1.1"
byteorder = "1.3.1"
[dev-dependencies]
yaml-rust = "0.4.2"
bls = { path = "../utils/bls" }
slot_clock = { path = "../utils/slot_clock" }
beacon_chain = { path = "../../beacon_node/beacon_chain" }

View File

@ -44,13 +44,15 @@ extern crate types;
pub mod longest_chain;
pub mod optimised_lmd_ghost;
pub mod protolambda_lmd_ghost;
pub mod slow_lmd_ghost;
use db::stores::BeaconBlockAtSlotError;
use db::DBError;
use types::{BeaconBlock, Hash256};
pub use longest_chain::LongestChain;
pub use optimised_lmd_ghost::OptimisedLMDGhost;
/// Defines the interface for fork choices. Each fork choice defines its own data structures,
/// which are built up during block processing through the `add_block` and `add_attestation` functions.
/// The main fork choice algorithm is specified in `find_head`.
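// A hedged sketch of how a consumer might drive an implementation of this
// trait (the helper is hypothetical; the method signatures are those
// implemented by `LongestChain` later in this diff):
fn _update_and_find_head<F: ForkChoice>(
    fork_choice: &mut F,
    block: &BeaconBlock,
    block_hash: &Hash256,
    attester_index: u64,
    attestation_target: &Hash256,
    justified_root: &Hash256,
) -> Result<Hash256, ForkChoiceError> {
    // Register the newly processed block and one of its attestations...
    fork_choice.add_block(block, block_hash)?;
    fork_choice.add_attestation(attester_index, attestation_target)?;
    // ...then ask for the canonical head, starting from the justified root.
    fork_choice.find_head(justified_root)
}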
@ -83,6 +85,7 @@ pub enum ForkChoiceError {
CannotFindBestChild,
ChildrenNotFound,
StorageError(String),
HeadNotFound,
}
impl From<DBError> for ForkChoiceError {
@ -113,6 +116,4 @@ pub enum ForkChoiceAlgorithms {
SlowLMDGhost,
/// An optimised version of LMD-GHOST by Vitalik.
OptimisedLMDGhost,
/// An optimised version of LMD-GHOST by Protolambda.
ProtoLMDGhost,
}

View File

@ -1,52 +1,73 @@
use db::stores::BeaconBlockStore;
use db::{ClientDB, DBError};
use ssz::{Decodable, DecodeError};
use crate::{ForkChoice, ForkChoiceError};
use db::{stores::BeaconBlockStore, ClientDB};
use std::sync::Arc;
use types::{BeaconBlock, Hash256, Slot};
pub enum ForkChoiceError {
BadSszInDatabase,
MissingBlock,
DBError(String),
}
pub fn longest_chain<T>(
head_block_hashes: &[Hash256],
block_store: &Arc<BeaconBlockStore<T>>,
) -> Result<Option<usize>, ForkChoiceError>
pub struct LongestChain<T>
where
T: ClientDB + Sized,
{
let mut head_blocks: Vec<(usize, BeaconBlock)> = vec![];
/// List of head block hashes
head_block_hashes: Vec<Hash256>,
/// Block storage access.
block_store: Arc<BeaconBlockStore<T>>,
}
impl<T> LongestChain<T>
where
T: ClientDB + Sized,
{
pub fn new(block_store: Arc<BeaconBlockStore<T>>) -> Self {
LongestChain {
head_block_hashes: Vec::new(),
block_store,
}
}
}
impl<T: ClientDB + Sized> ForkChoice for LongestChain<T> {
fn add_block(
&mut self,
block: &BeaconBlock,
block_hash: &Hash256,
) -> Result<(), ForkChoiceError> {
// Add the block hash to `head_block_hashes`, removing its parent hash if present.
self.head_block_hashes
.retain(|hash| *hash != block.parent_root);
self.head_block_hashes.push(*block_hash);
Ok(())
}
fn add_attestation(&mut self, _: u64, _: &Hash256) -> Result<(), ForkChoiceError> {
// do nothing
Ok(())
}
fn find_head(&mut self, _: &Hash256) -> Result<Hash256, ForkChoiceError> {
let mut head_blocks: Vec<(usize, BeaconBlock)> = vec![];
/*
* Load all the head_block hashes from the DB as SszBeaconBlocks.
*/
for (index, block_hash) in head_block_hashes.iter().enumerate() {
let ssz = block_store
.get(&block_hash)?
.ok_or(ForkChoiceError::MissingBlock)?;
let (block, _) = BeaconBlock::ssz_decode(&ssz, 0)?;
for (index, block_hash) in self.head_block_hashes.iter().enumerate() {
let block = self
.block_store
.get_deserialized(&block_hash)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*block_hash))?;
head_blocks.push((index, block));
}
/*
* Loop through all the head blocks and find the highest slot.
*/
let highest_slot: Option<Slot> = None;
for (_, block) in &head_blocks {
let slot = block.slot;
let highest_slot = head_blocks
.iter()
.fold(Slot::from(0u64), |highest, (_, block)| {
std::cmp::max(block.slot, highest)
});
match highest_slot {
None => Some(slot),
Some(winning_slot) => {
if slot > winning_slot {
Some(slot)
} else {
Some(winning_slot)
}
}
};
// if we find no blocks, return Error
if highest_slot == 0 {
return Err(ForkChoiceError::HeadNotFound);
}
/*
@ -55,39 +76,27 @@ where
* Ultimately, the index of the head_block hash with the highest slot and highest block
* hash will be the winner.
*/
match highest_slot {
None => Ok(None),
Some(highest_slot) => {
let mut highest_blocks = vec![];
for (index, block) in head_blocks {
let head_index: Option<usize> =
head_blocks
.iter()
.fold(None, |smallest_index, (index, block)| {
if block.slot == highest_slot {
highest_blocks.push((index, block))
if smallest_index.is_none() {
return Some(*index);
}
return Some(std::cmp::min(
*index,
smallest_index.expect("Cannot be None"),
));
}
smallest_index
});
if head_index.is_none() {
return Err(ForkChoiceError::HeadNotFound);
}
highest_blocks.sort_by(|a, b| head_block_hashes[a.0].cmp(&head_block_hashes[b.0]));
let (index, _) = highest_blocks[0];
Ok(Some(index))
}
}
}
impl From<DecodeError> for ForkChoiceError {
fn from(_: DecodeError) -> Self {
ForkChoiceError::BadSszInDatabase
}
}
impl From<DBError> for ForkChoiceError {
fn from(e: DBError) -> Self {
ForkChoiceError::DBError(e.message)
}
}
#[cfg(test)]
mod tests {
#[test]
fn test_naive_fork_choice() {
assert_eq!(2 + 2, 4);
Ok(self.head_block_hashes[head_index.unwrap()])
}
}

View File

@ -30,8 +30,8 @@ use fast_math::log2_raw;
use std::collections::HashMap;
use std::sync::Arc;
use types::{
readers::BeaconBlockReader, slot_epoch::Slot, slot_height::SlotHeight,
validator_registry::get_active_validator_indices, BeaconBlock, Hash256,
readers::BeaconBlockReader, validator_registry::get_active_validator_indices, BeaconBlock,
Hash256, Slot, SlotHeight,
};
//TODO: Pruning - Children
@ -116,7 +116,7 @@ where
.ok_or_else(|| ForkChoiceError::MissingBeaconState(*state_root))?;
let active_validator_indices = get_active_validator_indices(
&current_state.validator_registry,
&current_state.validator_registry[..],
block_slot.epoch(EPOCH_LENGTH),
);

View File

@ -28,10 +28,8 @@ use db::{
use std::collections::HashMap;
use std::sync::Arc;
use types::{
readers::{BeaconBlockReader, BeaconStateReader},
slot_epoch::Slot,
validator_registry::get_active_validator_indices,
BeaconBlock, Hash256,
readers::BeaconBlockReader, validator_registry::get_active_validator_indices, BeaconBlock,
Hash256, Slot,
};
//TODO: Pruning and syncing

View File

@ -6,6 +6,7 @@ edition = "2018"
[dependencies]
hashing = { path = "../utils/hashing" }
int_to_bytes = { path = "../utils/int_to_bytes" }
integer-sqrt = "0.1"
log = "0.4"
ssz = { path = "../utils/ssz" }

View File

@ -1,9 +1,10 @@
use crate::SlotProcessingError;
use hashing::hash;
use log::debug;
use int_to_bytes::int_to_bytes32;
use log::{debug, trace};
use ssz::{ssz_encode, TreeHash};
use types::{
beacon_state::{AttestationValidationError, CommitteesError},
beacon_state::{AttestationParticipantsError, BeaconStateError},
AggregatePublicKey, Attestation, BeaconBlock, BeaconState, ChainSpec, Crosslink, Epoch, Exit,
Fork, Hash256, PendingAttestation, PublicKey, Signature,
};
@ -41,10 +42,23 @@ pub enum Error {
BadCustodyReseeds,
BadCustodyChallenges,
BadCustodyResponses,
CommitteesError(CommitteesError),
BeaconStateError(BeaconStateError),
SlotProcessingError(SlotProcessingError),
}
#[derive(Debug, PartialEq)]
pub enum AttestationValidationError {
IncludedTooEarly,
IncludedTooLate,
WrongJustifiedSlot,
WrongJustifiedRoot,
BadLatestCrosslinkRoot,
BadSignature,
ShardBlockRootNotZero,
NoBlockRoot,
AttestationParticipantsError(AttestationParticipantsError),
}
macro_rules! ensure {
($condition: expr, $result: expr) => {
if !$condition {
@ -110,7 +124,7 @@ fn per_block_processing_signature_optional(
ensure!(
bls_verify(
&block_proposer.pubkey,
&ssz_encode(&state.current_epoch(spec)),
&int_to_bytes32(state.current_epoch(spec).as_u64()),
&block.randao_reveal,
get_domain(&state.fork, state.current_epoch(spec), DOMAIN_RANDAO)
),
@ -205,6 +219,8 @@ fn per_block_processing_signature_optional(
Error::MaxAttestationsExceeded
);
debug!("Verifying {} attestations.", block.body.attestations.len());
for attestation in &block.body.attestations {
validate_attestation(&state, attestation, spec)?;
@ -217,11 +233,6 @@ fn per_block_processing_signature_optional(
state.latest_attestations.push(pending_attestation);
}
debug!(
"{} attestations verified & processed.",
block.body.attestations.len()
);
/*
* Deposits
*/
@ -298,6 +309,10 @@ fn validate_attestation_signature_optional(
spec: &ChainSpec,
verify_signature: bool,
) -> Result<(), AttestationValidationError> {
trace!(
"validate_attestation_signature_optional: attestation epoch: {}",
attestation.data.slot.epoch(spec.epoch_length)
);
ensure!(
attestation.data.slot + spec.min_attestation_inclusion_delay <= state.slot,
AttestationValidationError::IncludedTooEarly
@ -388,9 +403,9 @@ impl From<AttestationValidationError> for Error {
}
}
impl From<CommitteesError> for Error {
fn from(e: CommitteesError) -> Error {
Error::CommitteesError(e)
impl From<BeaconStateError> for Error {
fn from(e: BeaconStateError) -> Error {
Error::BeaconStateError(e)
}
}
@ -399,3 +414,9 @@ impl From<SlotProcessingError> for Error {
Error::SlotProcessingError(e)
}
}
impl From<AttestationParticipantsError> for AttestationValidationError {
fn from(e: AttestationParticipantsError) -> AttestationValidationError {
AttestationValidationError::AttestationParticipantsError(e)
}
}

View File

@ -5,7 +5,7 @@ use ssz::TreeHash;
use std::collections::{HashMap, HashSet};
use std::iter::FromIterator;
use types::{
beacon_state::{AttestationParticipantsError, CommitteesError, InclusionError},
beacon_state::{AttestationParticipantsError, BeaconStateError, InclusionError},
validator_registry::get_active_validator_indices,
BeaconState, ChainSpec, Crosslink, Epoch, Hash256, PendingAttestation,
};
@ -27,7 +27,7 @@ pub enum Error {
NoBlockRoots,
BaseRewardQuotientIsZero,
NoRandaoSeed,
CommitteesError(CommitteesError),
BeaconStateError(BeaconStateError),
AttestationParticipantsError(AttestationParticipantsError),
InclusionError(InclusionError),
WinningRootError(WinningRootError),
@ -144,8 +144,10 @@ impl EpochProcessable for BeaconState {
let previous_epoch_attester_indices =
self.get_attestation_participants_union(&previous_epoch_attestations[..], spec)?;
let previous_total_balance =
self.get_total_balance(&previous_epoch_attester_indices[..], spec);
let previous_total_balance = self.get_total_balance(
&get_active_validator_indices(&self.validator_registry, previous_epoch),
spec,
);
/*
* Validators targetting the previous justified slot
@ -315,6 +317,11 @@ impl EpochProcessable for BeaconState {
// for slot in self.slot.saturating_sub(2 * spec.epoch_length)..self.slot {
for slot in self.previous_epoch(spec).slot_iter(spec.epoch_length) {
trace!(
"Finding winning root for slot: {} (epoch: {})",
slot,
slot.epoch(spec.epoch_length)
);
let crosslink_committees_at_slot =
self.get_crosslink_committees_at_slot(slot, false, spec)?;
@ -352,7 +359,8 @@ impl EpochProcessable for BeaconState {
/*
* Rewards and Penalities
*/
let base_reward_quotient = previous_total_balance.integer_sqrt();
let base_reward_quotient =
previous_total_balance.integer_sqrt() / spec.base_reward_quotient;
if base_reward_quotient == 0 {
return Err(Error::BaseRewardQuotientIsZero);
}
@ -539,6 +547,12 @@ impl EpochProcessable for BeaconState {
*/
self.previous_calculation_epoch = self.current_calculation_epoch;
self.previous_epoch_start_shard = self.current_epoch_start_shard;
debug!(
"setting previous_epoch_seed to : {}",
self.current_epoch_seed
);
self.previous_epoch_seed = self.current_epoch_seed;
let should_update_validator_registy = if self.finalized_epoch
@ -553,25 +567,24 @@ impl EpochProcessable for BeaconState {
};
if should_update_validator_registy {
trace!("updating validator registry.");
self.update_validator_registry(spec);
self.current_calculation_epoch = next_epoch;
self.current_epoch_start_shard = (self.current_epoch_start_shard
+ self.get_current_epoch_committee_count(spec) as u64)
% spec.shard_count;
self.current_epoch_seed = self
.generate_seed(self.current_calculation_epoch, spec)
.ok_or_else(|| Error::NoRandaoSeed)?;
self.current_epoch_seed = self.generate_seed(self.current_calculation_epoch, spec)?
} else {
trace!("not updating validator registry.");
let epochs_since_last_registry_update =
current_epoch - self.validator_registry_update_epoch;
if (epochs_since_last_registry_update > 1)
& epochs_since_last_registry_update.is_power_of_two()
{
self.current_calculation_epoch = next_epoch;
self.current_epoch_seed = self
.generate_seed(self.current_calculation_epoch, spec)
.ok_or_else(|| Error::NoRandaoSeed)?;
self.current_epoch_seed =
self.generate_seed(self.current_calculation_epoch, spec)?
}
}
@ -689,9 +702,9 @@ impl From<InclusionError> for Error {
}
}
impl From<CommitteesError> for Error {
fn from(e: CommitteesError) -> Error {
Error::CommitteesError(e)
impl From<BeaconStateError> for Error {
fn from(e: BeaconStateError) -> Error {
Error::BeaconStateError(e)
}
}

View File

@ -1,9 +1,9 @@
use crate::{EpochProcessable, EpochProcessingError};
use types::{beacon_state::CommitteesError, BeaconState, ChainSpec, Hash256};
use types::{beacon_state::BeaconStateError, BeaconState, ChainSpec, Hash256};
#[derive(Debug, PartialEq)]
pub enum Error {
CommitteesError(CommitteesError),
BeaconStateError(BeaconStateError),
EpochProcessingError(EpochProcessingError),
}
@ -49,9 +49,9 @@ fn merkle_root(_input: &[Hash256]) -> Hash256 {
Hash256::zero()
}
impl From<CommitteesError> for Error {
fn from(e: CommitteesError) -> Error {
Error::CommitteesError(e)
impl From<BeaconStateError> for Error {
fn from(e: BeaconStateError) -> Error {
Error::BeaconStateError(e)
}
}

View File

@ -18,4 +18,7 @@ serde_derive = "1.0"
serde_json = "1.0"
slog = "^2.2.3"
ssz = { path = "../utils/ssz" }
vec_shuffle = { path = "../utils/vec_shuffle" }
swap_or_not_shuffle = { path = "../utils/swap_or_not_shuffle" }
[dev-dependencies]
env_logger = "0.6.0"

View File

@ -5,51 +5,38 @@ use crate::{
Hash256, PendingAttestation, PublicKey, Signature, Slot, Validator,
};
use honey_badger_split::SplitExt;
use log::trace;
use rand::RngCore;
use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash};
use std::ops::Range;
use vec_shuffle::shuffle;
use swap_or_not_shuffle::get_permutated_index;
pub enum Error {
InsufficientValidators,
BadBlockSignature,
InvalidEpoch(Slot, Range<Epoch>),
CommitteesError(CommitteesError),
}
mod tests;
#[derive(Debug, PartialEq)]
pub enum CommitteesError {
InvalidEpoch,
InsufficientNumberOfValidators,
BadRandao,
pub enum BeaconStateError {
EpochOutOfBounds,
UnableToShuffle,
InsufficientRandaoMixes,
InsufficientValidators,
InsufficientBlockRoots,
InsufficientIndexRoots,
InsufficientAttestations,
InsufficientCommittees,
}
#[derive(Debug, PartialEq)]
pub enum InclusionError {
NoIncludedAttestations,
/// The validator did not participate in an attestation in this period.
NoAttestationsForValidator,
AttestationParticipantsError(AttestationParticipantsError),
}
#[derive(Debug, PartialEq)]
pub enum AttestationParticipantsError {
/// There is no committee for the given shard in the given epoch.
NoCommitteeForShard,
NoCommittees,
BadBitfieldLength,
CommitteesError(CommitteesError),
}
#[derive(Debug, PartialEq)]
pub enum AttestationValidationError {
IncludedTooEarly,
IncludedTooLate,
WrongJustifiedSlot,
WrongJustifiedRoot,
BadLatestCrosslinkRoot,
BadSignature,
ShardBlockRootNotZero,
NoBlockRoot,
AttestationParticipantsError(AttestationParticipantsError),
BeaconStateError(BeaconStateError),
}
macro_rules! safe_add_assign {
@ -110,7 +97,7 @@ impl BeaconState {
initial_validator_deposits: Vec<Deposit>,
latest_eth1_data: Eth1Data,
spec: &ChainSpec,
) -> BeaconState {
) -> Result<BeaconState, BeaconStateError> {
let initial_crosslink = Crosslink {
epoch: spec.genesis_epoch,
shard_block_root: spec.zero_hash,
@ -194,11 +181,9 @@ impl BeaconState {
));
genesis_state.latest_index_roots =
vec![genesis_active_index_root; spec.latest_index_roots_length];
genesis_state.current_epoch_seed = genesis_state
.generate_seed(spec.genesis_epoch, spec)
.expect("Unable to generate seed.");
genesis_state.current_epoch_seed = genesis_state.generate_seed(spec.genesis_epoch, spec)?;
genesis_state
Ok(genesis_state)
}
/// Return the tree hash root for this `BeaconState`.
@ -219,7 +204,12 @@ impl BeaconState {
///
/// Spec v0.2.0
pub fn previous_epoch(&self, spec: &ChainSpec) -> Epoch {
self.current_epoch(spec).saturating_sub(1_u64)
let current_epoch = self.current_epoch(&spec);
if current_epoch == spec.genesis_epoch {
current_epoch
} else {
current_epoch - 1
}
}
/// The epoch following `self.current_epoch()`.
@ -267,23 +257,50 @@ impl BeaconState {
/// committee is itself a list of validator indices.
///
/// Spec v0.1
pub fn get_shuffling(&self, seed: Hash256, epoch: Epoch, spec: &ChainSpec) -> Vec<Vec<usize>> {
pub fn get_shuffling(
&self,
seed: Hash256,
epoch: Epoch,
spec: &ChainSpec,
) -> Option<Vec<Vec<usize>>> {
let active_validator_indices =
get_active_validator_indices(&self.validator_registry, epoch);
if active_validator_indices.is_empty() {
return None;
}
trace!(
"get_shuffling: active_validator_indices.len() == {}",
active_validator_indices.len()
);
let committees_per_epoch =
self.get_epoch_committee_count(active_validator_indices.len(), spec);
// TODO: check that Hash256::from(u64) matches 'int_to_bytes32'.
let seed = seed ^ Hash256::from(epoch.as_u64());
// TODO: fix `expect` assert.
let shuffled_active_validator_indices =
shuffle(&seed, active_validator_indices).expect("Max validator count exceed!");
trace!(
"get_shuffling: active_validator_indices.len() == {}, committees_per_epoch: {}",
active_validator_indices.len(),
committees_per_epoch
);
let mut shuffled_active_validator_indices = vec![0; active_validator_indices.len()];
for &i in &active_validator_indices {
let shuffled_i = get_permutated_index(
i,
active_validator_indices.len(),
&seed[..],
spec.shuffle_round_count,
)?;
shuffled_active_validator_indices[i] = active_validator_indices[shuffled_i]
}
Some(
shuffled_active_validator_indices
.honey_badger_split(committees_per_epoch as usize)
.map(|slice: &[usize]| slice.to_vec())
.collect()
.collect(),
)
}
/// Return the number of committees in the previous epoch.
@ -321,9 +338,17 @@ impl BeaconState {
+ 1;
let latest_index_root = current_epoch + spec.entry_exit_delay;
if (epoch <= earliest_index_root) & (epoch >= latest_index_root) {
trace!(
"get_active_index_root: epoch: {}, earliest: {}, latest: {}",
epoch,
earliest_index_root,
latest_index_root
);
if (epoch >= earliest_index_root) & (epoch <= latest_index_root) {
Some(self.latest_index_roots[epoch.as_usize() % spec.latest_index_roots_length])
} else {
trace!("get_active_index_root: epoch out of range.");
None
}
}
@ -331,12 +356,27 @@ impl BeaconState {
/// Generate a seed for the given ``epoch``.
///
/// Spec v0.2.0
pub fn generate_seed(&self, epoch: Epoch, spec: &ChainSpec) -> Option<Hash256> {
let mut input = self.get_randao_mix(epoch, spec)?.to_vec();
input.append(&mut self.get_active_index_root(epoch, spec)?.to_vec());
pub fn generate_seed(
&self,
epoch: Epoch,
spec: &ChainSpec,
) -> Result<Hash256, BeaconStateError> {
let mut input = self
.get_randao_mix(epoch, spec)
.ok_or_else(|| BeaconStateError::InsufficientRandaoMixes)?
.to_vec();
input.append(
&mut self
.get_active_index_root(epoch, spec)
.ok_or_else(|| BeaconStateError::InsufficientIndexRoots)?
.to_vec(),
);
// TODO: ensure `Hash256::from(u64)` == `int_to_bytes32`.
input.append(&mut Hash256::from(epoch.as_u64()).to_vec());
Some(Hash256::from(&hash(&input[..])[..]))
Ok(Hash256::from(&hash(&input[..])[..]))
}
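// Put differently, the body above computes:
//   seed(epoch) = hash(randao_mix(epoch) ++ active_index_root(epoch) ++ bytes_of(epoch)),
// and a missing randao mix or index root is now surfaced as a
// `BeaconStateError` rather than a silent `None`.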
/// Return the list of ``(committee, shard)`` tuples for the ``slot``.
@ -350,39 +390,36 @@ impl BeaconState {
slot: Slot,
registry_change: bool,
spec: &ChainSpec,
) -> Result<Vec<(Vec<usize>, u64)>, CommitteesError> {
) -> Result<Vec<(Vec<usize>, u64)>, BeaconStateError> {
let epoch = slot.epoch(spec.epoch_length);
let current_epoch = self.current_epoch(spec);
let previous_epoch = if current_epoch == spec.genesis_epoch {
current_epoch
} else {
current_epoch.saturating_sub(1_u64)
};
let previous_epoch = self.previous_epoch(spec);
let next_epoch = self.next_epoch(spec);
let (committees_per_epoch, seed, shuffling_epoch, shuffling_start_shard) =
if epoch == previous_epoch {
(
self.get_previous_epoch_committee_count(spec),
self.previous_epoch_seed,
self.previous_calculation_epoch,
self.previous_epoch_start_shard,
)
} else if epoch == current_epoch {
if epoch == current_epoch {
trace!("get_crosslink_committees_at_slot: current_epoch");
(
self.get_current_epoch_committee_count(spec),
self.current_epoch_seed,
self.current_calculation_epoch,
self.current_epoch_start_shard,
)
} else if epoch == previous_epoch {
trace!("get_crosslink_committees_at_slot: previous_epoch");
(
self.get_previous_epoch_committee_count(spec),
self.previous_epoch_seed,
self.previous_calculation_epoch,
self.previous_epoch_start_shard,
)
} else if epoch == next_epoch {
trace!("get_crosslink_committees_at_slot: next_epoch");
let current_committees_per_epoch = self.get_current_epoch_committee_count(spec);
let epochs_since_last_registry_update =
current_epoch - self.validator_registry_update_epoch;
let (seed, shuffling_start_shard) = if registry_change {
let next_seed = self
.generate_seed(next_epoch, spec)
.ok_or_else(|| CommitteesError::BadRandao)?;
let next_seed = self.generate_seed(next_epoch, spec)?;
(
next_seed,
(self.current_epoch_start_shard + current_committees_per_epoch)
@ -391,9 +428,7 @@ impl BeaconState {
} else if (epochs_since_last_registry_update > 1)
& epochs_since_last_registry_update.is_power_of_two()
{
let next_seed = self
.generate_seed(next_epoch, spec)
.ok_or_else(|| CommitteesError::BadRandao)?;
let next_seed = self.generate_seed(next_epoch, spec)?;
(next_seed, self.current_epoch_start_shard)
} else {
(self.current_epoch_seed, self.current_epoch_start_shard)
@ -405,15 +440,24 @@ impl BeaconState {
shuffling_start_shard,
)
} else {
panic!("Epoch out-of-bounds.")
return Err(BeaconStateError::EpochOutOfBounds);
};
let shuffling = self.get_shuffling(seed, shuffling_epoch, spec);
let shuffling = self
.get_shuffling(seed, shuffling_epoch, spec)
.ok_or_else(|| BeaconStateError::UnableToShuffle)?;
let offset = slot.as_u64() % spec.epoch_length;
let committees_per_slot = committees_per_epoch / spec.epoch_length;
let slot_start_shard =
(shuffling_start_shard + committees_per_slot * offset) % spec.shard_count;
trace!(
"get_crosslink_committees_at_slot: committees_per_slot: {}, slot_start_shard: {}, seed: {}",
committees_per_slot,
slot_start_shard,
seed
);
let mut crosslinks_at_slot = vec![];
for i in 0..committees_per_slot {
let tuple = (
@ -433,7 +477,7 @@ impl BeaconState {
&self,
validator_index: usize,
spec: &ChainSpec,
) -> Result<Option<(Slot, u64, u64)>, CommitteesError> {
) -> Result<Option<(Slot, u64, u64)>, BeaconStateError> {
let mut result = None;
for slot in self.current_epoch(spec).slot_iter(spec.epoch_length) {
for (committee, shard) in self.get_crosslink_committees_at_slot(slot, false, spec)? {
@ -463,16 +507,20 @@ impl BeaconState {
&self,
slot: Slot,
spec: &ChainSpec,
) -> Result<usize, CommitteesError> {
) -> Result<usize, BeaconStateError> {
let committees = self.get_crosslink_committees_at_slot(slot, false, spec)?;
trace!(
"get_beacon_proposer_index: slot: {}, committees_count: {}",
slot,
committees.len()
);
committees
.first()
.ok_or(CommitteesError::InsufficientNumberOfValidators)
.ok_or(BeaconStateError::InsufficientValidators)
.and_then(|(first_committee, _)| {
let index = (slot.as_usize())
.checked_rem(first_committee.len())
.ok_or(CommitteesError::InsufficientNumberOfValidators)?;
// NOTE: the indexing below cannot panic; an empty `first_committee` makes
// `checked_rem` return `None`, which the following `ok_or` turns into an early error return.
.ok_or(BeaconStateError::InsufficientValidators)?;
Ok(first_committee[index])
})
}
@ -708,7 +756,7 @@ impl BeaconState {
&mut self,
validator_index: usize,
spec: &ChainSpec,
) -> Result<(), CommitteesError> {
) -> Result<(), BeaconStateError> {
self.exit_validator(validator_index, spec);
let current_epoch = self.current_epoch(spec);
@ -828,7 +876,7 @@ impl BeaconState {
let earliest_attestation_index = included_attestations
.iter()
.min_by_key(|i| attestations[**i].inclusion_slot)
.ok_or_else(|| InclusionError::NoIncludedAttestations)?;
.ok_or_else(|| InclusionError::NoAttestationsForValidator)?;
Ok(attestations[*earliest_attestation_index].clone())
}
@ -933,34 +981,18 @@ fn hash_tree_root<T: TreeHash>(input: Vec<T>) -> Hash256 {
Hash256::from(&input.hash_tree_root()[..])
}
impl From<AttestationParticipantsError> for AttestationValidationError {
fn from(e: AttestationParticipantsError) -> AttestationValidationError {
AttestationValidationError::AttestationParticipantsError(e)
impl From<BeaconStateError> for AttestationParticipantsError {
fn from(e: BeaconStateError) -> AttestationParticipantsError {
AttestationParticipantsError::BeaconStateError(e)
}
}
impl From<CommitteesError> for AttestationParticipantsError {
fn from(e: CommitteesError) -> AttestationParticipantsError {
AttestationParticipantsError::CommitteesError(e)
}
}
/*
*/
impl From<AttestationParticipantsError> for InclusionError {
fn from(e: AttestationParticipantsError) -> InclusionError {
InclusionError::AttestationParticipantsError(e)
}
}
impl From<CommitteesError> for Error {
fn from(e: CommitteesError) -> Error {
Error::CommitteesError(e)
}
}
impl Encodable for BeaconState {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.slot);
@ -1115,33 +1147,3 @@ impl<T: RngCore> TestRandom<T> for BeaconState {
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode;
#[test]
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconState::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconState::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
}

View File

@ -0,0 +1,97 @@
#![cfg(test)]
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use crate::{
beacon_state::BeaconStateError, BeaconState, ChainSpec, Deposit, DepositData, DepositInput,
Eth1Data, Hash256, Keypair,
};
use bls::create_proof_of_possession;
use ssz::ssz_encode;
struct BeaconStateTestBuilder {
pub genesis_time: u64,
pub initial_validator_deposits: Vec<Deposit>,
pub latest_eth1_data: Eth1Data,
pub spec: ChainSpec,
pub keypairs: Vec<Keypair>,
}
impl BeaconStateTestBuilder {
pub fn with_random_validators(validator_count: usize) -> Self {
let genesis_time = 10_000_000;
let keypairs: Vec<Keypair> = (0..validator_count)
.collect::<Vec<usize>>()
.iter()
.map(|_| Keypair::random())
.collect();
let initial_validator_deposits = keypairs
.iter()
.map(|keypair| Deposit {
branch: vec![], // branch verification is not specified.
index: 0, // index verification is not specified.
deposit_data: DepositData {
amount: 32_000_000_000, // 32 ETH (in Gwei)
timestamp: genesis_time - 1,
deposit_input: DepositInput {
pubkey: keypair.pk.clone(),
withdrawal_credentials: Hash256::zero(), // Withdrawal not possible.
proof_of_possession: create_proof_of_possession(&keypair),
},
},
})
.collect();
let latest_eth1_data = Eth1Data {
deposit_root: Hash256::zero(),
block_hash: Hash256::zero(),
};
let spec = ChainSpec::foundation();
Self {
genesis_time,
initial_validator_deposits,
latest_eth1_data,
spec,
keypairs,
}
}
pub fn build(&self) -> Result<BeaconState, BeaconStateError> {
BeaconState::genesis(
self.genesis_time,
self.initial_validator_deposits.clone(),
self.latest_eth1_data.clone(),
&self.spec,
)
}
}
#[test]
pub fn can_produce_genesis_block() {
let builder = BeaconStateTestBuilder::with_random_validators(2);
builder.build().unwrap();
}
#[test]
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconState::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = <_>::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = BeaconState::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}

View File

@ -1,7 +1,96 @@
use crate::{Address, ChainSpec, Epoch, Hash256, Signature, Slot};
use crate::{Address, Epoch, Hash256, Slot};
use bls::Signature;
const GWEI: u64 = 1_000_000_000;
/// Holds all the "constants" for a BeaconChain.
///
/// Spec v0.2.0
#[derive(PartialEq, Debug, Clone)]
pub struct ChainSpec {
/*
* Misc
*/
pub shard_count: u64,
pub target_committee_size: u64,
pub max_balance_churn_quotient: u64,
pub beacon_chain_shard_number: u64,
pub max_indices_per_slashable_vote: u64,
pub max_withdrawals_per_epoch: u64,
pub shuffle_round_count: u8,
/*
* Deposit contract
*/
pub deposit_contract_address: Address,
pub deposit_contract_tree_depth: u64,
/*
* Gwei values
*/
pub min_deposit_amount: u64,
pub max_deposit_amount: u64,
pub fork_choice_balance_increment: u64,
pub ejection_balance: u64,
/*
* Initial Values
*/
pub genesis_fork_version: u64,
pub genesis_slot: Slot,
pub genesis_epoch: Epoch,
pub genesis_start_shard: u64,
pub far_future_epoch: Epoch,
pub zero_hash: Hash256,
pub empty_signature: Signature,
pub bls_withdrawal_prefix_byte: u8,
/*
* Time parameters
*/
pub slot_duration: u64,
pub min_attestation_inclusion_delay: u64,
pub epoch_length: u64,
pub seed_lookahead: Epoch,
pub entry_exit_delay: u64,
pub eth1_data_voting_period: u64,
pub min_validator_withdrawal_epochs: Epoch,
/*
* State list lengths
*/
pub latest_block_roots_length: usize,
pub latest_randao_mixes_length: usize,
pub latest_index_roots_length: usize,
pub latest_penalized_exit_length: usize,
/*
* Reward and penalty quotients
*/
pub base_reward_quotient: u64,
pub whistleblower_reward_quotient: u64,
pub includer_reward_quotient: u64,
pub inactivity_penalty_quotient: u64,
/*
* Max operations per block
*/
pub max_proposer_slashings: u64,
pub max_attester_slashings: u64,
pub max_attestations: u64,
pub max_deposits: u64,
pub max_exits: u64,
/*
* Signature domains
*/
pub domain_deposit: u64,
pub domain_attestation: u64,
pub domain_proposal: u64,
pub domain_exit: u64,
pub domain_randao: u64,
}
impl ChainSpec {
/// Returns a `ChainSpec` compatible with the specification from Ethereum Foundation.
///
@ -100,6 +189,26 @@ impl ChainSpec {
}
}
impl ChainSpec {
/// Returns a `ChainSpec` compatible with the specification, with parameters scaled down for a network of only 8 validators.
///
/// Spec v0.2.0
pub fn few_validators() -> Self {
let genesis_slot = Slot::new(2_u64.pow(19));
let epoch_length = 8;
let genesis_epoch = genesis_slot.epoch(epoch_length);
Self {
shard_count: 1,
target_committee_size: 1,
genesis_slot,
genesis_epoch,
epoch_length,
..ChainSpec::foundation()
}
}
}
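// An illustrative check of the preset above (hypothetical test; all other
// fields fall back to `ChainSpec::foundation()`):
#[cfg(test)]
mod few_validators_sketch {
    use super::*;

    #[test]
    fn genesis_epoch_matches_genesis_slot() {
        let spec = ChainSpec::few_validators();
        assert_eq!(spec.epoch_length, 8);
        // The genesis epoch is derived from the genesis slot (2^19) and the
        // shortened epoch length.
        assert_eq!(
            spec.genesis_epoch,
            spec.genesis_slot.epoch(spec.epoch_length)
        );
    }
}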
#[cfg(test)]
mod tests {
use super::*;

View File

@ -8,6 +8,7 @@ pub mod beacon_block;
pub mod beacon_block_body;
pub mod beacon_state;
pub mod casper_slashing;
pub mod chain_spec;
pub mod crosslink;
pub mod deposit;
pub mod deposit_data;
@ -24,9 +25,10 @@ pub mod readers;
pub mod shard_reassignment_record;
pub mod slashable_attestation;
pub mod slashable_vote_data;
#[macro_use]
pub mod slot_epoch_macros;
pub mod slot_epoch;
pub mod slot_height;
pub mod spec;
pub mod validator;
pub mod validator_registry;
pub mod validator_registry_delta_block;
@ -42,6 +44,7 @@ pub use crate::beacon_block::BeaconBlock;
pub use crate::beacon_block_body::BeaconBlockBody;
pub use crate::beacon_state::BeaconState;
pub use crate::casper_slashing::CasperSlashing;
pub use crate::chain_spec::ChainSpec;
pub use crate::crosslink::Crosslink;
pub use crate::deposit::Deposit;
pub use crate::deposit_data::DepositData;
@ -58,7 +61,6 @@ pub use crate::slashable_attestation::SlashableAttestation;
pub use crate::slashable_vote_data::SlashableVoteData;
pub use crate::slot_epoch::{Epoch, Slot};
pub use crate::slot_height::SlotHeight;
pub use crate::spec::ChainSpec;
pub use crate::validator::{StatusFlags as ValidatorStatusFlags, Validator};
pub use crate::validator_registry_delta_block::ValidatorRegistryDeltaBlock;

View File

@ -21,255 +21,6 @@ use std::hash::{Hash, Hasher};
use std::iter::Iterator;
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Rem, Sub, SubAssign};
macro_rules! impl_from_into_u64 {
($main: ident) => {
impl From<u64> for $main {
fn from(n: u64) -> $main {
$main(n)
}
}
impl Into<u64> for $main {
fn into(self) -> u64 {
self.0
}
}
impl $main {
pub fn as_u64(&self) -> u64 {
self.0
}
}
};
}
macro_rules! impl_from_into_usize {
($main: ident) => {
impl From<usize> for $main {
fn from(n: usize) -> $main {
$main(n as u64)
}
}
impl Into<usize> for $main {
fn into(self) -> usize {
self.0 as usize
}
}
impl $main {
pub fn as_usize(&self) -> usize {
self.0 as usize
}
}
};
}
macro_rules! impl_math_between {
($main: ident, $other: ident) => {
impl PartialOrd<$other> for $main {
/// Utilizes `partial_cmp` on the underlying `u64`.
fn partial_cmp(&self, other: &$other) -> Option<Ordering> {
Some(self.0.cmp(&(*other).into()))
}
}
impl PartialEq<$other> for $main {
fn eq(&self, other: &$other) -> bool {
let other: u64 = (*other).into();
self.0 == other
}
}
impl Add<$other> for $main {
type Output = $main;
fn add(self, other: $other) -> $main {
$main::from(self.0.saturating_add(other.into()))
}
}
impl AddAssign<$other> for $main {
fn add_assign(&mut self, other: $other) {
self.0 = self.0.saturating_add(other.into());
}
}
impl Sub<$other> for $main {
type Output = $main;
fn sub(self, other: $other) -> $main {
$main::from(self.0.saturating_sub(other.into()))
}
}
impl SubAssign<$other> for $main {
fn sub_assign(&mut self, other: $other) {
self.0 = self.0.saturating_sub(other.into());
}
}
impl Mul<$other> for $main {
type Output = $main;
fn mul(self, rhs: $other) -> $main {
let rhs: u64 = rhs.into();
$main::from(self.0.saturating_mul(rhs))
}
}
impl MulAssign<$other> for $main {
fn mul_assign(&mut self, rhs: $other) {
let rhs: u64 = rhs.into();
self.0 = self.0.saturating_mul(rhs)
}
}
impl Div<$other> for $main {
type Output = $main;
fn div(self, rhs: $other) -> $main {
let rhs: u64 = rhs.into();
if rhs == 0 {
panic!("Cannot divide by zero-valued Slot/Epoch")
}
$main::from(self.0 / rhs)
}
}
impl DivAssign<$other> for $main {
fn div_assign(&mut self, rhs: $other) {
let rhs: u64 = rhs.into();
if rhs == 0 {
panic!("Cannot divide by zero-valued Slot/Epoch")
}
self.0 = self.0 / rhs
}
}
impl Rem<$other> for $main {
type Output = $main;
fn rem(self, modulus: $other) -> $main {
let modulus: u64 = modulus.into();
$main::from(self.0 % modulus)
}
}
};
}
macro_rules! impl_math {
($type: ident) => {
impl $type {
pub fn saturating_sub<T: Into<$type>>(&self, other: T) -> $type {
*self - other.into()
}
pub fn saturating_add<T: Into<$type>>(&self, other: T) -> $type {
*self + other.into()
}
pub fn checked_div<T: Into<$type>>(&self, rhs: T) -> Option<$type> {
let rhs: $type = rhs.into();
if rhs == 0 {
None
} else {
Some(*self / rhs)
}
}
pub fn is_power_of_two(&self) -> bool {
self.0.is_power_of_two()
}
}
impl Ord for $type {
fn cmp(&self, other: &$type) -> Ordering {
let other: u64 = (*other).into();
self.0.cmp(&other)
}
}
};
}
macro_rules! impl_display {
($type: ident) => {
impl fmt::Display for $type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl slog::Value for $type {
fn serialize(
&self,
record: &slog::Record,
key: slog::Key,
serializer: &mut slog::Serializer,
) -> slog::Result {
self.0.serialize(record, key, serializer)
}
}
};
}
macro_rules! impl_ssz {
($type: ident) => {
impl Encodable for $type {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.0);
}
}
impl Decodable for $type {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (value, i) = <_>::ssz_decode(bytes, i)?;
Ok(($type(value), i))
}
}
impl TreeHash for $type {
fn hash_tree_root(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![];
result.append(&mut self.0.hash_tree_root());
hash(&result)
}
}
impl<T: RngCore> TestRandom<T> for $type {
fn random_for_test(rng: &mut T) -> Self {
$type::from(u64::random_for_test(rng))
}
}
};
}
macro_rules! impl_hash {
($type: ident) => {
// Implemented to stop clippy lint:
// https://rust-lang.github.io/rust-clippy/master/index.html#derive_hash_xor_eq
impl Hash for $type {
fn hash<H: Hasher>(&self, state: &mut H) {
ssz_encode(self).hash(state)
}
}
};
}
macro_rules! impl_common {
($type: ident) => {
impl_from_into_u64!($type);
impl_from_into_usize!($type);
impl_math_between!($type, $type);
impl_math_between!($type, u64);
impl_math!($type);
impl_display!($type);
impl_ssz!($type);
impl_hash!($type);
};
}
#[derive(Eq, Debug, Clone, Copy, Default, Serialize)]
pub struct Slot(u64);
@ -349,373 +100,19 @@ impl<'a> Iterator for SlotIter<'a> {
}
#[cfg(test)]
mod tests {
use super::*;
macro_rules! new_tests {
($type: ident) => {
#[test]
fn new() {
assert_eq!($type(0), $type::new(0));
assert_eq!($type(3), $type::new(3));
assert_eq!($type(u64::max_value()), $type::new(u64::max_value()));
}
};
}
macro_rules! from_into_tests {
($type: ident, $other: ident) => {
#[test]
fn into() {
let x: $other = $type(0).into();
assert_eq!(x, 0);
let x: $other = $type(3).into();
assert_eq!(x, 3);
let x: $other = $type(u64::max_value()).into();
// Note: this will fail on 32 bit systems. This is expected as we don't have a proper
// 32-bit system strategy in place.
assert_eq!(x, $other::max_value());
}
#[test]
fn from() {
assert_eq!($type(0), $type::from(0_u64));
assert_eq!($type(3), $type::from(3_u64));
assert_eq!($type(u64::max_value()), $type::from($other::max_value()));
}
};
}
macro_rules! math_between_tests {
($type: ident, $other: ident) => {
#[test]
fn partial_ord() {
let assert_partial_ord = |a: u64, partial_ord: Ordering, b: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a).partial_cmp(&other), Some(partial_ord));
};
assert_partial_ord(1, Ordering::Less, 2);
assert_partial_ord(2, Ordering::Greater, 1);
assert_partial_ord(0, Ordering::Less, u64::max_value());
assert_partial_ord(u64::max_value(), Ordering::Greater, 0);
}
#[test]
fn partial_eq() {
let assert_partial_eq = |a: u64, b: u64, is_equal: bool| {
let other: $other = $type(b).into();
assert_eq!($type(a).eq(&other), is_equal);
};
assert_partial_eq(0, 0, true);
assert_partial_eq(0, 1, false);
assert_partial_eq(1, 0, false);
assert_partial_eq(1, 1, true);
assert_partial_eq(u64::max_value(), u64::max_value(), true);
assert_partial_eq(0, u64::max_value(), false);
assert_partial_eq(u64::max_value(), 0, false);
}
#[test]
fn add_and_add_assign() {
let assert_add = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) + other, $type(result));
let mut add_assigned = $type(a);
add_assigned += other;
assert_eq!(add_assigned, $type(result));
};
assert_add(0, 1, 1);
assert_add(1, 0, 1);
assert_add(1, 2, 3);
assert_add(2, 1, 3);
assert_add(7, 7, 14);
// Addition should be saturating.
assert_add(u64::max_value(), 1, u64::max_value());
assert_add(u64::max_value(), u64::max_value(), u64::max_value());
}
#[test]
fn sub_and_sub_assign() {
let assert_sub = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) - other, $type(result));
let mut sub_assigned = $type(a);
sub_assigned -= other;
assert_eq!(sub_assigned, $type(result));
};
assert_sub(1, 0, 1);
assert_sub(2, 1, 1);
assert_sub(14, 7, 7);
assert_sub(u64::max_value(), 1, u64::max_value() - 1);
assert_sub(u64::max_value(), u64::max_value(), 0);
// Subtraction should be saturating
assert_sub(0, 1, 0);
assert_sub(1, 2, 0);
}
#[test]
fn mul_and_mul_assign() {
let assert_mul = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) * other, $type(result));
let mut mul_assigned = $type(a);
mul_assigned *= other;
assert_eq!(mul_assigned, $type(result));
};
assert_mul(2, 2, 4);
assert_mul(1, 2, 2);
assert_mul(0, 2, 0);
// Multiplication should be saturating.
assert_mul(u64::max_value(), 2, u64::max_value());
}
#[test]
fn div_and_div_assign() {
let assert_div = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) / other, $type(result));
let mut div_assigned = $type(a);
div_assigned /= other;
assert_eq!(div_assigned, $type(result));
};
assert_div(0, 2, 0);
assert_div(2, 2, 1);
assert_div(100, 50, 2);
assert_div(128, 2, 64);
assert_div(u64::max_value(), 2, 2_u64.pow(63) - 1);
}
#[test]
#[should_panic]
fn div_panics_with_divide_by_zero() {
let other: $other = $type(0).into();
let _ = $type(2) / other;
}
#[test]
#[should_panic]
fn div_assign_panics_with_divide_by_zero() {
let other: $other = $type(0).into();
let mut assigned = $type(2);
assigned /= other;
}
#[test]
fn rem() {
let assert_rem = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) % other, $type(result));
};
assert_rem(3, 2, 1);
assert_rem(40, 2, 0);
assert_rem(10, 100, 10);
assert_rem(302042, 3293, 2379);
}
};
}
macro_rules! math_tests {
($type: ident) => {
#[test]
fn saturating_sub() {
let assert_saturating_sub = |a: u64, b: u64, result: u64| {
assert_eq!($type(a).saturating_sub($type(b)), $type(result));
};
assert_saturating_sub(1, 0, 1);
assert_saturating_sub(2, 1, 1);
assert_saturating_sub(14, 7, 7);
assert_saturating_sub(u64::max_value(), 1, u64::max_value() - 1);
assert_saturating_sub(u64::max_value(), u64::max_value(), 0);
// Subtraction should be saturating
assert_saturating_sub(0, 1, 0);
assert_saturating_sub(1, 2, 0);
}
#[test]
fn saturating_add() {
let assert_saturating_add = |a: u64, b: u64, result: u64| {
assert_eq!($type(a).saturating_add($type(b)), $type(result));
};
assert_saturating_add(0, 1, 1);
assert_saturating_add(1, 0, 1);
assert_saturating_add(1, 2, 3);
assert_saturating_add(2, 1, 3);
assert_saturating_add(7, 7, 14);
// Addition should be saturating.
assert_saturating_add(u64::max_value(), 1, u64::max_value());
assert_saturating_add(u64::max_value(), u64::max_value(), u64::max_value());
}
#[test]
fn checked_div() {
let assert_checked_div = |a: u64, b: u64, result: Option<u64>| {
let division_result_as_u64 = match $type(a).checked_div($type(b)) {
None => None,
Some(val) => Some(val.as_u64()),
};
assert_eq!(division_result_as_u64, result);
};
assert_checked_div(0, 2, Some(0));
assert_checked_div(2, 2, Some(1));
assert_checked_div(100, 50, Some(2));
assert_checked_div(128, 2, Some(64));
assert_checked_div(u64::max_value(), 2, Some(2_u64.pow(63) - 1));
assert_checked_div(2, 0, None);
assert_checked_div(0, 0, None);
assert_checked_div(u64::max_value(), 0, None);
}
#[test]
fn is_power_of_two() {
let assert_is_power_of_two = |a: u64, result: bool| {
assert_eq!(
$type(a).is_power_of_two(),
result,
"{}.is_power_of_two() != {}",
a,
result
);
};
assert_is_power_of_two(0, false);
assert_is_power_of_two(1, true);
assert_is_power_of_two(2, true);
assert_is_power_of_two(3, false);
assert_is_power_of_two(4, true);
assert_is_power_of_two(2_u64.pow(4), true);
assert_is_power_of_two(u64::max_value(), false);
}
#[test]
fn ord() {
let assert_ord = |a: u64, ord: Ordering, b: u64| {
assert_eq!($type(a).cmp(&$type(b)), ord);
};
assert_ord(1, Ordering::Less, 2);
assert_ord(2, Ordering::Greater, 1);
assert_ord(0, Ordering::Less, u64::max_value());
assert_ord(u64::max_value(), Ordering::Greater, 0);
}
};
}
macro_rules! ssz_tests {
($type: ident) => {
#[test]
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = $type::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = $type::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = $type::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
};
}
macro_rules! all_tests {
($type: ident) => {
new_tests!($type);
math_between_tests!($type, $type);
math_tests!($type);
ssz_tests!($type);
mod u64_tests {
use super::*;
from_into_tests!($type, u64);
math_between_tests!($type, u64);
#[test]
pub fn as_64() {
let x = $type(0).as_u64();
assert_eq!(x, 0);
let x = $type(3).as_u64();
assert_eq!(x, 3);
let x = $type(u64::max_value()).as_u64();
assert_eq!(x, u64::max_value());
}
}
mod usize_tests {
use super::*;
from_into_tests!($type, usize);
#[test]
pub fn as_usize() {
let x = $type(0).as_usize();
assert_eq!(x, 0);
let x = $type(3).as_usize();
assert_eq!(x, 3);
let x = $type(u64::max_value()).as_usize();
assert_eq!(x, usize::max_value());
}
}
};
}
#[cfg(test)]
mod slot_tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode;
all_tests!(Slot);
}
#[cfg(test)]
mod epoch_tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode;
all_tests!(Epoch);
}

View File

@ -0,0 +1,621 @@
macro_rules! impl_from_into_u64 {
($main: ident) => {
impl From<u64> for $main {
fn from(n: u64) -> $main {
$main(n)
}
}
impl Into<u64> for $main {
fn into(self) -> u64 {
self.0
}
}
impl $main {
pub fn as_u64(&self) -> u64 {
self.0
}
}
};
}
// need to truncate for some fork-choice algorithms
macro_rules! impl_into_u32 {
($main: ident) => {
impl Into<u32> for $main {
fn into(self) -> u32 {
self.0 as u32
}
}
impl $main {
pub fn as_u32(&self) -> u32 {
self.0 as u32
}
}
};
}
macro_rules! impl_from_into_usize {
($main: ident) => {
impl From<usize> for $main {
fn from(n: usize) -> $main {
$main(n as u64)
}
}
impl Into<usize> for $main {
fn into(self) -> usize {
self.0 as usize
}
}
impl $main {
pub fn as_usize(&self) -> usize {
self.0 as usize
}
}
};
}
macro_rules! impl_math_between {
($main: ident, $other: ident) => {
impl PartialOrd<$other> for $main {
/// Utilizes `partial_cmp` on the underlying `u64`.
fn partial_cmp(&self, other: &$other) -> Option<Ordering> {
Some(self.0.cmp(&(*other).into()))
}
}
impl PartialEq<$other> for $main {
fn eq(&self, other: &$other) -> bool {
let other: u64 = (*other).into();
self.0 == other
}
}
impl Add<$other> for $main {
type Output = $main;
fn add(self, other: $other) -> $main {
$main::from(self.0.saturating_add(other.into()))
}
}
impl AddAssign<$other> for $main {
fn add_assign(&mut self, other: $other) {
self.0 = self.0.saturating_add(other.into());
}
}
impl Sub<$other> for $main {
type Output = $main;
fn sub(self, other: $other) -> $main {
$main::from(self.0.saturating_sub(other.into()))
}
}
impl SubAssign<$other> for $main {
fn sub_assign(&mut self, other: $other) {
self.0 = self.0.saturating_sub(other.into());
}
}
impl Mul<$other> for $main {
type Output = $main;
fn mul(self, rhs: $other) -> $main {
let rhs: u64 = rhs.into();
$main::from(self.0.saturating_mul(rhs))
}
}
impl MulAssign<$other> for $main {
fn mul_assign(&mut self, rhs: $other) {
let rhs: u64 = rhs.into();
self.0 = self.0.saturating_mul(rhs)
}
}
impl Div<$other> for $main {
type Output = $main;
fn div(self, rhs: $other) -> $main {
let rhs: u64 = rhs.into();
if rhs == 0 {
panic!("Cannot divide by zero-valued Slot/Epoch")
}
$main::from(self.0 / rhs)
}
}
impl DivAssign<$other> for $main {
fn div_assign(&mut self, rhs: $other) {
let rhs: u64 = rhs.into();
if rhs == 0 {
panic!("Cannot divide by zero-valued Slot/Epoch")
}
self.0 = self.0 / rhs
}
}
impl Rem<$other> for $main {
type Output = $main;
fn rem(self, modulus: $other) -> $main {
let modulus: u64 = modulus.into();
$main::from(self.0 % modulus)
}
}
};
}
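// Added illustrative sketch (assumes a caller such as `Slot` has invoked this
// macro with `u64`, as the test macros below do): the generated operators
// saturate instead of wrapping, and division by zero panics.
//
// assert_eq!(Slot::new(1) + 2u64, Slot::new(3));
// assert_eq!(Slot::new(u64::max_value()) + 1u64, Slot::new(u64::max_value())); // saturating add
// assert_eq!(Slot::new(1) - 2u64, Slot::new(0)); // saturating sub
// let _ = Slot::new(2) / 0u64; // panics: "Cannot divide by zero-valued Slot/Epoch"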
macro_rules! impl_math {
($type: ident) => {
impl $type {
pub fn saturating_sub<T: Into<$type>>(&self, other: T) -> $type {
*self - other.into()
}
pub fn saturating_add<T: Into<$type>>(&self, other: T) -> $type {
*self + other.into()
}
pub fn checked_div<T: Into<$type>>(&self, rhs: T) -> Option<$type> {
let rhs: $type = rhs.into();
if rhs == 0 {
None
} else {
Some(*self / rhs)
}
}
pub fn is_power_of_two(&self) -> bool {
self.0.is_power_of_two()
}
}
impl Ord for $type {
fn cmp(&self, other: &$type) -> Ordering {
let other: u64 = (*other).into();
self.0.cmp(&other)
}
}
};
}
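// Added illustrative sketch (hypothetical `Slot` caller): `checked_div` mirrors
// the panicking `Div` above but reports a zero divisor as `None`.
//
// assert_eq!(Slot::new(4).checked_div(2u64), Some(Slot::new(2)));
// assert_eq!(Slot::new(4).checked_div(0u64), None);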
macro_rules! impl_display {
($type: ident) => {
impl fmt::Display for $type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl slog::Value for $type {
fn serialize(
&self,
record: &slog::Record,
key: slog::Key,
serializer: &mut slog::Serializer,
) -> slog::Result {
self.0.serialize(record, key, serializer)
}
}
};
}
macro_rules! impl_ssz {
($type: ident) => {
impl Encodable for $type {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.0);
}
}
impl Decodable for $type {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (value, i) = <_>::ssz_decode(bytes, i)?;
Ok(($type(value), i))
}
}
impl TreeHash for $type {
fn hash_tree_root(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![];
result.append(&mut self.0.hash_tree_root());
hash(&result)
}
}
impl<T: RngCore> TestRandom<T> for $type {
fn random_for_test(rng: &mut T) -> Self {
$type::from(u64::random_for_test(rng))
}
}
};
}
macro_rules! impl_hash {
($type: ident) => {
// Implemented to stop clippy lint:
// https://rust-lang.github.io/rust-clippy/master/index.html#derive_hash_xor_eq
impl Hash for $type {
fn hash<H: Hasher>(&self, state: &mut H) {
ssz_encode(self).hash(state)
}
}
};
}
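// Added note: hashing the SSZ encoding keeps this `Hash` impl consistent with
// the manual `PartialEq` generated by `impl_math_between!`, which is the
// property the linked clippy lint exists to protect.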
macro_rules! impl_common {
($type: ident) => {
impl_from_into_u64!($type);
impl_from_into_usize!($type);
impl_math_between!($type, $type);
impl_math_between!($type, u64);
impl_math!($type);
impl_display!($type);
impl_ssz!($type);
impl_hash!($type);
};
}
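// Added illustrative sketch (hypothetical caller; the concrete `Slot`/`Epoch`
// definitions live in a sibling module): a `u64` newtype picks up the whole
// impl set with a single invocation, mirroring the `SlotHeight` derives shown
// later in this diff.
//
// #[derive(Eq, Debug, Clone, Copy, Default, Serialize)]
// pub struct Slot(u64);
// impl_common!(Slot);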
// test macros
#[allow(unused_macros)]
macro_rules! new_tests {
($type: ident) => {
#[test]
fn new() {
assert_eq!($type(0), $type::new(0));
assert_eq!($type(3), $type::new(3));
assert_eq!($type(u64::max_value()), $type::new(u64::max_value()));
}
};
}
#[allow(unused_macros)]
macro_rules! from_into_tests {
($type: ident, $other: ident) => {
#[test]
fn into() {
let x: $other = $type(0).into();
assert_eq!(x, 0);
let x: $other = $type(3).into();
assert_eq!(x, 3);
let x: $other = $type(u64::max_value()).into();
// Note: this will fail on 32-bit systems. This is expected as we don't have a proper
// 32-bit system strategy in place.
assert_eq!(x, $other::max_value());
}
#[test]
fn from() {
assert_eq!($type(0), $type::from(0_u64));
assert_eq!($type(3), $type::from(3_u64));
assert_eq!($type(u64::max_value()), $type::from($other::max_value()));
}
};
}
#[allow(unused_macros)]
macro_rules! math_between_tests {
($type: ident, $other: ident) => {
#[test]
fn partial_ord() {
let assert_partial_ord = |a: u64, partial_ord: Ordering, b: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a).partial_cmp(&other), Some(partial_ord));
};
assert_partial_ord(1, Ordering::Less, 2);
assert_partial_ord(2, Ordering::Greater, 1);
assert_partial_ord(0, Ordering::Less, u64::max_value());
assert_partial_ord(u64::max_value(), Ordering::Greater, 0);
}
#[test]
fn partial_eq() {
let assert_partial_eq = |a: u64, b: u64, is_equal: bool| {
let other: $other = $type(b).into();
assert_eq!($type(a).eq(&other), is_equal);
};
assert_partial_eq(0, 0, true);
assert_partial_eq(0, 1, false);
assert_partial_eq(1, 0, false);
assert_partial_eq(1, 1, true);
assert_partial_eq(u64::max_value(), u64::max_value(), true);
assert_partial_eq(0, u64::max_value(), false);
assert_partial_eq(u64::max_value(), 0, false);
}
#[test]
fn add_and_add_assign() {
let assert_add = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) + other, $type(result));
let mut add_assigned = $type(a);
add_assigned += other;
assert_eq!(add_assigned, $type(result));
};
assert_add(0, 1, 1);
assert_add(1, 0, 1);
assert_add(1, 2, 3);
assert_add(2, 1, 3);
assert_add(7, 7, 14);
// Addition should be saturating.
assert_add(u64::max_value(), 1, u64::max_value());
assert_add(u64::max_value(), u64::max_value(), u64::max_value());
}
#[test]
fn sub_and_sub_assign() {
let assert_sub = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) - other, $type(result));
let mut sub_assigned = $type(a);
sub_assigned -= other;
assert_eq!(sub_assigned, $type(result));
};
assert_sub(1, 0, 1);
assert_sub(2, 1, 1);
assert_sub(14, 7, 7);
assert_sub(u64::max_value(), 1, u64::max_value() - 1);
assert_sub(u64::max_value(), u64::max_value(), 0);
// Subtraction should be saturating
assert_sub(0, 1, 0);
assert_sub(1, 2, 0);
}
#[test]
fn mul_and_mul_assign() {
let assert_mul = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) * other, $type(result));
let mut mul_assigned = $type(a);
mul_assigned *= other;
assert_eq!(mul_assigned, $type(result));
};
assert_mul(2, 2, 4);
assert_mul(1, 2, 2);
assert_mul(0, 2, 0);
// Multiplication should be saturating.
assert_mul(u64::max_value(), 2, u64::max_value());
}
#[test]
fn div_and_div_assign() {
let assert_div = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) / other, $type(result));
let mut div_assigned = $type(a);
div_assigned /= other;
assert_eq!(div_assigned, $type(result));
};
assert_div(0, 2, 0);
assert_div(2, 2, 1);
assert_div(100, 50, 2);
assert_div(128, 2, 64);
assert_div(u64::max_value(), 2, 2_u64.pow(63) - 1);
}
#[test]
#[should_panic]
fn div_panics_with_divide_by_zero() {
let other: $other = $type(0).into();
let _ = $type(2) / other;
}
#[test]
#[should_panic]
fn div_assign_panics_with_divide_by_zero() {
let other: $other = $type(0).into();
let mut assigned = $type(2);
assigned /= other;
}
#[test]
fn rem() {
let assert_rem = |a: u64, b: u64, result: u64| {
let other: $other = $type(b).into();
assert_eq!($type(a) % other, $type(result));
};
assert_rem(3, 2, 1);
assert_rem(40, 2, 0);
assert_rem(10, 100, 10);
assert_rem(302042, 3293, 2379);
}
};
}
#[allow(unused_macros)]
macro_rules! math_tests {
($type: ident) => {
#[test]
fn saturating_sub() {
let assert_saturating_sub = |a: u64, b: u64, result: u64| {
assert_eq!($type(a).saturating_sub($type(b)), $type(result));
};
assert_saturating_sub(1, 0, 1);
assert_saturating_sub(2, 1, 1);
assert_saturating_sub(14, 7, 7);
assert_saturating_sub(u64::max_value(), 1, u64::max_value() - 1);
assert_saturating_sub(u64::max_value(), u64::max_value(), 0);
// Subtraction should be saturating
assert_saturating_sub(0, 1, 0);
assert_saturating_sub(1, 2, 0);
}
#[test]
fn saturating_add() {
let assert_saturating_add = |a: u64, b: u64, result: u64| {
assert_eq!($type(a).saturating_add($type(b)), $type(result));
};
assert_saturating_add(0, 1, 1);
assert_saturating_add(1, 0, 1);
assert_saturating_add(1, 2, 3);
assert_saturating_add(2, 1, 3);
assert_saturating_add(7, 7, 14);
// Addition should be saturating.
assert_saturating_add(u64::max_value(), 1, u64::max_value());
assert_saturating_add(u64::max_value(), u64::max_value(), u64::max_value());
}
#[test]
fn checked_div() {
let assert_checked_div = |a: u64, b: u64, result: Option<u64>| {
let division_result_as_u64 = match $type(a).checked_div($type(b)) {
None => None,
Some(val) => Some(val.as_u64()),
};
assert_eq!(division_result_as_u64, result);
};
assert_checked_div(0, 2, Some(0));
assert_checked_div(2, 2, Some(1));
assert_checked_div(100, 50, Some(2));
assert_checked_div(128, 2, Some(64));
assert_checked_div(u64::max_value(), 2, Some(2_u64.pow(63) - 1));
assert_checked_div(2, 0, None);
assert_checked_div(0, 0, None);
assert_checked_div(u64::max_value(), 0, None);
}
#[test]
fn is_power_of_two() {
let assert_is_power_of_two = |a: u64, result: bool| {
assert_eq!(
$type(a).is_power_of_two(),
result,
"{}.is_power_of_two() != {}",
a,
result
);
};
assert_is_power_of_two(0, false);
assert_is_power_of_two(1, true);
assert_is_power_of_two(2, true);
assert_is_power_of_two(3, false);
assert_is_power_of_two(4, true);
assert_is_power_of_two(2_u64.pow(4), true);
assert_is_power_of_two(u64::max_value(), false);
}
#[test]
fn ord() {
let assert_ord = |a: u64, ord: Ordering, b: u64| {
assert_eq!($type(a).cmp(&$type(b)), ord);
};
assert_ord(1, Ordering::Less, 2);
assert_ord(2, Ordering::Greater, 1);
assert_ord(0, Ordering::Less, u64::max_value());
assert_ord(u64::max_value(), Ordering::Greater, 0);
}
};
}
#[allow(unused_macros)]
macro_rules! ssz_tests {
($type: ident) => {
#[test]
pub fn test_ssz_round_trip() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = $type::random_for_test(&mut rng);
let bytes = ssz_encode(&original);
let (decoded, _) = $type::ssz_decode(&bytes, 0).unwrap();
assert_eq!(original, decoded);
}
#[test]
pub fn test_hash_tree_root() {
let mut rng = XorShiftRng::from_seed([42; 16]);
let original = $type::random_for_test(&mut rng);
let result = original.hash_tree_root();
assert_eq!(result.len(), 32);
// TODO: Add further tests
// https://github.com/sigp/lighthouse/issues/170
}
};
}
#[allow(unused_macros)]
macro_rules! all_tests {
($type: ident) => {
new_tests!($type);
math_between_tests!($type, $type);
math_tests!($type);
ssz_tests!($type);
mod u64_tests {
use super::*;
from_into_tests!($type, u64);
math_between_tests!($type, u64);
#[test]
pub fn as_64() {
let x = $type(0).as_u64();
assert_eq!(x, 0);
let x = $type(3).as_u64();
assert_eq!(x, 3);
let x = $type(u64::max_value()).as_u64();
assert_eq!(x, u64::max_value());
}
}
mod usize_tests {
use super::*;
from_into_tests!($type, usize);
#[test]
pub fn as_usize() {
let x = $type(0).as_usize();
assert_eq!(x, 0);
let x = $type(3).as_usize();
assert_eq!(x, 3);
let x = $type(u64::max_value()).as_usize();
assert_eq!(x, usize::max_value());
}
}
};
}

View File

@ -1,290 +1,13 @@
// Copyright 2019 Sigma Prime Pty Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
use crate::slot_epoch::{Epoch, Slot};
use crate::test_utils::TestRandom;
use rand::RngCore;
use serde_derive::Serialize;
use slog;
use ssz::{hash, ssz_encode, Decodable, DecodeError, Encodable, SszStream, TreeHash};
use std::cmp::{Ord, Ordering};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Rem, Sub, SubAssign};
macro_rules! impl_from_into_u64 {
($main: ident) => {
impl From<u64> for $main {
fn from(n: u64) -> $main {
$main(n)
}
}
impl Into<u64> for $main {
fn into(self) -> u64 {
self.0
}
}
impl $main {
pub fn as_u64(&self) -> u64 {
self.0
}
}
};
}
// need to truncate for some fork-choice algorithms
macro_rules! impl_into_u32 {
($main: ident) => {
impl Into<u32> for $main {
fn into(self) -> u32 {
self.0 as u32
}
}
impl $main {
pub fn as_u32(&self) -> u32 {
self.0 as u32
}
}
};
}
macro_rules! impl_from_into_usize {
($main: ident) => {
impl From<usize> for $main {
fn from(n: usize) -> $main {
$main(n as u64)
}
}
impl Into<usize> for $main {
fn into(self) -> usize {
self.0 as usize
}
}
impl $main {
pub fn as_usize(&self) -> usize {
self.0 as usize
}
}
};
}
macro_rules! impl_math_between {
($main: ident, $other: ident) => {
impl PartialOrd<$other> for $main {
/// Utilizes `partial_cmp` on the underlying `u64`.
fn partial_cmp(&self, other: &$other) -> Option<Ordering> {
Some(self.0.cmp(&(*other).into()))
}
}
impl PartialEq<$other> for $main {
fn eq(&self, other: &$other) -> bool {
let other: u64 = (*other).into();
self.0 == other
}
}
impl Add<$other> for $main {
type Output = $main;
fn add(self, other: $other) -> $main {
$main::from(self.0.saturating_add(other.into()))
}
}
impl AddAssign<$other> for $main {
fn add_assign(&mut self, other: $other) {
self.0 = self.0.saturating_add(other.into());
}
}
impl Sub<$other> for $main {
type Output = $main;
fn sub(self, other: $other) -> $main {
$main::from(self.0.saturating_sub(other.into()))
}
}
impl SubAssign<$other> for $main {
fn sub_assign(&mut self, other: $other) {
self.0 = self.0.saturating_sub(other.into());
}
}
impl Mul<$other> for $main {
type Output = $main;
fn mul(self, rhs: $other) -> $main {
let rhs: u64 = rhs.into();
$main::from(self.0.saturating_mul(rhs))
}
}
impl MulAssign<$other> for $main {
fn mul_assign(&mut self, rhs: $other) {
let rhs: u64 = rhs.into();
self.0 = self.0.saturating_mul(rhs)
}
}
impl Div<$other> for $main {
type Output = $main;
fn div(self, rhs: $other) -> $main {
let rhs: u64 = rhs.into();
if rhs == 0 {
panic!("Cannot divide by zero-valued Slot/Epoch")
}
$main::from(self.0 / rhs)
}
}
impl DivAssign<$other> for $main {
fn div_assign(&mut self, rhs: $other) {
let rhs: u64 = rhs.into();
if rhs == 0 {
panic!("Cannot divide by zero-valued Slot/Epoch")
}
self.0 = self.0 / rhs
}
}
impl Rem<$other> for $main {
type Output = $main;
fn rem(self, modulus: $other) -> $main {
let modulus: u64 = modulus.into();
$main::from(self.0 % modulus)
}
}
};
}
macro_rules! impl_math {
($type: ident) => {
impl $type {
pub fn saturating_sub<T: Into<$type>>(&self, other: T) -> $type {
*self - other.into()
}
pub fn saturating_add<T: Into<$type>>(&self, other: T) -> $type {
*self + other.into()
}
pub fn checked_div<T: Into<$type>>(&self, rhs: T) -> Option<$type> {
let rhs: $type = rhs.into();
if rhs == 0 {
None
} else {
Some(*self / rhs)
}
}
pub fn is_power_of_two(&self) -> bool {
self.0.is_power_of_two()
}
}
impl Ord for $type {
fn cmp(&self, other: &$type) -> Ordering {
let other: u64 = (*other).into();
self.0.cmp(&other)
}
}
};
}
macro_rules! impl_display {
($type: ident) => {
impl fmt::Display for $type {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl slog::Value for $type {
fn serialize(
&self,
record: &slog::Record,
key: slog::Key,
serializer: &mut slog::Serializer,
) -> slog::Result {
self.0.serialize(record, key, serializer)
}
}
};
}
macro_rules! impl_ssz {
($type: ident) => {
impl Encodable for $type {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.0);
}
}
impl Decodable for $type {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (value, i) = <_>::ssz_decode(bytes, i)?;
Ok(($type(value), i))
}
}
impl TreeHash for $type {
fn hash_tree_root(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![];
result.append(&mut self.0.hash_tree_root());
hash(&result)
}
}
};
}
macro_rules! impl_hash {
($type: ident) => {
// Implemented to stop clippy lint:
// https://rust-lang.github.io/rust-clippy/master/index.html#derive_hash_xor_eq
impl Hash for $type {
fn hash<H: Hasher>(&self, state: &mut H) {
ssz_encode(self).hash(state)
}
}
};
}
macro_rules! impl_common {
($type: ident) => {
impl_from_into_u64!($type);
impl_from_into_usize!($type);
impl_math_between!($type, $type);
impl_math_between!($type, u64);
impl_math!($type);
impl_display!($type);
impl_ssz!($type);
impl_hash!($type);
};
}
/// Beacon block height, effectively `Slot/GENESIS_START_BLOCK`.
#[derive(Eq, Debug, Clone, Copy, Default, Serialize)]
pub struct SlotHeight(u64);
@ -309,3 +32,13 @@ impl SlotHeight {
SlotHeight(u64::max_value())
}
}
#[cfg(test)]
mod slot_height_tests {
use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode;
all_tests!(SlotHeight);
}

View File

@ -1,92 +0,0 @@
mod foundation;
use crate::{Address, Epoch, Hash256, Slot};
use bls::Signature;
/// Holds all the "constants" for a BeaconChain.
///
/// Spec v0.2.0
#[derive(PartialEq, Debug, Clone)]
pub struct ChainSpec {
/*
* Misc
*/
pub shard_count: u64,
pub target_committee_size: u64,
pub max_balance_churn_quotient: u64,
pub beacon_chain_shard_number: u64,
pub max_indices_per_slashable_vote: u64,
pub max_withdrawals_per_epoch: u64,
pub shuffle_round_count: u64,
/*
* Deposit contract
*/
pub deposit_contract_address: Address,
pub deposit_contract_tree_depth: u64,
/*
* Gwei values
*/
pub min_deposit_amount: u64,
pub max_deposit_amount: u64,
pub fork_choice_balance_increment: u64,
pub ejection_balance: u64,
/*
* Initial Values
*/
pub genesis_fork_version: u64,
pub genesis_slot: Slot,
pub genesis_epoch: Epoch,
pub genesis_start_shard: u64,
pub far_future_epoch: Epoch,
pub zero_hash: Hash256,
pub empty_signature: Signature,
pub bls_withdrawal_prefix_byte: u8,
/*
* Time parameters
*/
pub slot_duration: u64,
pub min_attestation_inclusion_delay: u64,
pub epoch_length: u64,
pub seed_lookahead: Epoch,
pub entry_exit_delay: u64,
pub eth1_data_voting_period: u64,
pub min_validator_withdrawal_epochs: Epoch,
/*
* State list lengths
*/
pub latest_block_roots_length: usize,
pub latest_randao_mixes_length: usize,
pub latest_index_roots_length: usize,
pub latest_penalized_exit_length: usize,
/*
* Reward and penalty quotients
*/
pub base_reward_quotient: u64,
pub whistleblower_reward_quotient: u64,
pub includer_reward_quotient: u64,
pub inactivity_penalty_quotient: u64,
/*
* Max operations per block
*/
pub max_proposer_slashings: u64,
pub max_attester_slashings: u64,
pub max_attestations: u64,
pub max_deposits: u64,
pub max_exits: u64,
/*
* Signature domains
*/
pub domain_deposit: u64,
pub domain_attestation: u64,
pub domain_proposal: u64,
pub domain_exit: u64,
pub domain_randao: u64,
}

View File

@ -1,5 +1,5 @@
[package]
name = "vec_shuffle"
name = "fisher_yates_shuffle"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"

View File

@ -0,0 +1,12 @@
[package]
name = "int_to_bytes"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
[dependencies]
bytes = "0.4"
[dev-dependencies]
yaml-rust = "0.4.2"
hex = "0.3"

View File

@ -0,0 +1,125 @@
use bytes::{BufMut, BytesMut};
/// Returns `int` as little-endian bytes with a length of 1.
pub fn int_to_bytes1(int: u8) -> Vec<u8> {
vec![int]
}
/// Returns `int` as little-endian bytes with a length of 2.
pub fn int_to_bytes2(int: u16) -> Vec<u8> {
let mut bytes = BytesMut::with_capacity(2);
bytes.put_u16_le(int);
bytes.to_vec()
}
/// Returns `int` as little-endian bytes with a length of 3.
///
/// An `Option` is returned as Rust does not support a native
/// `u24` type.
///
/// The Eth 2.0 specification uses `int.to_bytes(3, 'little')`, which throws an error if `int`
/// doesn't fit within 3 bytes. The specification relies upon implicit asserts for some validity
/// conditions, so we ensure the calling function is aware of the error condition as opposed to
/// hiding it with a modulo.
pub fn int_to_bytes3(int: u32) -> Option<Vec<u8>> {
if int < 2_u32.pow(3 * 8) {
let mut bytes = BytesMut::with_capacity(4);
bytes.put_u32_le(int);
Some(bytes[0..3].to_vec())
} else {
None
}
}
/// Returns `int` as little-endian bytes with a length of 4.
pub fn int_to_bytes4(int: u32) -> Vec<u8> {
let mut bytes = BytesMut::with_capacity(4);
bytes.put_u32_le(int);
bytes.to_vec()
}
/// Returns `int` as little-endian bytes with a length of 8.
pub fn int_to_bytes8(int: u64) -> Vec<u8> {
let mut bytes = BytesMut::with_capacity(8);
bytes.put_u64_le(int);
bytes.to_vec()
}
/// Returns `int` as little-endian bytes with a length of 32.
pub fn int_to_bytes32(int: u64) -> Vec<u8> {
let mut bytes = BytesMut::with_capacity(32);
bytes.put_u64_le(int);
bytes.resize(32, 0);
bytes.to_vec()
}
/// Returns `int` as little-endian bytes with a length of 48.
pub fn int_to_bytes48(int: u64) -> Vec<u8> {
let mut bytes = BytesMut::with_capacity(48);
bytes.put_u64_le(int);
bytes.resize(48, 0);
bytes.to_vec()
}
/// Returns `int` as little-endian bytes with a length of 96.
pub fn int_to_bytes96(int: u64) -> Vec<u8> {
let mut bytes = BytesMut::with_capacity(96);
bytes.put_u64_le(int);
bytes.resize(96, 0);
bytes.to_vec()
}
#[cfg(test)]
mod tests {
use super::*;
use hex;
use std::{fs::File, io::prelude::*, path::PathBuf};
use yaml_rust::yaml;
#[test]
fn int_to_bytes3_returns_none() {
assert_eq!(int_to_bytes3(2_u32.pow(24)), None);
}
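// Added illustrative checks (not part of the upstream test vectors): the
// 3-byte helper keeps the low three little-endian bytes, and the wider
// helpers zero-pad beyond the `u64`.
#[test]
fn int_to_bytes3_is_little_endian() {
assert_eq!(int_to_bytes3(0x00ab_cdef), Some(vec![0xef, 0xcd, 0xab]));
}
#[test]
fn int_to_bytes32_pads_with_zeros() {
let mut expected = vec![1_u8];
expected.resize(32, 0);
assert_eq!(int_to_bytes32(1), expected);
}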
#[test]
fn test_vectors() {
/*
* Test vectors are generated here:
*
* https://github.com/ethereum/eth2.0-test-generators
*/
let mut file = {
let mut file_path_buf = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
file_path_buf.push("src/specs/test_vector_int_to_bytes.yml");
File::open(file_path_buf).unwrap()
};
let mut yaml_str = String::new();
file.read_to_string(&mut yaml_str).unwrap();
let docs = yaml::YamlLoader::load_from_str(&yaml_str).unwrap();
let doc = &docs[0];
let test_cases = doc["test_cases"].as_vec().unwrap();
for test_case in test_cases {
let byte_length = test_case["byte_length"].as_i64().unwrap() as u64;
let int = test_case["int"].as_i64().unwrap() as u64;
let bytes_string = test_case["bytes"].clone().into_string().unwrap();
let bytes = hex::decode(bytes_string.replace("0x", "")).unwrap();
match byte_length {
1 => assert_eq!(int_to_bytes1(int as u8), bytes),
2 => assert_eq!(int_to_bytes2(int as u16), bytes),
3 => assert_eq!(int_to_bytes3(int as u32), Some(bytes)),
4 => assert_eq!(int_to_bytes4(int as u32), bytes),
8 => assert_eq!(int_to_bytes8(int), bytes),
32 => assert_eq!(int_to_bytes32(int), bytes),
48 => assert_eq!(int_to_bytes48(int), bytes),
96 => assert_eq!(int_to_bytes96(int), bytes),
_ => panic!("Unknown byte length in test vector."),
}
}
}
}

View File

@ -0,0 +1,215 @@
fork: tchaikovsky
summary: Test vectors for the `int_to_bytes[n]` functions.
test_suite: int_to_bytes
title: int_to_bytes Tests
version: 1.0
test_cases:
- {byte_length: 1, bytes: '0x00', int: 0}
- {byte_length: 1, bytes: '0x01', int: 1}
- {byte_length: 1, bytes: '0xff', int: 255}
- {byte_length: 1, bytes: '0xc0', int: 192}
- {byte_length: 1, bytes: '0xc7', int: 199}
- {byte_length: 1, bytes: '0xf2', int: 242}
- {byte_length: 1, bytes: '0x26', int: 38}
- {byte_length: 1, bytes: '0xfb', int: 251}
- {byte_length: 1, bytes: '0xd5', int: 213}
- {byte_length: 1, bytes: '0x74', int: 116}
- {byte_length: 1, bytes: '0xa8', int: 168}
- {byte_length: 1, bytes: '0xc6', int: 198}
- {byte_length: 1, bytes: '0x3d', int: 61}
- {byte_length: 1, bytes: '0xc2', int: 194}
- {byte_length: 1, bytes: '0x68', int: 104}
- {byte_length: 1, bytes: '0x64', int: 100}
- {byte_length: 1, bytes: '0xc2', int: 194}
- {byte_length: 1, bytes: '0x78', int: 120}
- {byte_length: 1, bytes: '0x33', int: 51}
- {byte_length: 2, bytes: '0x0000', int: 0}
- {byte_length: 2, bytes: '0x0100', int: 1}
- {byte_length: 2, bytes: '0xffff', int: 65535}
- {byte_length: 2, bytes: '0xedea', int: 60141}
- {byte_length: 2, bytes: '0x2d93', int: 37677}
- {byte_length: 2, bytes: '0x611e', int: 7777}
- {byte_length: 2, bytes: '0x637c', int: 31843}
- {byte_length: 2, bytes: '0xe370', int: 28899}
- {byte_length: 2, bytes: '0x96b3', int: 45974}
- {byte_length: 2, bytes: '0xde44', int: 17630}
- {byte_length: 2, bytes: '0xa009', int: 2464}
- {byte_length: 2, bytes: '0xf6ba', int: 47862}
- {byte_length: 2, bytes: '0xef76', int: 30447}
- {byte_length: 2, bytes: '0x7e5f', int: 24446}
- {byte_length: 2, bytes: '0x393d', int: 15673}
- {byte_length: 2, bytes: '0xc820', int: 8392}
- {byte_length: 2, bytes: '0x9031', int: 12688}
- {byte_length: 2, bytes: '0x3963', int: 25401}
- {byte_length: 2, bytes: '0x033d', int: 15619}
- {byte_length: 3, bytes: '0x000000', int: 0}
- {byte_length: 3, bytes: '0x010000', int: 1}
- {byte_length: 3, bytes: '0xffffff', int: 16777215}
- {byte_length: 3, bytes: '0x1fdfb2', int: 11722527}
- {byte_length: 3, bytes: '0x2a7504', int: 292138}
- {byte_length: 3, bytes: '0x09fb20', int: 2161417}
- {byte_length: 3, bytes: '0xa4a6b2', int: 11708068}
- {byte_length: 3, bytes: '0x17feb7', int: 12058135}
- {byte_length: 3, bytes: '0x3ad0b1', int: 11653178}
- {byte_length: 3, bytes: '0xbc92c6', int: 13013692}
- {byte_length: 3, bytes: '0xb6c046', int: 4636854}
- {byte_length: 3, bytes: '0x937f00', int: 32659}
- {byte_length: 3, bytes: '0x8266cb', int: 13330050}
- {byte_length: 3, bytes: '0x8136e9', int: 15283841}
- {byte_length: 3, bytes: '0xe9e062', int: 6480105}
- {byte_length: 3, bytes: '0x50d054', int: 5558352}
- {byte_length: 3, bytes: '0xb95340', int: 4215737}
- {byte_length: 3, bytes: '0x779f52', int: 5414775}
- {byte_length: 3, bytes: '0x15aed0', int: 13676053}
- {byte_length: 4, bytes: '0x00000000', int: 0}
- {byte_length: 4, bytes: '0x01000000', int: 1}
- {byte_length: 4, bytes: '0xffffffff', int: 4294967295}
- {byte_length: 4, bytes: '0x389cd0ca', int: 3402669112}
- {byte_length: 4, bytes: '0xfb29dc70', int: 1893476859}
- {byte_length: 4, bytes: '0xf5f5c999', int: 2580149749}
- {byte_length: 4, bytes: '0xf4f0b8d1', int: 3518558452}
- {byte_length: 4, bytes: '0x830de883', int: 2213023107}
- {byte_length: 4, bytes: '0xe3b4e843', int: 1139324131}
- {byte_length: 4, bytes: '0x4c9ce594', int: 2498075724}
- {byte_length: 4, bytes: '0xa9826dab', int: 2876080809}
- {byte_length: 4, bytes: '0xc40aecb7', int: 3085699780}
- {byte_length: 4, bytes: '0x55490416', int: 369379669}
- {byte_length: 4, bytes: '0x4f2eedc5', int: 3320655439}
- {byte_length: 4, bytes: '0xdd07257e', int: 2116356061}
- {byte_length: 4, bytes: '0x481a57e9', int: 3914799688}
- {byte_length: 4, bytes: '0x4556a493', int: 2477020741}
- {byte_length: 4, bytes: '0xccb781ed', int: 3984701388}
- {byte_length: 4, bytes: '0x6b994065', int: 1698732395}
- {byte_length: 8, bytes: '0x0000000000000000', int: 0}
- {byte_length: 8, bytes: '0x0100000000000000', int: 1}
- {byte_length: 8, bytes: '0xffffffff00000000', int: 4294967295}
- {byte_length: 8, bytes: '0x77d6e31400000000', int: 350475895}
- {byte_length: 8, bytes: '0xf3e681bf00000000', int: 3212961523}
- {byte_length: 8, bytes: '0x62fa7bd800000000', int: 3632003682}
- {byte_length: 8, bytes: '0x82c67b4500000000', int: 1165739650}
- {byte_length: 8, bytes: '0x52577fba00000000', int: 3128907602}
- {byte_length: 8, bytes: '0x5eac939b00000000', int: 2610146398}
- {byte_length: 8, bytes: '0x12ba143700000000', int: 924105234}
- {byte_length: 8, bytes: '0x1d3b893a00000000', int: 982072093}
- {byte_length: 8, bytes: '0x8262153000000000', int: 806707842}
- {byte_length: 8, bytes: '0xbb9cc58e00000000', int: 2395315387}
- {byte_length: 8, bytes: '0x76fef6d100000000', int: 3522625142}
- {byte_length: 8, bytes: '0x0fc3d35700000000', int: 1473495823}
- {byte_length: 8, bytes: '0xc7f851de00000000', int: 3729914055}
- {byte_length: 8, bytes: '0x3a1e5cb200000000', int: 2992381498}
- {byte_length: 8, bytes: '0x3b748e3400000000', int: 881751099}
- {byte_length: 8, bytes: '0xdc92479600000000', int: 2521273052}
- {byte_length: 32, bytes: '0x0000000000000000000000000000000000000000000000000000000000000000',
int: 0}
- {byte_length: 32, bytes: '0x0100000000000000000000000000000000000000000000000000000000000000',
int: 1}
- {byte_length: 32, bytes: '0xffffffff00000000000000000000000000000000000000000000000000000000',
int: 4294967295}
- {byte_length: 32, bytes: '0x2395ad4c00000000000000000000000000000000000000000000000000000000',
int: 1286444323}
- {byte_length: 32, bytes: '0x38a735b800000000000000000000000000000000000000000000000000000000',
int: 3090523960}
- {byte_length: 32, bytes: '0x5a9416e100000000000000000000000000000000000000000000000000000000',
int: 3776353370}
- {byte_length: 32, bytes: '0x220f757500000000000000000000000000000000000000000000000000000000',
int: 1970605858}
- {byte_length: 32, bytes: '0x65bf635200000000000000000000000000000000000000000000000000000000',
int: 1382268773}
- {byte_length: 32, bytes: '0x033f902200000000000000000000000000000000000000000000000000000000',
int: 579878659}
- {byte_length: 32, bytes: '0x2b2d58ab00000000000000000000000000000000000000000000000000000000',
int: 2874682667}
- {byte_length: 32, bytes: '0x15af31da00000000000000000000000000000000000000000000000000000000',
int: 3660689173}
- {byte_length: 32, bytes: '0xd260642e00000000000000000000000000000000000000000000000000000000',
int: 778330322}
- {byte_length: 32, bytes: '0xcdf8429700000000000000000000000000000000000000000000000000000000',
int: 2537748685}
- {byte_length: 32, bytes: '0xc9304b0500000000000000000000000000000000000000000000000000000000',
int: 88813769}
- {byte_length: 32, bytes: '0xf7b7ba0200000000000000000000000000000000000000000000000000000000',
int: 45791223}
- {byte_length: 32, bytes: '0x1ee262d900000000000000000000000000000000000000000000000000000000',
int: 3647136286}
- {byte_length: 32, bytes: '0xb34b03d300000000000000000000000000000000000000000000000000000000',
int: 3540208563}
- {byte_length: 32, bytes: '0x3d52db4d00000000000000000000000000000000000000000000000000000000',
int: 1306219069}
- {byte_length: 32, bytes: '0xd86db47900000000000000000000000000000000000000000000000000000000',
int: 2041867736}
- {byte_length: 48, bytes: '0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 0}
- {byte_length: 48, bytes: '0x010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1}
- {byte_length: 48, bytes: '0xffffffff0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 4294967295}
- {byte_length: 48, bytes: '0x61aeae650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1705946721}
- {byte_length: 48, bytes: '0xd1c08fac0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 2895102161}
- {byte_length: 48, bytes: '0x6f36b6c90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3384161903}
- {byte_length: 48, bytes: '0x102f2f3b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 992947984}
- {byte_length: 48, bytes: '0x0f53f9240000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 620319503}
- {byte_length: 48, bytes: '0x5c1d46b30000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3007716700}
- {byte_length: 48, bytes: '0x955791510000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1368479637}
- {byte_length: 48, bytes: '0xf934170f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 253179129}
- {byte_length: 48, bytes: '0xc1a8b76f0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1874307265}
- {byte_length: 48, bytes: '0xdf3f62c20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3261218783}
- {byte_length: 48, bytes: '0xbd741bc50000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3306910909}
- {byte_length: 48, bytes: '0xfe5dc5540000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1422220798}
- {byte_length: 48, bytes: '0x364f10df0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3742388022}
- {byte_length: 48, bytes: '0x4a3909450000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1158232394}
- {byte_length: 48, bytes: '0xe04760380000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 945833952}
- {byte_length: 48, bytes: '0x755c78540000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1417174133}
- {byte_length: 96, bytes: '0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 0}
- {byte_length: 96, bytes: '0x010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1}
- {byte_length: 96, bytes: '0xffffffff0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 4294967295}
- {byte_length: 96, bytes: '0xa3274ee20000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3796772771}
- {byte_length: 96, bytes: '0x1658135c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1544771606}
- {byte_length: 96, bytes: '0x2af24fb30000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3008361002}
- {byte_length: 96, bytes: '0x9e6bc40a0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 180644766}
- {byte_length: 96, bytes: '0x0745b3c50000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3316860167}
- {byte_length: 96, bytes: '0xe1b59f830000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 2208282081}
- {byte_length: 96, bytes: '0x985a9e6e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1855871640}
- {byte_length: 96, bytes: '0x3d4e3a090000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 154816061}
- {byte_length: 96, bytes: '0x6f5dfb630000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1677417839}
- {byte_length: 96, bytes: '0x383cdecd0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3453893688}
- {byte_length: 96, bytes: '0x38f55ceb0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3948737848}
- {byte_length: 96, bytes: '0xcd746f5d0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1567585485}
- {byte_length: 96, bytes: '0x3d971e910000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 2434701117}
- {byte_length: 96, bytes: '0x3adff0c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3237011258}
- {byte_length: 96, bytes: '0x5ed40a710000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 1896535134}
- {byte_length: 96, bytes: '0x755d2ed40000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000',
int: 3559808373}

View File

@ -0,0 +1,15 @@
[package]
name = "swap_or_not_shuffle"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
[dependencies]
bytes = "0.4"
hashing = { path = "../hashing" }
int_to_bytes = { path = "../int_to_bytes" }
[dev-dependencies]
yaml-rust = "0.4.2"
hex = "0.3"
ethereum-types = "0.5"

View File

@ -0,0 +1,178 @@
use bytes::Buf;
use hashing::hash;
use int_to_bytes::{int_to_bytes1, int_to_bytes4};
use std::cmp::max;
use std::io::Cursor;
/// Return `p(index)` in a pseudorandom permutation `p` of `0...list_size-1` with `seed` as entropy.
///
/// Utilizes 'swap or not' shuffling found in
/// https://link.springer.com/content/pdf/10.1007%2F978-3-642-32009-5_1.pdf
/// See the 'generalized domain' algorithm on page 3.
///
/// Returns `None` under any of the following conditions:
/// - `list_size == 0`
/// - `index >= list_size`
/// - `list_size > 2**24`
/// - `list_size > usize::max_value() / 2`
pub fn get_permutated_index(
index: usize,
list_size: usize,
seed: &[u8],
shuffle_round_count: u8,
) -> Option<usize> {
if list_size == 0
|| index >= list_size
|| list_size > usize::max_value() / 2
|| list_size > 2_usize.pow(24)
{
return None;
}
let mut index = index;
for round in 0..shuffle_round_count {
let pivot = bytes_to_int64(&hash_with_round(seed, round)[..]) as usize % list_size;
let flip = (pivot + list_size - index) % list_size;
let position = max(index, flip);
let source = hash_with_round_and_position(seed, round, position)?;
let byte = source[(position % 256) / 8];
let bit = (byte >> (position % 8)) % 2;
index = if bit == 1 { flip } else { index }
}
Some(index)
}
fn hash_with_round_and_position(seed: &[u8], round: u8, position: usize) -> Option<Vec<u8>> {
let mut seed = seed.to_vec();
seed.append(&mut int_to_bytes1(round));
/*
* Note: the specification has an implicit assertion in `int_to_bytes4` that `position / 256 <
* 2**24`. For efficiency, we do not check for that here as it is checked in `get_permutated_index`.
*/
seed.append(&mut int_to_bytes4((position / 256) as u32));
Some(hash(&seed[..]))
}
fn hash_with_round(seed: &[u8], round: u8) -> Vec<u8> {
let mut seed = seed.to_vec();
seed.append(&mut int_to_bytes1(round));
hash(&seed[..])
}
fn bytes_to_int64(bytes: &[u8]) -> u64 {
let mut cursor = Cursor::new(bytes);
cursor.get_u64_le()
}
#[cfg(test)]
mod tests {
use super::*;
use ethereum_types::H256 as Hash256;
use hex;
use std::{fs::File, io::prelude::*, path::PathBuf};
use yaml_rust::yaml;
#[test]
#[ignore]
fn fuzz_test() {
let max_list_size = 2_usize.pow(24);
let test_runs = 1000;
// Test at max list_size with the end index.
for _ in 0..test_runs {
let index = max_list_size - 1;
let list_size = max_list_size;
let seed = Hash256::random();
let shuffle_rounds = 90;
assert!(get_permutated_index(index, list_size, &seed[..], shuffle_rounds).is_some());
}
// Test at max list_size low indices.
for i in 0..test_runs {
let index = i;
let list_size = max_list_size;
let seed = Hash256::random();
let shuffle_rounds = 90;
assert!(get_permutated_index(index, list_size, &seed[..], shuffle_rounds).is_some());
}
// Test at max list_size high indices.
for i in 0..test_runs {
let index = max_list_size - 1 - i;
let list_size = max_list_size;
let seed = Hash256::random();
let shuffle_rounds = 90;
assert!(get_permutated_index(index, list_size, &seed[..], shuffle_rounds).is_some());
}
}
#[test]
fn returns_none_for_zero_length_list() {
assert_eq!(None, get_permutated_index(100, 0, &[42, 42], 90));
}
#[test]
fn returns_none_for_out_of_bounds_index() {
assert_eq!(None, get_permutated_index(100, 100, &[42, 42], 90));
}
#[test]
fn returns_none_for_too_large_list() {
assert_eq!(
None,
get_permutated_index(100, usize::max_value() / 2, &[42, 42], 90)
);
}
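// Added illustrative check (not part of the upstream test vectors): a
// single-element list can only map index 0 to itself, so any seed and round
// count yields the identity permutation.
#[test]
fn single_element_list_is_identity() {
assert_eq!(Some(0), get_permutated_index(0, 1, &[42, 42], 90));
}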
#[test]
fn test_vectors() {
/*
* Test vectors are generated here:
*
* https://github.com/ethereum/eth2.0-test-generators
*/
let mut file = {
let mut file_path_buf = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
file_path_buf.push("src/specs/test_vector_permutated_index.yml");
File::open(file_path_buf).unwrap()
};
let mut yaml_str = String::new();
file.read_to_string(&mut yaml_str).unwrap();
let docs = yaml::YamlLoader::load_from_str(&yaml_str).unwrap();
let doc = &docs[0];
let test_cases = doc["test_cases"].as_vec().unwrap();
for (i, test_case) in test_cases.iter().enumerate() {
let index = test_case["index"].as_i64().unwrap() as usize;
let list_size = test_case["list_size"].as_i64().unwrap() as usize;
let permutated_index = test_case["permutated_index"].as_i64().unwrap() as usize;
let shuffle_round_count = test_case["shuffle_round_count"].as_i64().unwrap();
let seed_string = test_case["seed"].clone().into_string().unwrap();
let seed = hex::decode(seed_string.replace("0x", "")).unwrap();
let shuffle_round_count = if shuffle_round_count < (u8::max_value() as i64) {
shuffle_round_count as u8
} else {
panic!("shuffle_round_count must be a u8")
};
assert_eq!(
Some(permutated_index),
get_permutated_index(index, list_size, &seed[..], shuffle_round_count),
"Failure on case #{} index: {}, list_size: {}, round_count: {}, seed: {}",
i,
index,
list_size,
shuffle_round_count,
seed_string,
);
}
}
}

View File

@ -0,0 +1,86 @@
fork: tchaikovsky
summary: Test vectors for list shuffling using `get_permutated_index`
test_suite: permutated_index
title: Permutated Index Tests
version: 1.0
test_cases:
- {index: 0, list_size: 1, permutated_index: 0, seed: '0xc0c7f226fbd574a8c63dc26864c27833ea931e7c70b34409ba765f3d2031633d',
shuffle_round_count: 90}
- {index: 0, list_size: 2, permutated_index: 0, seed: '0xb20420b2b7b1c64600cbe962544052d0bbe13da403950d198d4f4ea28762953f',
shuffle_round_count: 90}
- {index: 1, list_size: 2, permutated_index: 0, seed: '0x11f1322c3a4cfce20efb7d7eca50291470043d6e8a2c62956e687571607d3f0e',
shuffle_round_count: 90}
- {index: 0, list_size: 3, permutated_index: 2, seed: '0x5bd0af3f74fe6986bb99b3ecc0ea15a403456ce708c05ceeeddc0a4205caf072',
shuffle_round_count: 90}
- {index: 1, list_size: 3, permutated_index: 1, seed: '0xba06ff9bde03f37eddeacb261a51109676d549c1bea3b81edd82df68cc03a97f',
shuffle_round_count: 90}
- {index: 2, list_size: 3, permutated_index: 2, seed: '0xf58a8970c63ca86dd3b8b8a615302ec06cddea1279bf4a2725c781ce6aba348d',
shuffle_round_count: 90}
- {index: 0, list_size: 1024, permutated_index: 1005, seed: '0x383556e23fcb9e73c23ad33cfb50f4c098f49688a84b128c2885960e5f1b3982',
shuffle_round_count: 90}
- {index: 1023, list_size: 1024, permutated_index: 934, seed: '0x2ee5dab30ad1580cdabb175a4b1512cac5566866d65a15e9e22c8444f460c9dc',
shuffle_round_count: 90}
- {index: 3925, list_size: 4040, permutated_index: 32, seed: '0x34a3c13f211e63c56e9e1187f31a56a4230d8d5bf5e584f0e4fe93946af91cce',
shuffle_round_count: 90}
- {index: 885, list_size: 2417, permutated_index: 1822, seed: '0x1346e3970815107154b58b1eff411bfca3342ea0d8282a86304d79d62d5f3c52',
shuffle_round_count: 90}
- {index: 840, list_size: 1805, permutated_index: 808, seed: '0x0810c104b75e25bf89c0066deebc3461937fc0e72ae04ee74f245616c15718df',
shuffle_round_count: 90}
- {index: 881, list_size: 1788, permutated_index: 582, seed: '0x34adb35f3fc2880d220e520120a032bbaa0f4bd7a5fcf1c2269de21075e7a464',
shuffle_round_count: 90}
- {index: 1362, list_size: 1817, permutated_index: 1018, seed: '0xc9b0c76e11f4c3c3c38b447aca5352d93132ad5678da420ca2e69d92588e0fba',
shuffle_round_count: 90}
- {index: 28, list_size: 111, permutated_index: 0, seed: '0x293145c31aeb3eb29ccdf3327d0f3dd4592cdfb2fad3703229c6c2e720dc792f',
shuffle_round_count: 90}
- {index: 959, list_size: 2558, permutated_index: 2094, seed: '0xc9f4c5fbb2a397fd8ea36dbfcec0d733d0af7ec3a03d789a66231f3bc7cafa5e',
shuffle_round_count: 90}
- {index: 887, list_size: 2406, permutated_index: 831, seed: '0x565729e0d5de524e6dee54d1b8b5882ad8e55c18a30462ac02c4bb86c27d26cb',
shuffle_round_count: 90}
- {index: 3526, list_size: 3674, permutated_index: 3531, seed: '0x2951395b1a1bbda8d53b776c7fc8bdad6030de943c4e3f938202ac553f44381d',
shuffle_round_count: 90}
- {index: 978, list_size: 3175, permutated_index: 2257, seed: '0x74aac23523cb45b7ee52d5d2f7b2d24ebc6bf2d63ef189efccabc4a16bb17cd8',
shuffle_round_count: 90}
- {index: 37, list_size: 231, permutated_index: 48, seed: '0xe4083e61b31931bad662392758e8bc30a4ce7b26b6897c2221a3358f25fdc1d8',
shuffle_round_count: 90}
- {index: 340, list_size: 693, permutated_index: 234, seed: '0x8089c1f242aa48c6611180f221c120e930adeecaf3084b2b85f9b1dfebe34f63',
shuffle_round_count: 90}
- {index: 0, list_size: 9, permutated_index: 1, seed: '0x7fda0ab6a746b6b0206febb8259891e0e6f88bf52143b20d6c78caf7caf8e7b3',
shuffle_round_count: 90}
- {index: 200, list_size: 1108, permutated_index: 952, seed: '0x87b210d000b5f57e9834388d4bc2b86ae8b31383fa10a34b029546c2ebabb807',
shuffle_round_count: 90}
- {index: 1408, list_size: 1531, permutated_index: 584, seed: '0x0670a78b38e0419aaead5d1cc8f40f58044b7076ced8193c08b580dd95a13555',
shuffle_round_count: 90}
- {index: 1704, list_size: 1863, permutated_index: 1022, seed: '0xdbf78665190a6133191e91ab35b1106e8984dfc0dfa36018004f880b431c2a14',
shuffle_round_count: 90}
- {index: 793, list_size: 3938, permutated_index: 2607, seed: '0x54bf0192292ffae0bf39b39f12e0540b97591af0a2980d32f277bd33201395d3',
shuffle_round_count: 90}
- {index: 14, list_size: 28, permutated_index: 10, seed: '0x43054417c6056404c586c907dfc5fceb66ebef541d143b00a3b676f3c0fbf4c5',
shuffle_round_count: 90}
- {index: 2909, list_size: 3920, permutated_index: 726, seed: '0x5eabf289fdcfe0a3aba33a185fb1a4ae2f2b6f78daf61f5d356971e0cb270207',
shuffle_round_count: 90}
- {index: 1943, list_size: 1959, permutated_index: 1292, seed: '0xca86322db56927d727101e31c93f616f746317d29aa10d88f371592963de92aa',
shuffle_round_count: 90}
- {index: 1647, list_size: 2094, permutated_index: 1805, seed: '0x3cfe274230a112bc68614882645339fda2f134501a042079d620ec65cf8d3fa6',
shuffle_round_count: 90}
- {index: 1012, list_size: 1877, permutated_index: 216, seed: '0x7b5ff8a848af32d85c6d37c26e61a57e96780fcebc350ad1845e83fe5e4679ac',
shuffle_round_count: 90}
- {index: 35, list_size: 2081, permutated_index: 1458, seed: '0x40691aa31a49c2391e025ec272c812510cb07c055f6201e84479499326330628',
shuffle_round_count: 90}
- {index: 1136, list_size: 2189, permutated_index: 1579, seed: '0x31a0deb2c8c5f809f413b7a36ec680ee8b19bbb9a39c4e207326155864bc8be5',
shuffle_round_count: 90}
- {index: 1775, list_size: 3434, permutated_index: 707, seed: '0x92f30d8556382b72a5797db811486e7a213e0145d6c946e5121aa6a8f761d164',
shuffle_round_count: 90}
- {index: 1109, list_size: 2010, permutated_index: 433, seed: '0x093fb976f2497361897012dfa6dc019009eda2e48bbeb4b7c56d4aa5da7d5f87',
shuffle_round_count: 90}
- {index: 359, list_size: 538, permutated_index: 115, seed: '0xa79b35beacbe48c662d60884c704040024c55ab879e5f61521013c5f45eb3b70',
shuffle_round_count: 90}
- {index: 1259, list_size: 1473, permutated_index: 1351, seed: '0x02c53c9c6ddf259716ff02e49a294eba33e4ad255d7e90dbefdbc991adf603e5',
shuffle_round_count: 90}
- {index: 2087, list_size: 2634, permutated_index: 1497, seed: '0xa5a4c57c5705ec697a74e6c7161191b18f58ca882a0fcc18f68dc3b57a1aa5b6',
shuffle_round_count: 90}
- {index: 2069, list_size: 2511, permutated_index: 1837, seed: '0xe7051ebc07f2e7b4d4b28f48d1e42d7b9dcec31c240ca6e1a0c06139ccfc4b8f',
shuffle_round_count: 90}
- {index: 1660, list_size: 3932, permutated_index: 3046, seed: '0x8687c029ffc443879527a64c31b7acbb38ab6e343779d0b2c6e250046fdb9de8',
shuffle_round_count: 90}
- {index: 379, list_size: 646, permutated_index: 32, seed: '0x17e854f4e80401345e13f72af45b221c9f7a840f6a8c1328ddf9c9ca9a088379',
shuffle_round_count: 90}