Merge branch 'master' into beacon-state-caching

This commit is contained in:
Paul Hauner 2019-02-24 19:14:05 +13:00
commit 5ca7c9929a
No known key found for this signature in database
GPG Key ID: 303E4494BB28068C
44 changed files with 920 additions and 835 deletions


@@ -12,6 +12,7 @@ members = [
     "eth2/utils/int_to_bytes",
     "eth2/utils/slot_clock",
     "eth2/utils/ssz",
+    "eth2/utils/ssz_derive",
     "eth2/utils/swap_or_not_shuffle",
     "eth2/utils/fisher_yates_shuffle",
     "beacon_node",


@@ -359,6 +359,7 @@ where
         self.fork_choice.write().add_attestation(
             free_attestation.validator_index,
             &free_attestation.data.beacon_block_root,
+            &self.spec,
         )?;
         Ok(aggregation_outcome)
     }
@@ -488,7 +489,9 @@ where
         self.state_store.put(&state_root, &ssz_encode(&state)[..])?;
 
         // run the fork_choice add_block logic
-        self.fork_choice.write().add_block(&block, &block_root)?;
+        self.fork_choice
+            .write()
+            .add_block(&block, &block_root, &self.spec)?;
 
         // If the parent block was the parent_block, automatically update the canonical head.
         //
@@ -569,7 +572,10 @@ where
     pub fn fork_choice(&self) -> Result<(), Error> {
         let present_head = self.finalized_head().beacon_block_root;
 
-        let new_head = self.fork_choice.write().find_head(&present_head)?;
+        let new_head = self
+            .fork_choice
+            .write()
+            .find_head(&present_head, &self.spec)?;
 
         if new_head != present_head {
             let block = self


@@ -6,4 +6,4 @@ pub use self::beacon_chain::{
     BeaconChain, BlockProcessingOutcome, Error, InvalidBlock, ValidBlock,
 };
 pub use self::checkpoint::CheckPoint;
-pub use fork_choice::{ForkChoice, ForkChoiceAlgorithms, ForkChoiceError};
+pub use fork_choice::{ForkChoice, ForkChoiceAlgorithm, ForkChoiceError};


@@ -6,7 +6,7 @@ use db::{
     stores::{BeaconBlockStore, BeaconStateStore},
     MemoryDB,
 };
-use fork_choice::OptimisedLMDGhost;
+use fork_choice::BitwiseLMDGhost;
 use log::debug;
 use rayon::prelude::*;
 use slot_clock::TestingSlotClock;
@@ -28,7 +28,7 @@ use types::{
 /// is not useful for testing that multiple beacon nodes can reach consensus.
 pub struct BeaconChainHarness {
     pub db: Arc<MemoryDB>,
-    pub beacon_chain: Arc<BeaconChain<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>>,
+    pub beacon_chain: Arc<BeaconChain<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>>,
     pub block_store: Arc<BeaconBlockStore<MemoryDB>>,
     pub state_store: Arc<BeaconStateStore<MemoryDB>>,
     pub validators: Vec<ValidatorHarness>,
@@ -46,7 +46,7 @@ impl BeaconChainHarness {
         let state_store = Arc::new(BeaconStateStore::new(db.clone()));
         let genesis_time = 1_549_935_547; // 12th Feb 2018 (arbitrary value in the past).
         let slot_clock = TestingSlotClock::new(spec.genesis_slot.as_u64());
-        let fork_choice = OptimisedLMDGhost::new(block_store.clone(), state_store.clone());
+        let fork_choice = BitwiseLMDGhost::new(block_store.clone(), state_store.clone());
         let latest_eth1_data = Eth1Data {
             deposit_root: Hash256::zero(),
             block_hash: Hash256::zero(),


@@ -10,7 +10,7 @@ use block_proposer::{BlockProducer, Error as BlockPollError};
 use db::MemoryDB;
 use direct_beacon_node::DirectBeaconNode;
 use direct_duties::DirectDuties;
-use fork_choice::OptimisedLMDGhost;
+use fork_choice::BitwiseLMDGhost;
 use local_signer::LocalSigner;
 use slot_clock::TestingSlotClock;
 use std::sync::Arc;
@@ -36,20 +36,20 @@ pub enum AttestationProduceError {
 pub struct ValidatorHarness {
     pub block_producer: BlockProducer<
         TestingSlotClock,
-        DirectBeaconNode<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>,
-        DirectDuties<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>,
+        DirectBeaconNode<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>,
+        DirectDuties<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>,
         LocalSigner,
     >,
     pub attester: Attester<
         TestingSlotClock,
-        DirectBeaconNode<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>,
-        DirectDuties<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>,
+        DirectBeaconNode<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>,
+        DirectDuties<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>,
         LocalSigner,
     >,
     pub spec: Arc<ChainSpec>,
-    pub epoch_map: Arc<DirectDuties<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>>,
+    pub epoch_map: Arc<DirectDuties<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>>,
     pub keypair: Keypair,
-    pub beacon_node: Arc<DirectBeaconNode<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>>,
+    pub beacon_node: Arc<DirectBeaconNode<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>>,
     pub slot_clock: Arc<TestingSlotClock>,
     pub signer: Arc<LocalSigner>,
 }
@@ -61,7 +61,7 @@ impl ValidatorHarness {
     /// A `BlockProducer` and `Attester` is created..
     pub fn new(
         keypair: Keypair,
-        beacon_chain: Arc<BeaconChain<MemoryDB, TestingSlotClock, OptimisedLMDGhost<MemoryDB>>>,
+        beacon_chain: Arc<BeaconChain<MemoryDB, TestingSlotClock, BitwiseLMDGhost<MemoryDB>>>,
         spec: Arc<ChainSpec>,
     ) -> Self {
         let slot_clock = Arc::new(TestingSlotClock::new(spec.genesis_slot.as_u64()));


@@ -14,7 +14,7 @@ use db::{
     stores::{BeaconBlockStore, BeaconStateStore},
     MemoryDB,
 };
-use fork_choice::optimised_lmd_ghost::OptimisedLMDGhost;
+use fork_choice::BitwiseLMDGhost;
 use slog::{error, info, o, Drain};
 use slot_clock::SystemTimeSlotClock;
 use std::sync::Arc;
@@ -81,7 +81,7 @@ fn main() {
     let slot_clock = SystemTimeSlotClock::new(genesis_time, spec.slot_duration)
         .expect("Unable to load SystemTimeSlotClock");
     // Choose the fork choice
-    let fork_choice = OptimisedLMDGhost::new(block_store.clone(), state_store.clone());
+    let fork_choice = BitwiseLMDGhost::new(block_store.clone(), state_store.clone());
     /*
      * Generate some random data to start a chain with.


@@ -9,10 +9,13 @@ db = { path = "../../beacon_node/db" }
 ssz = { path = "../utils/ssz" }
 types = { path = "../types" }
 fast-math = "0.1.1"
-byteorder = "1.3.1"
+log = "0.4.6"
+bit-vec = "0.5.0"
 
 [dev-dependencies]
+hex = "0.3.2"
 yaml-rust = "0.4.2"
 bls = { path = "../utils/bls" }
 slot_clock = { path = "../utils/slot_clock" }
 beacon_chain = { path = "../../beacon_node/beacon_chain" }
+env_logger = "0.6.0"


@ -1,49 +1,25 @@
// Copyright 2019 Sigma Prime Pty Ltd. extern crate bit_vec;
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
extern crate byteorder;
extern crate fast_math; extern crate fast_math;
use crate::{ForkChoice, ForkChoiceError}; use crate::{ForkChoice, ForkChoiceError};
use byteorder::{BigEndian, ByteOrder}; use bit_vec::BitVec;
use db::{ use db::{
stores::{BeaconBlockStore, BeaconStateStore}, stores::{BeaconBlockStore, BeaconStateStore},
ClientDB, ClientDB,
}; };
use fast_math::log2_raw; use fast_math::log2_raw;
use log::{debug, trace};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use types::{ use types::{
readers::BeaconBlockReader, validator_registry::get_active_validator_indices, BeaconBlock, readers::BeaconBlockReader, validator_registry::get_active_validator_indices, BeaconBlock,
Hash256, Slot, SlotHeight, ChainSpec, Hash256, Slot, SlotHeight,
}; };
//TODO: Pruning - Children //TODO: Pruning - Children
//TODO: Handle Syncing //TODO: Handle Syncing
//TODO: Sort out global constants /// The optimised bitwise LMD-GHOST fork choice rule.
const GENESIS_SLOT: u64 = 0;
const FORK_CHOICE_BALANCE_INCREMENT: u64 = 1e9 as u64;
const MAX_DEPOSIT_AMOUNT: u64 = 32e9 as u64;
const EPOCH_LENGTH: u64 = 64;
/// The optimised LMD-GHOST fork choice rule.
/// NOTE: This uses u32 to represent difference between block heights. Thus this is only /// NOTE: This uses u32 to represent difference between block heights. Thus this is only
/// applicable for block height differences in the range of a u32. /// applicable for block height differences in the range of a u32.
/// This can potentially be parallelized in some parts. /// This can potentially be parallelized in some parts.
@ -51,6 +27,13 @@ const EPOCH_LENGTH: u64 = 64;
// the comparison. Log2_raw takes 2ns according to the documentation. // the comparison. Log2_raw takes 2ns according to the documentation.
#[inline] #[inline]
fn log2_int(x: u32) -> u32 { fn log2_int(x: u32) -> u32 {
if x == 0 {
return 0;
}
assert!(
x <= std::f32::MAX as u32,
"Height too large for fast log in bitwise fork choice"
);
log2_raw(x as f32) as u32 log2_raw(x as f32) as u32
} }
@ -58,8 +41,8 @@ fn power_of_2_below(x: u32) -> u32 {
2u32.pow(log2_int(x)) 2u32.pow(log2_int(x))
} }
/// Stores the necessary data structures to run the optimised lmd ghost algorithm. /// Stores the necessary data structures to run the optimised bitwise lmd ghost algorithm.
pub struct OptimisedLMDGhost<T: ClientDB + Sized> { pub struct BitwiseLMDGhost<T: ClientDB + Sized> {
/// A cache of known ancestors at given heights for a specific block. /// A cache of known ancestors at given heights for a specific block.
//TODO: Consider FnvHashMap //TODO: Consider FnvHashMap
cache: HashMap<CacheKey<u32>, Hash256>, cache: HashMap<CacheKey<u32>, Hash256>,
@ -78,7 +61,7 @@ pub struct OptimisedLMDGhost<T: ClientDB + Sized> {
max_known_height: SlotHeight, max_known_height: SlotHeight,
} }
impl<T> OptimisedLMDGhost<T> impl<T> BitwiseLMDGhost<T>
where where
T: ClientDB + Sized, T: ClientDB + Sized,
{ {
@ -86,7 +69,7 @@ where
block_store: Arc<BeaconBlockStore<T>>, block_store: Arc<BeaconBlockStore<T>>,
state_store: Arc<BeaconStateStore<T>>, state_store: Arc<BeaconStateStore<T>>,
) -> Self { ) -> Self {
OptimisedLMDGhost { BitwiseLMDGhost {
cache: HashMap::new(), cache: HashMap::new(),
ancestors: vec![HashMap::new(); 16], ancestors: vec![HashMap::new(); 16],
latest_attestation_targets: HashMap::new(), latest_attestation_targets: HashMap::new(),
@ -103,6 +86,7 @@ where
&self, &self,
state_root: &Hash256, state_root: &Hash256,
block_slot: Slot, block_slot: Slot,
spec: &ChainSpec,
) -> Result<HashMap<Hash256, u64>, ForkChoiceError> { ) -> Result<HashMap<Hash256, u64>, ForkChoiceError> {
// get latest votes // get latest votes
// Note: Votes are weighted by min(balance, MAX_DEPOSIT_AMOUNT) // // Note: Votes are weighted by min(balance, MAX_DEPOSIT_AMOUNT) //
@ -117,25 +101,31 @@ where
let active_validator_indices = get_active_validator_indices( let active_validator_indices = get_active_validator_indices(
&current_state.validator_registry[..], &current_state.validator_registry[..],
block_slot.epoch(EPOCH_LENGTH), block_slot.epoch(spec.epoch_length),
); );
for index in active_validator_indices { for index in active_validator_indices {
let balance = let balance = std::cmp::min(
std::cmp::min(current_state.validator_balances[index], MAX_DEPOSIT_AMOUNT) current_state.validator_balances[index],
/ FORK_CHOICE_BALANCE_INCREMENT; spec.max_deposit_amount,
) / spec.fork_choice_balance_increment;
if balance > 0 { if balance > 0 {
if let Some(target) = self.latest_attestation_targets.get(&(index as u64)) { if let Some(target) = self.latest_attestation_targets.get(&(index as u64)) {
*latest_votes.entry(*target).or_insert_with(|| 0) += balance; *latest_votes.entry(*target).or_insert_with(|| 0) += balance;
} }
} }
} }
trace!("Latest votes: {:?}", latest_votes);
Ok(latest_votes) Ok(latest_votes)
} }
/// Gets the ancestor at a given height `at_height` of a block specified by `block_hash`. /// Gets the ancestor at a given height `at_height` of a block specified by `block_hash`.
fn get_ancestor(&mut self, block_hash: Hash256, at_height: SlotHeight) -> Option<Hash256> { fn get_ancestor(
&mut self,
block_hash: Hash256,
target_height: SlotHeight,
spec: &ChainSpec,
) -> Option<Hash256> {
// return None if we can't get the block from the db. // return None if we can't get the block from the db.
let block_height = { let block_height = {
let block_slot = self let block_slot = self
@ -145,32 +135,31 @@ where
.expect("Should have returned already if None") .expect("Should have returned already if None")
.slot; .slot;
block_slot.height(Slot::from(GENESIS_SLOT)) block_slot.height(spec.genesis_slot)
}; };
// verify we haven't exceeded the block height // verify we haven't exceeded the block height
if at_height >= block_height { if target_height >= block_height {
if at_height > block_height { if target_height > block_height {
return None; return None;
} else { } else {
return Some(block_hash); return Some(block_hash);
} }
} }
// check if the result is stored in our cache // check if the result is stored in our cache
let cache_key = CacheKey::new(&block_hash, at_height.as_u32()); let cache_key = CacheKey::new(&block_hash, target_height.as_u32());
if let Some(ancestor) = self.cache.get(&cache_key) { if let Some(ancestor) = self.cache.get(&cache_key) {
return Some(*ancestor); return Some(*ancestor);
} }
// not in the cache recursively search for ancestors using a log-lookup // not in the cache recursively search for ancestors using a log-lookup
if let Some(ancestor) = { if let Some(ancestor) = {
let ancestor_lookup = self.ancestors let ancestor_lookup = self.ancestors
[log2_int((block_height - at_height - 1u64).as_u32()) as usize] [log2_int((block_height - target_height - 1u64).as_u32()) as usize]
.get(&block_hash) .get(&block_hash)
//TODO: Panic if we can't lookup and fork choice fails //TODO: Panic if we can't lookup and fork choice fails
.expect("All blocks should be added to the ancestor log lookup table"); .expect("All blocks should be added to the ancestor log lookup table");
self.get_ancestor(*ancestor_lookup, at_height) self.get_ancestor(*ancestor_lookup, target_height, &spec)
} { } {
// add the result to the cache // add the result to the cache
self.cache.insert(cache_key, ancestor); self.cache.insert(cache_key, ancestor);
@ -185,15 +174,17 @@ where
&mut self, &mut self,
latest_votes: &HashMap<Hash256, u64>, latest_votes: &HashMap<Hash256, u64>,
block_height: SlotHeight, block_height: SlotHeight,
spec: &ChainSpec,
) -> Option<Hash256> { ) -> Option<Hash256> {
// map of vote counts for every hash at this height // map of vote counts for every hash at this height
let mut current_votes: HashMap<Hash256, u64> = HashMap::new(); let mut current_votes: HashMap<Hash256, u64> = HashMap::new();
let mut total_vote_count = 0; let mut total_vote_count = 0;
trace!("Clear winner at block height: {}", block_height);
// loop through the latest votes and count all votes // loop through the latest votes and count all votes
// these have already been weighted by balance // these have already been weighted by balance
for (hash, votes) in latest_votes.iter() { for (hash, votes) in latest_votes.iter() {
if let Some(ancestor) = self.get_ancestor(*hash, block_height) { if let Some(ancestor) = self.get_ancestor(*hash, block_height, spec) {
let current_vote_value = current_votes.get(&ancestor).unwrap_or_else(|| &0); let current_vote_value = current_votes.get(&ancestor).unwrap_or_else(|| &0);
current_votes.insert(ancestor, current_vote_value + *votes); current_votes.insert(ancestor, current_vote_value + *votes);
total_vote_count += votes; total_vote_count += votes;
@ -210,54 +201,62 @@ where
None None
} }
// Finds the best child, splitting children into a binary tree, based on their hashes // Finds the best child, splitting children into a binary tree, based on their hashes (Bitwise
// LMD Ghost)
fn choose_best_child(&self, votes: &HashMap<Hash256, u64>) -> Option<Hash256> { fn choose_best_child(&self, votes: &HashMap<Hash256, u64>) -> Option<Hash256> {
let mut bitmask = 0; if votes.is_empty() {
for bit in (0..=255).rev() { return None;
}
let mut bitmask: BitVec = BitVec::new();
// loop through all bits
for bit in 0..=256 {
let mut zero_votes = 0; let mut zero_votes = 0;
let mut one_votes = 0; let mut one_votes = 0;
let mut single_candidate = None; let mut single_candidate = (None, false);
trace!("Child vote length: {}", votes.len());
for (candidate, votes) in votes.iter() { for (candidate, votes) in votes.iter() {
let candidate_uint = BigEndian::read_u32(candidate); let candidate_bit: BitVec = BitVec::from_bytes(&candidate);
if candidate_uint >> (bit + 1) != bitmask {
// if the bitmasks don't match, exclude candidate
if !bitmask.iter().eq(candidate_bit.iter().take(bit)) {
trace!(
"Child: {} was removed in bit: {} with the bitmask: {:?}",
candidate,
bit,
bitmask
);
continue; continue;
} }
if (candidate_uint >> bit) % 2 == 0 { if candidate_bit.get(bit) == Some(false) {
zero_votes += votes; zero_votes += votes;
} else { } else {
one_votes += votes; one_votes += votes;
} }
if single_candidate.is_none() { if single_candidate.0.is_none() {
single_candidate = Some(candidate); single_candidate.0 = Some(candidate);
single_candidate.1 = true;
} else { } else {
single_candidate = None; single_candidate.1 = false;
} }
} }
bitmask = (bitmask * 2) + { bitmask.push(one_votes > zero_votes);
if one_votes > zero_votes { if single_candidate.1 {
1 return Some(*single_candidate.0.expect("Cannot reach this"));
} else {
0
}
};
if let Some(candidate) = single_candidate {
return Some(*candidate);
} }
//TODO Remove this during benchmark after testing
assert!(bit >= 1);
} }
// should never reach here // should never reach here
None None
} }
} }
impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> { impl<T: ClientDB + Sized> ForkChoice for BitwiseLMDGhost<T> {
fn add_block( fn add_block(
&mut self, &mut self,
block: &BeaconBlock, block: &BeaconBlock,
block_hash: &Hash256, block_hash: &Hash256,
spec: &ChainSpec,
) -> Result<(), ForkChoiceError> { ) -> Result<(), ForkChoiceError> {
// get the height of the parent // get the height of the parent
let parent_height = self let parent_height = self
@ -265,7 +264,7 @@ impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> {
.get_deserialized(&block.parent_root)? .get_deserialized(&block.parent_root)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(block.parent_root))? .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(block.parent_root))?
.slot() .slot()
.height(Slot::from(GENESIS_SLOT)); .height(spec.genesis_slot);
let parent_hash = &block.parent_root; let parent_hash = &block.parent_root;
@ -295,22 +294,29 @@ impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> {
&mut self, &mut self,
validator_index: u64, validator_index: u64,
target_block_root: &Hash256, target_block_root: &Hash256,
spec: &ChainSpec,
) -> Result<(), ForkChoiceError> { ) -> Result<(), ForkChoiceError> {
// simply add the attestation to the latest_attestation_target if the block_height is // simply add the attestation to the latest_attestation_target if the block_height is
// larger // larger
trace!(
"Adding attestation of validator: {:?} for block: {}",
validator_index,
target_block_root
);
let attestation_target = self let attestation_target = self
.latest_attestation_targets .latest_attestation_targets
.entry(validator_index) .entry(validator_index)
.or_insert_with(|| *target_block_root); .or_insert_with(|| *target_block_root);
// if we already have a value // if we already have a value
if attestation_target != target_block_root { if attestation_target != target_block_root {
trace!("Old attestation found: {:?}", attestation_target);
// get the height of the target block // get the height of the target block
let block_height = self let block_height = self
.block_store .block_store
.get_deserialized(&target_block_root)? .get_deserialized(&target_block_root)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*target_block_root))? .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*target_block_root))?
.slot() .slot()
.height(Slot::from(GENESIS_SLOT)); .height(spec.genesis_slot);
// get the height of the past target block // get the height of the past target block
let past_block_height = self let past_block_height = self
@ -318,9 +324,10 @@ impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> {
.get_deserialized(&attestation_target)? .get_deserialized(&attestation_target)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*attestation_target))? .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*attestation_target))?
.slot() .slot()
.height(Slot::from(GENESIS_SLOT)); .height(spec.genesis_slot);
// update the attestation only if the new target is higher // update the attestation only if the new target is higher
if past_block_height < block_height { if past_block_height < block_height {
trace!("Updating old attestation");
*attestation_target = *target_block_root; *attestation_target = *target_block_root;
} }
} }
@ -328,25 +335,39 @@ impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> {
} }
/// Perform lmd_ghost on the current chain to find the head. /// Perform lmd_ghost on the current chain to find the head.
fn find_head(&mut self, justified_block_start: &Hash256) -> Result<Hash256, ForkChoiceError> { fn find_head(
&mut self,
justified_block_start: &Hash256,
spec: &ChainSpec,
) -> Result<Hash256, ForkChoiceError> {
debug!(
"Starting optimised fork choice at block: {}",
justified_block_start
);
let block = self let block = self
.block_store .block_store
.get_deserialized(&justified_block_start)? .get_deserialized(&justified_block_start)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*justified_block_start))?; .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*justified_block_start))?;
let block_slot = block.slot(); let block_slot = block.slot();
let block_height = block_slot.height(Slot::from(GENESIS_SLOT));
let state_root = block.state_root(); let state_root = block.state_root();
let mut block_height = block_slot.height(spec.genesis_slot);
let mut current_head = *justified_block_start; let mut current_head = *justified_block_start;
let mut latest_votes = self.get_latest_votes(&state_root, block_slot)?; let mut latest_votes = self.get_latest_votes(&state_root, block_slot, spec)?;
// remove any votes that don't relate to our current head. // remove any votes that don't relate to our current head.
latest_votes.retain(|hash, _| self.get_ancestor(*hash, block_height) == Some(current_head)); latest_votes
.retain(|hash, _| self.get_ancestor(*hash, block_height, spec) == Some(current_head));
// begin searching for the head // begin searching for the head
loop { loop {
debug!(
"Iteration for block: {} with vote length: {}",
current_head,
latest_votes.len()
);
// if there are no children, we are done, return the current_head // if there are no children, we are done, return the current_head
let children = match self.children.get(&current_head) { let children = match self.children.get(&current_head) {
Some(children) => children.clone(), Some(children) => children.clone(),
@ -358,9 +379,11 @@ impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> {
let mut step = let mut step =
power_of_2_below(self.max_known_height.saturating_sub(block_height).as_u32()) / 2; power_of_2_below(self.max_known_height.saturating_sub(block_height).as_u32()) / 2;
while step > 0 { while step > 0 {
trace!("Current Step: {}", step);
if let Some(clear_winner) = self.get_clear_winner( if let Some(clear_winner) = self.get_clear_winner(
&latest_votes, &latest_votes,
block_height - (block_height % u64::from(step)) + u64::from(step), block_height - (block_height % u64::from(step)) + u64::from(step),
spec,
) { ) {
current_head = clear_winner; current_head = clear_winner;
break; break;
@ -368,17 +391,23 @@ impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> {
step /= 2; step /= 2;
} }
if step > 0 { if step > 0 {
trace!("Found clear winner in log lookup");
} }
// if our skip lookup failed and we only have one child, progress to that child // if our skip lookup failed and we only have one child, progress to that child
else if children.len() == 1 { else if children.len() == 1 {
current_head = children[0]; current_head = children[0];
trace!(
"Lookup failed, only one child, proceeding to child: {}",
current_head
);
} }
// we need to find the best child path to progress down. // we need to find the best child path to progress down.
else { else {
trace!("Searching for best child");
let mut child_votes = HashMap::new(); let mut child_votes = HashMap::new();
for (voted_hash, vote) in latest_votes.iter() { for (voted_hash, vote) in latest_votes.iter() {
// if the latest votes correspond to a child // if the latest votes correspond to a child
if let Some(child) = self.get_ancestor(*voted_hash, block_height + 1) { if let Some(child) = self.get_ancestor(*voted_hash, block_height + 1, spec) {
// add up the votes for each child // add up the votes for each child
*child_votes.entry(child).or_insert_with(|| 0) += vote; *child_votes.entry(child).or_insert_with(|| 0) += vote;
} }
@ -387,22 +416,30 @@ impl<T: ClientDB + Sized> ForkChoice for OptimisedLMDGhost<T> {
current_head = self current_head = self
.choose_best_child(&child_votes) .choose_best_child(&child_votes)
.ok_or(ForkChoiceError::CannotFindBestChild)?; .ok_or(ForkChoiceError::CannotFindBestChild)?;
trace!("Best child found: {}", current_head);
} }
// No head was found, re-iterate // didn't find head yet, proceed to next iteration
// update block height
// update the block height for the next iteration block_height = self
let block_height = self
.block_store .block_store
.get_deserialized(&current_head)? .get_deserialized(&current_head)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*justified_block_start))? .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(current_head))?
.slot() .slot()
.height(Slot::from(GENESIS_SLOT)); .height(spec.genesis_slot);
// prune the latest votes for votes that are not part of current chosen chain // prune the latest votes for votes that are not part of current chosen chain
// more specifically, only keep votes that have head as an ancestor // more specifically, only keep votes that have head as an ancestor
latest_votes for hash in latest_votes.keys() {
.retain(|hash, _| self.get_ancestor(*hash, block_height) == Some(current_head)); trace!(
"Ancestor for vote: {} at height: {} is: {:?}",
hash,
block_height,
self.get_ancestor(*hash, block_height, spec)
);
}
latest_votes.retain(|hash, _| {
self.get_ancestor(*hash, block_height, spec) == Some(current_head)
});
} }
} }
} }
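The bitwise child selection shown above walks the candidate block hashes bit by bit: at each position it tallies the vote weight behind a 0-bit and a 1-bit, keeps only the candidates whose prefix matches the majority so far, and stops as soon as a single candidate remains. A minimal, self-contained sketch of that idea (not the crate's exact code; the free function name and the raw [u8; 32] keys are illustrative assumptions):

use std::collections::HashMap;

/// Pick the hash whose bit pattern follows the vote-weighted majority at every position.
fn choose_best_child_sketch(votes: &HashMap<[u8; 32], u64>) -> Option<[u8; 32]> {
    // Read bit `i` of a 32-byte hash, most significant bit first.
    let bit = |hash: &[u8; 32], i: usize| ((hash[i / 8] >> (7 - (i % 8))) & 1) == 1;
    let mut candidates: Vec<[u8; 32]> = votes.keys().copied().collect();
    for i in 0..256 {
        if candidates.len() <= 1 {
            break;
        }
        // Tally the vote weight behind a 0-bit and a 1-bit among the surviving candidates.
        let (mut zeros, mut ones) = (0u64, 0u64);
        for c in &candidates {
            if bit(c, i) { ones += votes[c] } else { zeros += votes[c] }
        }
        // Keep only the candidates that agree with the winning bit.
        let winner = ones > zeros;
        candidates.retain(|c| bit(c, i) == winner);
    }
    candidates.first().copied()
}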


@@ -1,57 +1,36 @@
-// Copyright 2019 Sigma Prime Pty Ltd.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the "Software"),
-// to deal in the Software without restriction, including without limitation
-// the rights to use, copy, modify, merge, publish, distribute, sublicense,
-// and/or sell copies of the Software, and to permit persons to whom the
-// Software is furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included in
-// all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
-// DEALINGS IN THE SOFTWARE.
 //! This crate stores the various implementations of fork-choice rules that can be used for the
 //! beacon blockchain.
 //!
-//! There are four implementations. One is the naive longest chain rule (primarily for testing
-//! purposes). The other three are proposed implementations of the LMD-GHOST fork-choice rule with various forms of optimisation.
+//! There are three implementations. One is the naive longest chain rule (primarily for testing
+//! purposes). The other two are proposed implementations of the LMD-GHOST fork-choice rule with various forms of optimisation.
 //!
 //! The current implementations are:
 //! - [`longest-chain`]: Simplistic longest-chain fork choice - primarily for testing, **not for
 //! production**.
 //! - [`slow_lmd_ghost`]: This is a simple and very inefficient implementation given in the ethereum 2.0
 //! specifications (https://github.com/ethereum/eth2.0-specs/blob/v0.1/specs/core/0_beacon-chain.md#get_block_root).
-//! - [`optimised_lmd_ghost`]: This is an optimised version of the naive implementation as proposed
+//! - [`bitwise_lmd_ghost`]: This is an optimised version of bitwise LMD-GHOST as proposed
 //! by Vitalik. The reference implementation can be found at: https://github.com/ethereum/research/blob/master/ghost/ghost.py
-//! - [`protolambda_lmd_ghost`]: Another optimised version of LMD-GHOST designed by @protolambda.
-//! The go implementation can be found here: https://github.com/protolambda/lmd-ghost.
 //!
+//! [`longest-chain`]: struct.LongestChain.html
 //! [`slow_lmd_ghost`]: struct.SlowLmdGhost.html
-//! [`optimised_lmd_ghost`]: struct.OptimisedLmdGhost.html
-//! [`protolambda_lmd_ghost`]: struct.ProtolambdaLmdGhost.html
+//! [`bitwise_lmd_ghost`]: struct.OptimisedLmdGhost.html
 extern crate db;
 extern crate ssz;
 extern crate types;
 
+pub mod bitwise_lmd_ghost;
 pub mod longest_chain;
-pub mod optimised_lmd_ghost;
 pub mod slow_lmd_ghost;
 
 use db::stores::BeaconBlockAtSlotError;
 use db::DBError;
-use types::{BeaconBlock, Hash256};
+use types::{BeaconBlock, ChainSpec, Hash256};
 
+pub use bitwise_lmd_ghost::BitwiseLMDGhost;
 pub use longest_chain::LongestChain;
-pub use optimised_lmd_ghost::OptimisedLMDGhost;
+pub use slow_lmd_ghost::SlowLMDGhost;
 
 /// Defines the interface for Fork Choices. Each Fork choice will define their own data structures
 /// which can be built in block processing through the `add_block` and `add_attestation` functions.
@@ -63,6 +42,7 @@ pub trait ForkChoice: Send + Sync {
         &mut self,
         block: &BeaconBlock,
         block_hash: &Hash256,
+        spec: &ChainSpec,
     ) -> Result<(), ForkChoiceError>;
     /// Called when an attestation has been added. Allows generic attestation-level data structures to be built for a given fork choice.
     // This can be generalised to a full attestation if required later.
@@ -70,10 +50,15 @@ pub trait ForkChoice: Send + Sync {
         &mut self,
         validator_index: u64,
         target_block_hash: &Hash256,
+        spec: &ChainSpec,
     ) -> Result<(), ForkChoiceError>;
     /// The fork-choice algorithm to find the current canonical head of the chain.
     // TODO: Remove the justified_start_block parameter and make it internal
-    fn find_head(&mut self, justified_start_block: &Hash256) -> Result<Hash256, ForkChoiceError>;
+    fn find_head(
+        &mut self,
+        justified_start_block: &Hash256,
+        spec: &ChainSpec,
+    ) -> Result<Hash256, ForkChoiceError>;
 }
 
 /// Possible fork choice errors that can occur.
@@ -109,11 +94,11 @@ impl From<BeaconBlockAtSlotError> for ForkChoiceError {
 }
 
 /// Fork choice options that are currently implemented.
-pub enum ForkChoiceAlgorithms {
+pub enum ForkChoiceAlgorithm {
     /// Chooses the longest chain becomes the head. Not for production.
     LongestChain,
     /// A simple and highly inefficient implementation of LMD ghost.
     SlowLMDGhost,
-    /// An optimised version of LMD-GHOST by Vitalik.
-    OptimisedLMDGhost,
+    /// An optimised version of bitwise LMD-GHOST by Vitalik.
+    BitwiseLMDGhost,
 }
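With this change every `ForkChoice` method takes the `ChainSpec`, so values such as the genesis slot, epoch length and deposit caps come from the spec rather than from crate-level constants. A minimal sketch of driving any implementation through the trait after this change (the free-standing helper and its argument names are illustrative, not part of the crate):

fn run_fork_choice<F: ForkChoice>(
    fork_choice: &mut F,
    spec: &ChainSpec,
    block: &BeaconBlock,
    block_root: &Hash256,
    validator_index: u64,
    attestation_target: &Hash256,
    justified_root: &Hash256,
) -> Result<Hash256, ForkChoiceError> {
    // Register the block, then the latest attestation, then resolve the head;
    // each call now threads the spec through explicitly.
    fork_choice.add_block(block, block_root, spec)?;
    fork_choice.add_attestation(validator_index, attestation_target, spec)?;
    fork_choice.find_head(justified_root, spec)
}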


@@ -1,7 +1,7 @@
 use crate::{ForkChoice, ForkChoiceError};
 use db::{stores::BeaconBlockStore, ClientDB};
 use std::sync::Arc;
-use types::{BeaconBlock, Hash256, Slot};
+use types::{BeaconBlock, ChainSpec, Hash256, Slot};
 
 pub struct LongestChain<T>
 where
@@ -30,6 +30,7 @@ impl<T: ClientDB + Sized> ForkChoice for LongestChain<T> {
         &mut self,
         block: &BeaconBlock,
         block_hash: &Hash256,
+        _: &ChainSpec,
     ) -> Result<(), ForkChoiceError> {
         // add the block hash to head_block_hashes removing the parent if it exists
         self.head_block_hashes
@@ -38,12 +39,17 @@ impl<T: ClientDB + Sized> ForkChoice for LongestChain<T> {
         Ok(())
     }
 
-    fn add_attestation(&mut self, _: u64, _: &Hash256) -> Result<(), ForkChoiceError> {
+    fn add_attestation(
+        &mut self,
+        _: u64,
+        _: &Hash256,
+        _: &ChainSpec,
+    ) -> Result<(), ForkChoiceError> {
         // do nothing
         Ok(())
     }
 
-    fn find_head(&mut self, _: &Hash256) -> Result<Hash256, ForkChoiceError> {
+    fn find_head(&mut self, _: &Hash256, _: &ChainSpec) -> Result<Hash256, ForkChoiceError> {
         let mut head_blocks: Vec<(usize, BeaconBlock)> = vec![];
         /*
          * Load all the head_block hashes from the DB as SszBeaconBlocks.


@ -1,23 +1,3 @@
// Copyright 2019 Sigma Prime Pty Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
extern crate db; extern crate db;
use crate::{ForkChoice, ForkChoiceError}; use crate::{ForkChoice, ForkChoiceError};
@ -25,21 +5,16 @@ use db::{
stores::{BeaconBlockStore, BeaconStateStore}, stores::{BeaconBlockStore, BeaconStateStore},
ClientDB, ClientDB,
}; };
use log::{debug, trace};
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use types::{ use types::{
readers::BeaconBlockReader, validator_registry::get_active_validator_indices, BeaconBlock, readers::BeaconBlockReader, validator_registry::get_active_validator_indices, BeaconBlock,
Hash256, Slot, ChainSpec, Hash256, Slot,
}; };
//TODO: Pruning and syncing //TODO: Pruning and syncing
//TODO: Sort out global constants
const GENESIS_SLOT: u64 = 0;
const FORK_CHOICE_BALANCE_INCREMENT: u64 = 1e9 as u64;
const MAX_DEPOSIT_AMOUNT: u64 = 32e9 as u64;
const EPOCH_LENGTH: u64 = 64;
pub struct SlowLMDGhost<T: ClientDB + Sized> { pub struct SlowLMDGhost<T: ClientDB + Sized> {
/// The latest attestation targets as a map of validator index to block hash. /// The latest attestation targets as a map of validator index to block hash.
//TODO: Could this be a fixed size vec //TODO: Could this be a fixed size vec
@ -56,12 +31,15 @@ impl<T> SlowLMDGhost<T>
where where
T: ClientDB + Sized, T: ClientDB + Sized,
{ {
pub fn new(block_store: BeaconBlockStore<T>, state_store: BeaconStateStore<T>) -> Self { pub fn new(
block_store: Arc<BeaconBlockStore<T>>,
state_store: Arc<BeaconStateStore<T>>,
) -> Self {
SlowLMDGhost { SlowLMDGhost {
latest_attestation_targets: HashMap::new(), latest_attestation_targets: HashMap::new(),
children: HashMap::new(), children: HashMap::new(),
block_store: Arc::new(block_store), block_store,
state_store: Arc::new(state_store), state_store,
} }
} }
@ -71,6 +49,7 @@ where
&self, &self,
state_root: &Hash256, state_root: &Hash256,
block_slot: Slot, block_slot: Slot,
spec: &ChainSpec,
) -> Result<HashMap<Hash256, u64>, ForkChoiceError> { ) -> Result<HashMap<Hash256, u64>, ForkChoiceError> {
// get latest votes // get latest votes
// Note: Votes are weighted by min(balance, MAX_DEPOSIT_AMOUNT) // // Note: Votes are weighted by min(balance, MAX_DEPOSIT_AMOUNT) //
@ -84,21 +63,22 @@ where
.ok_or_else(|| ForkChoiceError::MissingBeaconState(*state_root))?; .ok_or_else(|| ForkChoiceError::MissingBeaconState(*state_root))?;
let active_validator_indices = get_active_validator_indices( let active_validator_indices = get_active_validator_indices(
&current_state.validator_registry, &current_state.validator_registry[..],
block_slot.epoch(EPOCH_LENGTH), block_slot.epoch(spec.epoch_length),
); );
for index in active_validator_indices { for index in active_validator_indices {
let balance = let balance = std::cmp::min(
std::cmp::min(current_state.validator_balances[index], MAX_DEPOSIT_AMOUNT) current_state.validator_balances[index],
/ FORK_CHOICE_BALANCE_INCREMENT; spec.max_deposit_amount,
) / spec.fork_choice_balance_increment;
if balance > 0 { if balance > 0 {
if let Some(target) = self.latest_attestation_targets.get(&(index as u64)) { if let Some(target) = self.latest_attestation_targets.get(&(index as u64)) {
*latest_votes.entry(*target).or_insert_with(|| 0) += balance; *latest_votes.entry(*target).or_insert_with(|| 0) += balance;
} }
} }
} }
trace!("Latest votes: {:?}", latest_votes);
Ok(latest_votes) Ok(latest_votes)
} }
@ -117,12 +97,12 @@ where
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*block_root))? .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*block_root))?
.slot(); .slot();
for (target_hash, votes) in latest_votes.iter() { for (vote_hash, votes) in latest_votes.iter() {
let (root_at_slot, _) = self let (root_at_slot, _) = self
.block_store .block_store
.block_at_slot(&block_root, block_slot)? .block_at_slot(&vote_hash, block_slot)?
.ok_or(ForkChoiceError::MissingBeaconBlock(*block_root))?; .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*block_root))?;
if root_at_slot == *target_hash { if root_at_slot == *block_root {
count += votes; count += votes;
} }
} }
@ -136,6 +116,7 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
&mut self, &mut self,
block: &BeaconBlock, block: &BeaconBlock,
block_hash: &Hash256, block_hash: &Hash256,
_: &ChainSpec,
) -> Result<(), ForkChoiceError> { ) -> Result<(), ForkChoiceError> {
// build the children hashmap // build the children hashmap
// add the new block to the children of parent // add the new block to the children of parent
@ -153,22 +134,29 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
&mut self, &mut self,
validator_index: u64, validator_index: u64,
target_block_root: &Hash256, target_block_root: &Hash256,
spec: &ChainSpec,
) -> Result<(), ForkChoiceError> { ) -> Result<(), ForkChoiceError> {
// simply add the attestation to the latest_attestation_target if the block_height is // simply add the attestation to the latest_attestation_target if the block_height is
// larger // larger
trace!(
"Adding attestation of validator: {:?} for block: {}",
validator_index,
target_block_root
);
let attestation_target = self let attestation_target = self
.latest_attestation_targets .latest_attestation_targets
.entry(validator_index) .entry(validator_index)
.or_insert_with(|| *target_block_root); .or_insert_with(|| *target_block_root);
// if we already have a value // if we already have a value
if attestation_target != target_block_root { if attestation_target != target_block_root {
trace!("Old attestation found: {:?}", attestation_target);
// get the height of the target block // get the height of the target block
let block_height = self let block_height = self
.block_store .block_store
.get_deserialized(&target_block_root)? .get_deserialized(&target_block_root)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*target_block_root))? .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*target_block_root))?
.slot() .slot()
.height(Slot::from(GENESIS_SLOT)); .height(spec.genesis_slot);
// get the height of the past target block // get the height of the past target block
let past_block_height = self let past_block_height = self
@ -176,9 +164,10 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
.get_deserialized(&attestation_target)? .get_deserialized(&attestation_target)?
.ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*attestation_target))? .ok_or_else(|| ForkChoiceError::MissingBeaconBlock(*attestation_target))?
.slot() .slot()
.height(Slot::from(GENESIS_SLOT)); .height(spec.genesis_slot);
// update the attestation only if the new target is higher // update the attestation only if the new target is higher
if past_block_height < block_height { if past_block_height < block_height {
trace!("Updating old attestation");
*attestation_target = *target_block_root; *attestation_target = *target_block_root;
} }
} }
@ -186,7 +175,12 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
} }
/// A very inefficient implementation of LMD ghost. /// A very inefficient implementation of LMD ghost.
fn find_head(&mut self, justified_block_start: &Hash256) -> Result<Hash256, ForkChoiceError> { fn find_head(
&mut self,
justified_block_start: &Hash256,
spec: &ChainSpec,
) -> Result<Hash256, ForkChoiceError> {
debug!("Running LMD Ghost Fork-choice rule");
let start = self let start = self
.block_store .block_store
.get_deserialized(&justified_block_start)? .get_deserialized(&justified_block_start)?
@ -194,12 +188,12 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
let start_state_root = start.state_root(); let start_state_root = start.state_root();
let latest_votes = self.get_latest_votes(&start_state_root, start.slot())?; let latest_votes = self.get_latest_votes(&start_state_root, start.slot(), spec)?;
let mut head_hash = Hash256::zero(); let mut head_hash = *justified_block_start;
loop { loop {
let mut head_vote_count = 0; debug!("Iteration for block: {}", head_hash);
let children = match self.children.get(&head_hash) { let children = match self.children.get(&head_hash) {
Some(children) => children, Some(children) => children,
@ -207,8 +201,18 @@ impl<T: ClientDB + Sized> ForkChoice for SlowLMDGhost<T> {
None => break, None => break,
}; };
// if we only have one child, use it
if children.len() == 1 {
trace!("Single child found.");
head_hash = children[0];
continue;
}
trace!("Children found: {:?}", children);
let mut head_vote_count = 0;
for child_hash in children { for child_hash in children {
let vote_count = self.get_vote_count(&latest_votes, &child_hash)?; let vote_count = self.get_vote_count(&latest_votes, &child_hash)?;
trace!("Vote count for child: {} is: {}", child_hash, vote_count);
if vote_count > head_vote_count { if vote_count > head_vote_count {
head_hash = *child_hash; head_hash = *child_hash;
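For reference, the naive rule above gives a child all of the latest-message weight whose chain passes through it: a vote for a target block counts towards a child exactly when the block at the child's slot on the target's chain is the child itself, and find_head repeatedly descends to the child with the largest count. A minimal sketch of that per-child count under those assumptions (the `ancestor_at_slot` closure and integer block ids are illustrative, not the crate's API):

use std::collections::HashMap;

/// Sum the weighted latest votes whose target has `child` as its ancestor at `child_slot`.
fn vote_count_sketch(
    latest_votes: &HashMap<u64, u64>, // vote target block id -> weighted vote count
    ancestor_at_slot: impl Fn(u64, u64) -> Option<u64>, // (block id, slot) -> ancestor block id
    child: u64,
    child_slot: u64,
) -> u64 {
    latest_votes
        .iter()
        .filter(|(target, _)| ancestor_at_slot(**target, child_slot) == Some(child))
        .map(|(_, votes)| *votes)
        .sum()
}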


@ -0,0 +1,37 @@
title: Fork-choice Tests
summary: A collection of abstract fork-choice tests for bitwise lmd ghost.
test_suite: Fork-Choice
test_cases:
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b1'
- id: 'b3'
parent: 'b1'
weights:
- b0: 0
- b1: 0
- b2: 5
- b3: 10
heads:
- id: 'b3'
# bitwise LMD ghost example. bitwise GHOST gives b2 (plain GHOST gives b1).
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b0'
- id: 'b3'
parent: 'b0'
weights:
- b1: 5
- b2: 4
- b3: 3
heads:
- id: 'b2'


@ -0,0 +1,37 @@
title: Fork-choice Tests
summary: A collection of abstract fork-choice tests for lmd ghost.
test_suite: Fork-Choice
test_cases:
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b1'
- id: 'b3'
parent: 'b1'
weights:
- b0: 0
- b1: 0
- b2: 5
- b3: 10
heads:
- id: 'b3'
# same block tree as the bitwise LMD ghost example; plain GHOST gives b1.
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b0'
- id: 'b3'
parent: 'b0'
weights:
- b1: 5
- b2: 4
- b3: 3
heads:
- id: 'b1'


@ -0,0 +1,51 @@
title: Fork-choice Tests
summary: A collection of abstract fork-choice tests to verify the longest chain fork-choice rule.
test_suite: Fork-Choice
test_cases:
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b1'
- id: 'b3'
parent: 'b1'
- id: 'b4'
parent: 'b3'
weights:
- b0: 0
- b1: 0
- b2: 10
- b3: 1
heads:
- id: 'b4'
- blocks:
- id: 'b0'
parent: 'b0'
- id: 'b1'
parent: 'b0'
- id: 'b2'
parent: 'b1'
- id: 'b3'
parent: 'b2'
- id: 'b4'
parent: 'b3'
- id: 'b5'
parent: 'b0'
- id: 'b6'
parent: 'b5'
- id: 'b7'
parent: 'b6'
- id: 'b8'
parent: 'b7'
- id: 'b9'
parent: 'b8'
weights:
- b0: 5
- b1: 20
- b2: 10
- b3: 10
heads:
- id: 'b9'


@ -0,0 +1,281 @@
// Tests the available fork-choice algorithms
extern crate beacon_chain;
extern crate bls;
extern crate db;
//extern crate env_logger; // for debugging
extern crate fork_choice;
extern crate hex;
extern crate log;
extern crate slot_clock;
extern crate types;
extern crate yaml_rust;
pub use beacon_chain::BeaconChain;
use bls::{PublicKey, Signature};
use db::stores::{BeaconBlockStore, BeaconStateStore};
use db::MemoryDB;
//use env_logger::{Builder, Env};
use fork_choice::{BitwiseLMDGhost, ForkChoice, ForkChoiceAlgorithm, LongestChain, SlowLMDGhost};
use ssz::ssz_encode;
use std::collections::HashMap;
use std::sync::Arc;
use std::{fs::File, io::prelude::*, path::PathBuf};
use types::{
BeaconBlock, BeaconBlockBody, BeaconState, ChainSpec, Epoch, Eth1Data, Hash256, Slot, Validator,
};
use yaml_rust::yaml;
// Note: We assume the block IDs are hex-encoded.
#[test]
fn test_bitwise_lmd_ghost() {
// set up logging
//Builder::from_env(Env::default().default_filter_or("trace")).init();
test_yaml_vectors(
ForkChoiceAlgorithm::BitwiseLMDGhost,
"tests/bitwise_lmd_ghost_test_vectors.yaml",
100,
);
}
#[test]
fn test_slow_lmd_ghost() {
test_yaml_vectors(
ForkChoiceAlgorithm::SlowLMDGhost,
"tests/lmd_ghost_test_vectors.yaml",
100,
);
}
#[test]
fn test_longest_chain() {
test_yaml_vectors(
ForkChoiceAlgorithm::LongestChain,
"tests/longest_chain_test_vectors.yaml",
100,
);
}
// run a generic test over given YAML test vectors
fn test_yaml_vectors(
fork_choice_algo: ForkChoiceAlgorithm,
yaml_file_path: &str,
emulated_validators: usize, // the number of validators used to give weights.
) {
// load test cases from yaml
let test_cases = load_test_cases_from_yaml(yaml_file_path);
// default vars
let spec = ChainSpec::foundation();
let zero_hash = Hash256::zero();
let eth1_data = Eth1Data {
deposit_root: zero_hash.clone(),
block_hash: zero_hash.clone(),
};
let randao_reveal = Signature::empty_signature();
let signature = Signature::empty_signature();
let body = BeaconBlockBody {
proposer_slashings: vec![],
attester_slashings: vec![],
attestations: vec![],
deposits: vec![],
exits: vec![],
};
// process the tests
for test_case in test_cases {
// setup a fresh test
let (mut fork_choice, block_store, state_root) =
setup_inital_state(&fork_choice_algo, emulated_validators);
// keep a hashmap of block_id's to block_hashes (random hashes to abstract block_id)
//let mut block_id_map: HashMap<String, Hash256> = HashMap::new();
// keep a list of hash to slot
let mut block_slot: HashMap<Hash256, Slot> = HashMap::new();
// assume the block tree is given to us in order.
let mut genesis_hash = None;
for block in test_case["blocks"].clone().into_vec().unwrap() {
let block_id = block["id"].as_str().unwrap().to_string();
let parent_id = block["parent"].as_str().unwrap().to_string();
// default params for genesis
let block_hash = id_to_hash(&block_id);
let mut slot = spec.genesis_slot;
let parent_root = id_to_hash(&parent_id);
// set the slot and parent based off the YAML. Start with genesis;
// if not the genesis, update slot
if parent_id != block_id {
// find parent slot
slot = *(block_slot
.get(&parent_root)
.expect("Parent should have a slot number"))
+ 1;
} else {
genesis_hash = Some(block_hash);
}
// update slot mapping
block_slot.insert(block_hash, slot);
// build the BeaconBlock
let beacon_block = BeaconBlock {
slot,
parent_root,
state_root: state_root.clone(),
randao_reveal: randao_reveal.clone(),
eth1_data: eth1_data.clone(),
signature: signature.clone(),
body: body.clone(),
};
// Store the block.
block_store
.put(&block_hash, &ssz_encode(&beacon_block)[..])
.unwrap();
// run add block for fork choice if not genesis
if parent_id != block_id {
fork_choice
.add_block(&beacon_block, &block_hash, &spec)
.unwrap();
}
}
// add the weights (attestations)
let mut current_validator = 0;
for id_map in test_case["weights"].clone().into_vec().unwrap() {
// get the block id and weights
for (map_id, map_weight) in id_map.as_hash().unwrap().iter() {
let id = map_id.as_str().unwrap();
let block_root = id_to_hash(&id.to_string());
let weight = map_weight.as_i64().unwrap();
// we assume each validator has a weight of 1 and add one attestation per unit of weight
// to achieve the correct weight
for _ in 0..weight {
assert!(
current_validator <= emulated_validators,
"Not enough validators to emulate weights"
);
fork_choice
.add_attestation(current_validator as u64, &block_root, &spec)
.unwrap();
current_validator += 1;
}
}
}
// everything is set up, run the fork choice, using genesis as the head
let head = fork_choice
.find_head(&genesis_hash.unwrap(), &spec)
.unwrap();
// compare the result to the expected test
let success = test_case["heads"]
.clone()
.into_vec()
.unwrap()
.iter()
.find(|heads| id_to_hash(&heads["id"].as_str().unwrap().to_string()) == head)
.is_some();
println!("Head found: {}", head);
assert!(success, "Did not find one of the possible heads");
}
}
// loads the test_cases from the supplied yaml file
fn load_test_cases_from_yaml(file_path: &str) -> Vec<yaml_rust::Yaml> {
// load the yaml
let mut file = {
let mut file_path_buf = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
file_path_buf.push(file_path);
File::open(file_path_buf).unwrap()
};
let mut yaml_str = String::new();
file.read_to_string(&mut yaml_str).unwrap();
let docs = yaml::YamlLoader::load_from_str(&yaml_str).unwrap();
let doc = &docs[0];
doc["test_cases"].as_vec().unwrap().clone()
}
// initialise the state with `no_validators` default validators. All blocks will reference this state root.
fn setup_inital_state(
fork_choice_algo: &ForkChoiceAlgorithm,
no_validators: usize,
) -> (Box<ForkChoice>, Arc<BeaconBlockStore<MemoryDB>>, Hash256) {
let zero_hash = Hash256::zero();
let db = Arc::new(MemoryDB::open());
let block_store = Arc::new(BeaconBlockStore::new(db.clone()));
let state_store = Arc::new(BeaconStateStore::new(db.clone()));
// the fork choice instantiation
let fork_choice: Box<ForkChoice> = match fork_choice_algo {
ForkChoiceAlgorithm::BitwiseLMDGhost => Box::new(BitwiseLMDGhost::new(
block_store.clone(),
state_store.clone(),
)),
ForkChoiceAlgorithm::SlowLMDGhost => {
Box::new(SlowLMDGhost::new(block_store.clone(), state_store.clone()))
}
ForkChoiceAlgorithm::LongestChain => Box::new(LongestChain::new(block_store.clone())),
};
// misc vars for setting up the state
let genesis_time = 1_550_381_159;
let latest_eth1_data = Eth1Data {
deposit_root: zero_hash.clone(),
block_hash: zero_hash.clone(),
};
let initial_validator_deposits = vec![];
let spec = ChainSpec::foundation();
// create the state
let mut state = BeaconState::genesis(
genesis_time,
initial_validator_deposits,
latest_eth1_data,
&spec,
)
.unwrap();
let default_validator = Validator {
pubkey: PublicKey::default(),
withdrawal_credentials: zero_hash,
activation_epoch: Epoch::from(0u64),
exit_epoch: spec.far_future_epoch,
withdrawal_epoch: spec.far_future_epoch,
penalized_epoch: spec.far_future_epoch,
status_flags: None,
};
// activate the validators
for _ in 0..no_validators {
state.validator_registry.push(default_validator.clone());
state.validator_balances.push(32_000_000_000);
}
let state_root = state.canonical_root();
state_store
.put(&state_root, &ssz_encode(&state)[..])
.unwrap();
// return initialised vars
(fork_choice, block_store, state_root)
}
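// Hypothetical usage (not part of this commit): build a bitwise LMD-GHOST
// instance backed by an in-memory DB with ten emulated validators.
//
// let (fork_choice, block_store, state_root) =
//     setup_inital_state(&ForkChoiceAlgorithm::BitwiseLMDGhost, 10);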
// convert a block_id into a Hash256 -- assumes the input is hex encoded; short ids are left-padded with zeros
fn id_to_hash(id: &String) -> Hash256 {
let bytes = hex::decode(id).expect("Block ID should be hex");
let len = std::cmp::min(bytes.len(), 32);
let mut fixed_bytes = [0u8; 32];
for (index, byte) in bytes.iter().take(32).enumerate() {
fixed_bytes[32 - len + index] = *byte;
}
Hash256::from(fixed_bytes)
}
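// A small check (not in this commit) of the padding behaviour above: short
// hex ids are right-aligned into the 32 bytes, so "01" maps to 0x00..01.
#[test]
fn id_to_hash_right_aligns_short_ids() {
let hash = id_to_hash(&"01".to_string());
let mut expected = [0u8; 32];
expected[31] = 0x01;
assert_eq!(hash, Hash256::from(expected));
}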

View File

@ -18,6 +18,7 @@ serde_derive = "1.0"
serde_json = "1.0" serde_json = "1.0"
slog = "^2.2.3" slog = "^2.2.3"
ssz = { path = "../utils/ssz" } ssz = { path = "../utils/ssz" }
ssz_derive = { path = "../utils/ssz_derive" }
swap_or_not_shuffle = { path = "../utils/swap_or_not_shuffle" } swap_or_not_shuffle = { path = "../utils/swap_or_not_shuffle" }
[dev-dependencies] [dev-dependencies]

View File

@ -2,9 +2,10 @@ use super::{AggregatePublicKey, AggregateSignature, AttestationData, Bitfield, H
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, Clone, PartialEq, Serialize)] #[derive(Debug, Clone, PartialEq, Serialize, Encode, Decode)]
pub struct Attestation { pub struct Attestation {
pub aggregation_bitfield: Bitfield, pub aggregation_bitfield: Bitfield,
pub data: AttestationData, pub data: AttestationData,
@ -33,32 +34,6 @@ impl Attestation {
} }
} }
impl Encodable for Attestation {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.aggregation_bitfield);
s.append(&self.data);
s.append(&self.custody_bitfield);
s.append(&self.aggregate_signature);
}
}
impl Decodable for Attestation {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (aggregation_bitfield, i) = Bitfield::ssz_decode(bytes, i)?;
let (data, i) = AttestationData::ssz_decode(bytes, i)?;
let (custody_bitfield, i) = Bitfield::ssz_decode(bytes, i)?;
let (aggregate_signature, i) = AggregateSignature::ssz_decode(bytes, i)?;
let attestation_record = Self {
aggregation_bitfield,
data,
custody_bitfield,
aggregate_signature,
};
Ok((attestation_record, i))
}
}
impl TreeHash for Attestation { impl TreeHash for Attestation {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -85,7 +60,7 @@ impl<T: RngCore> TestRandom<T> for Attestation {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,7 +2,8 @@ use crate::test_utils::TestRandom;
use crate::{AttestationDataAndCustodyBit, Crosslink, Epoch, Hash256, Slot}; use crate::{AttestationDataAndCustodyBit, Crosslink, Epoch, Hash256, Slot};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
pub const SSZ_ATTESTION_DATA_LENGTH: usize = { pub const SSZ_ATTESTION_DATA_LENGTH: usize = {
8 + // slot 8 + // slot
@ -15,7 +16,7 @@ pub const SSZ_ATTESTION_DATA_LENGTH: usize = {
32 // justified_block_root 32 // justified_block_root
}; };
#[derive(Debug, Clone, PartialEq, Default, Serialize, Hash)] #[derive(Debug, Clone, PartialEq, Default, Serialize, Hash, Encode, Decode)]
pub struct AttestationData { pub struct AttestationData {
pub slot: Slot, pub slot: Slot,
pub shard: u64, pub shard: u64,
@ -43,44 +44,6 @@ impl AttestationData {
} }
} }
impl Encodable for AttestationData {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.slot);
s.append(&self.shard);
s.append(&self.beacon_block_root);
s.append(&self.epoch_boundary_root);
s.append(&self.shard_block_root);
s.append(&self.latest_crosslink);
s.append(&self.justified_epoch);
s.append(&self.justified_block_root);
}
}
impl Decodable for AttestationData {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (slot, i) = <_>::ssz_decode(bytes, i)?;
let (shard, i) = <_>::ssz_decode(bytes, i)?;
let (beacon_block_root, i) = <_>::ssz_decode(bytes, i)?;
let (epoch_boundary_root, i) = <_>::ssz_decode(bytes, i)?;
let (shard_block_root, i) = <_>::ssz_decode(bytes, i)?;
let (latest_crosslink, i) = <_>::ssz_decode(bytes, i)?;
let (justified_epoch, i) = <_>::ssz_decode(bytes, i)?;
let (justified_block_root, i) = <_>::ssz_decode(bytes, i)?;
let attestation_data = AttestationData {
slot,
shard,
beacon_block_root,
epoch_boundary_root,
shard_block_root,
latest_crosslink,
justified_epoch,
justified_block_root,
};
Ok((attestation_data, i))
}
}
impl TreeHash for AttestationData { impl TreeHash for AttestationData {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -115,7 +78,7 @@ impl<T: RngCore> TestRandom<T> for AttestationData {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,32 +2,15 @@ use super::AttestationData;
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::TreeHash;
use ssz_derive::{Decode, Encode};
#[derive(Debug, Clone, PartialEq, Default, Serialize)] #[derive(Debug, Clone, PartialEq, Default, Serialize, Encode, Decode)]
pub struct AttestationDataAndCustodyBit { pub struct AttestationDataAndCustodyBit {
pub data: AttestationData, pub data: AttestationData,
pub custody_bit: bool, pub custody_bit: bool,
} }
impl Encodable for AttestationDataAndCustodyBit {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.data);
// TODO: deal with bools
}
}
impl Decodable for AttestationDataAndCustodyBit {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (data, i) = <_>::ssz_decode(bytes, i)?;
let custody_bit = false;
let attestation_data_and_custody_bit = AttestationDataAndCustodyBit { data, custody_bit };
Ok((attestation_data_and_custody_bit, i))
}
}
impl TreeHash for AttestationDataAndCustodyBit { impl TreeHash for AttestationDataAndCustodyBit {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -52,7 +35,7 @@ impl<T: RngCore> TestRandom<T> for AttestationDataAndCustodyBit {
mod test { mod test {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -1,36 +1,15 @@
use crate::{test_utils::TestRandom, SlashableAttestation}; use crate::{test_utils::TestRandom, SlashableAttestation};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct AttesterSlashing { pub struct AttesterSlashing {
pub slashable_attestation_1: SlashableAttestation, pub slashable_attestation_1: SlashableAttestation,
pub slashable_attestation_2: SlashableAttestation, pub slashable_attestation_2: SlashableAttestation,
} }
impl Encodable for AttesterSlashing {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.slashable_attestation_1);
s.append(&self.slashable_attestation_2);
}
}
impl Decodable for AttesterSlashing {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (slashable_attestation_1, i) = <_>::ssz_decode(bytes, i)?;
let (slashable_attestation_2, i) = <_>::ssz_decode(bytes, i)?;
Ok((
AttesterSlashing {
slashable_attestation_1,
slashable_attestation_2,
},
i,
))
}
}
impl TreeHash for AttesterSlashing { impl TreeHash for AttesterSlashing {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -53,7 +32,7 @@ impl<T: RngCore> TestRandom<T> for AttesterSlashing {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -3,9 +3,10 @@ use crate::{BeaconBlockBody, ChainSpec, Eth1Data, Hash256, ProposalSignedData, S
use bls::Signature; use bls::Signature;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct BeaconBlock { pub struct BeaconBlock {
pub slot: Slot, pub slot: Slot,
pub parent_root: Hash256, pub parent_root: Hash256,
@ -59,43 +60,6 @@ impl BeaconBlock {
} }
} }
impl Encodable for BeaconBlock {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.slot);
s.append(&self.parent_root);
s.append(&self.state_root);
s.append(&self.randao_reveal);
s.append(&self.eth1_data);
s.append(&self.signature);
s.append(&self.body);
}
}
impl Decodable for BeaconBlock {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (slot, i) = <_>::ssz_decode(bytes, i)?;
let (parent_root, i) = <_>::ssz_decode(bytes, i)?;
let (state_root, i) = <_>::ssz_decode(bytes, i)?;
let (randao_reveal, i) = <_>::ssz_decode(bytes, i)?;
let (eth1_data, i) = <_>::ssz_decode(bytes, i)?;
let (signature, i) = <_>::ssz_decode(bytes, i)?;
let (body, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
slot,
parent_root,
state_root,
randao_reveal,
eth1_data,
signature,
body,
},
i,
))
}
}
impl TreeHash for BeaconBlock { impl TreeHash for BeaconBlock {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -128,7 +92,7 @@ impl<T: RngCore> TestRandom<T> for BeaconBlock {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,9 +2,10 @@ use super::{Attestation, AttesterSlashing, Deposit, Exit, ProposerSlashing};
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Default, Serialize)] #[derive(Debug, PartialEq, Clone, Default, Serialize, Encode, Decode)]
pub struct BeaconBlockBody { pub struct BeaconBlockBody {
pub proposer_slashings: Vec<ProposerSlashing>, pub proposer_slashings: Vec<ProposerSlashing>,
pub attester_slashings: Vec<AttesterSlashing>, pub attester_slashings: Vec<AttesterSlashing>,
@ -13,37 +14,6 @@ pub struct BeaconBlockBody {
pub exits: Vec<Exit>, pub exits: Vec<Exit>,
} }
impl Encodable for BeaconBlockBody {
fn ssz_append(&self, s: &mut SszStream) {
s.append_vec(&self.proposer_slashings);
s.append_vec(&self.attester_slashings);
s.append_vec(&self.attestations);
s.append_vec(&self.deposits);
s.append_vec(&self.exits);
}
}
impl Decodable for BeaconBlockBody {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (proposer_slashings, i) = <_>::ssz_decode(bytes, i)?;
let (attester_slashings, i) = <_>::ssz_decode(bytes, i)?;
let (attestations, i) = <_>::ssz_decode(bytes, i)?;
let (deposits, i) = <_>::ssz_decode(bytes, i)?;
let (exits, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
proposer_slashings,
attester_slashings,
attestations,
deposits,
exits,
},
i,
))
}
}
impl TreeHash for BeaconBlockBody { impl TreeHash for BeaconBlockBody {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -72,7 +42,7 @@ impl<T: RngCore> TestRandom<T> for BeaconBlockBody {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -7,7 +7,7 @@ use crate::{
Hash256, Keypair, Hash256, Keypair,
}; };
use bls::create_proof_of_possession; use bls::create_proof_of_possession;
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
struct BeaconStateTestBuilder { struct BeaconStateTestBuilder {
pub genesis_time: u64, pub genesis_time: u64,

View File

@ -2,36 +2,15 @@ use super::SlashableVoteData;
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct CasperSlashing { pub struct CasperSlashing {
pub slashable_vote_data_1: SlashableVoteData, pub slashable_vote_data_1: SlashableVoteData,
pub slashable_vote_data_2: SlashableVoteData, pub slashable_vote_data_2: SlashableVoteData,
} }
impl Encodable for CasperSlashing {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.slashable_vote_data_1);
s.append(&self.slashable_vote_data_2);
}
}
impl Decodable for CasperSlashing {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (slashable_vote_data_1, i) = <_>::ssz_decode(bytes, i)?;
let (slashable_vote_data_2, i) = <_>::ssz_decode(bytes, i)?;
Ok((
CasperSlashing {
slashable_vote_data_1,
slashable_vote_data_2,
},
i,
))
}
}
impl TreeHash for CasperSlashing { impl TreeHash for CasperSlashing {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -54,7 +33,7 @@ impl<T: RngCore> TestRandom<T> for CasperSlashing {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,9 +2,10 @@ use crate::test_utils::TestRandom;
use crate::{Epoch, Hash256}; use crate::{Epoch, Hash256};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, Clone, PartialEq, Default, Serialize, Hash)] #[derive(Debug, Clone, PartialEq, Default, Serialize, Hash, Encode, Decode)]
pub struct Crosslink { pub struct Crosslink {
pub epoch: Epoch, pub epoch: Epoch,
pub shard_block_root: Hash256, pub shard_block_root: Hash256,
@ -20,28 +21,6 @@ impl Crosslink {
} }
} }
impl Encodable for Crosslink {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.epoch);
s.append(&self.shard_block_root);
}
}
impl Decodable for Crosslink {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (epoch, i) = <_>::ssz_decode(bytes, i)?;
let (shard_block_root, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
epoch,
shard_block_root,
},
i,
))
}
}
impl TreeHash for Crosslink { impl TreeHash for Crosslink {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -64,7 +43,7 @@ impl<T: RngCore> TestRandom<T> for Crosslink {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,40 +2,16 @@ use super::{DepositData, Hash256};
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct Deposit { pub struct Deposit {
pub branch: Vec<Hash256>, pub branch: Vec<Hash256>,
pub index: u64, pub index: u64,
pub deposit_data: DepositData, pub deposit_data: DepositData,
} }
impl Encodable for Deposit {
fn ssz_append(&self, s: &mut SszStream) {
s.append_vec(&self.branch);
s.append(&self.index);
s.append(&self.deposit_data);
}
}
impl Decodable for Deposit {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (branch, i) = <_>::ssz_decode(bytes, i)?;
let (index, i) = <_>::ssz_decode(bytes, i)?;
let (deposit_data, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
branch,
index,
deposit_data,
},
i,
))
}
}
impl TreeHash for Deposit { impl TreeHash for Deposit {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -60,7 +36,7 @@ impl<T: RngCore> TestRandom<T> for Deposit {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,40 +2,16 @@ use super::DepositInput;
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct DepositData { pub struct DepositData {
pub amount: u64, pub amount: u64,
pub timestamp: u64, pub timestamp: u64,
pub deposit_input: DepositInput, pub deposit_input: DepositInput,
} }
impl Encodable for DepositData {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.amount);
s.append(&self.timestamp);
s.append(&self.deposit_input);
}
}
impl Decodable for DepositData {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (amount, i) = <_>::ssz_decode(bytes, i)?;
let (timestamp, i) = <_>::ssz_decode(bytes, i)?;
let (deposit_input, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
amount,
timestamp,
deposit_input,
},
i,
))
}
}
impl TreeHash for DepositData { impl TreeHash for DepositData {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -60,7 +36,7 @@ impl<T: RngCore> TestRandom<T> for DepositData {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -3,40 +3,16 @@ use crate::test_utils::TestRandom;
use bls::{PublicKey, Signature}; use bls::{PublicKey, Signature};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct DepositInput { pub struct DepositInput {
pub pubkey: PublicKey, pub pubkey: PublicKey,
pub withdrawal_credentials: Hash256, pub withdrawal_credentials: Hash256,
pub proof_of_possession: Signature, pub proof_of_possession: Signature,
} }
impl Encodable for DepositInput {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.pubkey);
s.append(&self.withdrawal_credentials);
s.append(&self.proof_of_possession);
}
}
impl Decodable for DepositInput {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (pubkey, i) = <_>::ssz_decode(bytes, i)?;
let (withdrawal_credentials, i) = <_>::ssz_decode(bytes, i)?;
let (proof_of_possession, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
pubkey,
withdrawal_credentials,
proof_of_possession,
},
i,
))
}
}
impl TreeHash for DepositInput { impl TreeHash for DepositInput {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -61,7 +37,7 @@ impl<T: RngCore> TestRandom<T> for DepositInput {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,37 +2,16 @@ use super::Hash256;
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
// Note: this is referred to as DepositRootVote in the specs // Note: this is referred to as DepositRootVote in the specs
#[derive(Debug, PartialEq, Clone, Default, Serialize)] #[derive(Debug, PartialEq, Clone, Default, Serialize, Encode, Decode)]
pub struct Eth1Data { pub struct Eth1Data {
pub deposit_root: Hash256, pub deposit_root: Hash256,
pub block_hash: Hash256, pub block_hash: Hash256,
} }
impl Encodable for Eth1Data {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.deposit_root);
s.append(&self.block_hash);
}
}
impl Decodable for Eth1Data {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (deposit_root, i) = <_>::ssz_decode(bytes, i)?;
let (block_hash, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
deposit_root,
block_hash,
},
i,
))
}
}
impl TreeHash for Eth1Data { impl TreeHash for Eth1Data {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -55,7 +34,7 @@ impl<T: RngCore> TestRandom<T> for Eth1Data {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,37 +2,16 @@ use super::Eth1Data;
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
// Note: this is referred to as DepositRootVote in the specs // Note: this is referred to as DepositRootVote in the specs
#[derive(Debug, PartialEq, Clone, Default, Serialize)] #[derive(Debug, PartialEq, Clone, Default, Serialize, Encode, Decode)]
pub struct Eth1DataVote { pub struct Eth1DataVote {
pub eth1_data: Eth1Data, pub eth1_data: Eth1Data,
pub vote_count: u64, pub vote_count: u64,
} }
impl Encodable for Eth1DataVote {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.eth1_data);
s.append(&self.vote_count);
}
}
impl Decodable for Eth1DataVote {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (eth1_data, i) = <_>::ssz_decode(bytes, i)?;
let (vote_count, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
eth1_data,
vote_count,
},
i,
))
}
}
impl TreeHash for Eth1DataVote { impl TreeHash for Eth1DataVote {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -55,7 +34,7 @@ impl<T: RngCore> TestRandom<T> for Eth1DataVote {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,40 +2,16 @@ use crate::{test_utils::TestRandom, Epoch};
use bls::Signature; use bls::Signature;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct Exit { pub struct Exit {
pub epoch: Epoch, pub epoch: Epoch,
pub validator_index: u64, pub validator_index: u64,
pub signature: Signature, pub signature: Signature,
} }
impl Encodable for Exit {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.epoch);
s.append(&self.validator_index);
s.append(&self.signature);
}
}
impl Decodable for Exit {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (epoch, i) = <_>::ssz_decode(bytes, i)?;
let (validator_index, i) = <_>::ssz_decode(bytes, i)?;
let (signature, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
epoch,
validator_index,
signature,
},
i,
))
}
}
impl TreeHash for Exit { impl TreeHash for Exit {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -60,7 +36,7 @@ impl<T: RngCore> TestRandom<T> for Exit {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -1,40 +1,16 @@
use crate::{test_utils::TestRandom, Epoch}; use crate::{test_utils::TestRandom, Epoch};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, Clone, PartialEq, Default, Serialize)] #[derive(Debug, Clone, PartialEq, Default, Serialize, Encode, Decode)]
pub struct Fork { pub struct Fork {
pub previous_version: u64, pub previous_version: u64,
pub current_version: u64, pub current_version: u64,
pub epoch: Epoch, pub epoch: Epoch,
} }
impl Encodable for Fork {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.previous_version);
s.append(&self.current_version);
s.append(&self.epoch);
}
}
impl Decodable for Fork {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (previous_version, i) = <_>::ssz_decode(bytes, i)?;
let (current_version, i) = <_>::ssz_decode(bytes, i)?;
let (epoch, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
previous_version,
current_version,
epoch,
},
i,
))
}
}
impl TreeHash for Fork { impl TreeHash for Fork {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -59,7 +35,7 @@ impl<T: RngCore> TestRandom<T> for Fork {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,9 +2,10 @@ use crate::test_utils::TestRandom;
use crate::{AttestationData, Bitfield, Slot}; use crate::{AttestationData, Bitfield, Slot};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, Clone, PartialEq, Serialize)] #[derive(Debug, Clone, PartialEq, Serialize, Encode, Decode)]
pub struct PendingAttestation { pub struct PendingAttestation {
pub aggregation_bitfield: Bitfield, pub aggregation_bitfield: Bitfield,
pub data: AttestationData, pub data: AttestationData,
@ -12,34 +13,6 @@ pub struct PendingAttestation {
pub inclusion_slot: Slot, pub inclusion_slot: Slot,
} }
impl Encodable for PendingAttestation {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.aggregation_bitfield);
s.append(&self.data);
s.append(&self.custody_bitfield);
s.append(&self.inclusion_slot);
}
}
impl Decodable for PendingAttestation {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (aggregation_bitfield, i) = <_>::ssz_decode(bytes, i)?;
let (data, i) = <_>::ssz_decode(bytes, i)?;
let (custody_bitfield, i) = <_>::ssz_decode(bytes, i)?;
let (inclusion_slot, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
data,
aggregation_bitfield,
custody_bitfield,
inclusion_slot,
},
i,
))
}
}
impl TreeHash for PendingAttestation { impl TreeHash for PendingAttestation {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -66,7 +39,7 @@ impl<T: RngCore> TestRandom<T> for PendingAttestation {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -2,40 +2,16 @@ use crate::test_utils::TestRandom;
use crate::{Hash256, Slot}; use crate::{Hash256, Slot};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Default, Serialize)] #[derive(Debug, PartialEq, Clone, Default, Serialize, Encode, Decode)]
pub struct ProposalSignedData { pub struct ProposalSignedData {
pub slot: Slot, pub slot: Slot,
pub shard: u64, pub shard: u64,
pub block_root: Hash256, pub block_root: Hash256,
} }
impl Encodable for ProposalSignedData {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.slot);
s.append(&self.shard);
s.append(&self.block_root);
}
}
impl Decodable for ProposalSignedData {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (slot, i) = <_>::ssz_decode(bytes, i)?;
let (shard, i) = <_>::ssz_decode(bytes, i)?;
let (block_root, i) = <_>::ssz_decode(bytes, i)?;
Ok((
ProposalSignedData {
slot,
shard,
block_root,
},
i,
))
}
}
impl TreeHash for ProposalSignedData { impl TreeHash for ProposalSignedData {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -60,7 +36,7 @@ impl<T: RngCore> TestRandom<T> for ProposalSignedData {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -3,9 +3,10 @@ use crate::test_utils::TestRandom;
use bls::Signature; use bls::Signature;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct ProposerSlashing { pub struct ProposerSlashing {
pub proposer_index: u64, pub proposer_index: u64,
pub proposal_data_1: ProposalSignedData, pub proposal_data_1: ProposalSignedData,
@ -14,37 +15,6 @@ pub struct ProposerSlashing {
pub proposal_signature_2: Signature, pub proposal_signature_2: Signature,
} }
impl Encodable for ProposerSlashing {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.proposer_index);
s.append(&self.proposal_data_1);
s.append(&self.proposal_signature_1);
s.append(&self.proposal_data_2);
s.append(&self.proposal_signature_2);
}
}
impl Decodable for ProposerSlashing {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (proposer_index, i) = <_>::ssz_decode(bytes, i)?;
let (proposal_data_1, i) = <_>::ssz_decode(bytes, i)?;
let (proposal_signature_1, i) = <_>::ssz_decode(bytes, i)?;
let (proposal_data_2, i) = <_>::ssz_decode(bytes, i)?;
let (proposal_signature_2, i) = <_>::ssz_decode(bytes, i)?;
Ok((
ProposerSlashing {
proposer_index,
proposal_data_1,
proposal_signature_1,
proposal_data_2,
proposal_signature_2,
},
i,
))
}
}
impl TreeHash for ProposerSlashing { impl TreeHash for ProposerSlashing {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -73,7 +43,7 @@ impl<T: RngCore> TestRandom<T> for ProposerSlashing {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -1,40 +1,16 @@
use crate::{test_utils::TestRandom, Slot}; use crate::{test_utils::TestRandom, Slot};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct ShardReassignmentRecord { pub struct ShardReassignmentRecord {
pub validator_index: u64, pub validator_index: u64,
pub shard: u64, pub shard: u64,
pub slot: Slot, pub slot: Slot,
} }
impl Encodable for ShardReassignmentRecord {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.validator_index);
s.append(&self.shard);
s.append(&self.slot);
}
}
impl Decodable for ShardReassignmentRecord {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (validator_index, i) = <_>::ssz_decode(bytes, i)?;
let (shard, i) = <_>::ssz_decode(bytes, i)?;
let (slot, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
validator_index,
shard,
slot,
},
i,
))
}
}
impl TreeHash for ShardReassignmentRecord { impl TreeHash for ShardReassignmentRecord {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -59,7 +35,7 @@ impl<T: RngCore> TestRandom<T> for ShardReassignmentRecord {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -1,9 +1,10 @@
use crate::{test_utils::TestRandom, AggregateSignature, AttestationData, Bitfield}; use crate::{test_utils::TestRandom, AggregateSignature, AttestationData, Bitfield};
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct SlashableAttestation { pub struct SlashableAttestation {
pub validator_indices: Vec<u64>, pub validator_indices: Vec<u64>,
pub data: AttestationData, pub data: AttestationData,
@ -11,34 +12,6 @@ pub struct SlashableAttestation {
pub aggregate_signature: AggregateSignature, pub aggregate_signature: AggregateSignature,
} }
impl Encodable for SlashableAttestation {
fn ssz_append(&self, s: &mut SszStream) {
s.append_vec(&self.validator_indices);
s.append(&self.data);
s.append(&self.custody_bitfield);
s.append(&self.aggregate_signature);
}
}
impl Decodable for SlashableAttestation {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (validator_indices, i) = <_>::ssz_decode(bytes, i)?;
let (data, i) = <_>::ssz_decode(bytes, i)?;
let (custody_bitfield, i) = <_>::ssz_decode(bytes, i)?;
let (aggregate_signature, i) = <_>::ssz_decode(bytes, i)?;
Ok((
SlashableAttestation {
validator_indices,
data,
custody_bitfield,
aggregate_signature,
},
i,
))
}
}
impl TreeHash for SlashableAttestation { impl TreeHash for SlashableAttestation {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -65,7 +38,7 @@ impl<T: RngCore> TestRandom<T> for SlashableAttestation {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -4,9 +4,10 @@ use crate::test_utils::TestRandom;
use bls::AggregateSignature; use bls::AggregateSignature;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
#[derive(Debug, PartialEq, Clone, Serialize)] #[derive(Debug, PartialEq, Clone, Serialize, Encode, Decode)]
pub struct SlashableVoteData { pub struct SlashableVoteData {
pub custody_bit_0_indices: Vec<u32>, pub custody_bit_0_indices: Vec<u32>,
pub custody_bit_1_indices: Vec<u32>, pub custody_bit_1_indices: Vec<u32>,
@ -35,34 +36,6 @@ impl SlashableVoteData {
} }
} }
impl Encodable for SlashableVoteData {
fn ssz_append(&self, s: &mut SszStream) {
s.append_vec(&self.custody_bit_0_indices);
s.append_vec(&self.custody_bit_1_indices);
s.append(&self.data);
s.append(&self.aggregate_signature);
}
}
impl Decodable for SlashableVoteData {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (custody_bit_0_indices, i) = <_>::ssz_decode(bytes, i)?;
let (custody_bit_1_indices, i) = <_>::ssz_decode(bytes, i)?;
let (data, i) = <_>::ssz_decode(bytes, i)?;
let (aggregate_signature, i) = <_>::ssz_decode(bytes, i)?;
Ok((
SlashableVoteData {
custody_bit_0_indices,
custody_bit_1_indices,
data,
aggregate_signature,
},
i,
))
}
}
impl TreeHash for SlashableVoteData { impl TreeHash for SlashableVoteData {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -91,7 +64,7 @@ mod tests {
use crate::chain_spec::ChainSpec; use crate::chain_spec::ChainSpec;
use crate::slot_epoch::{Epoch, Slot}; use crate::slot_epoch::{Epoch, Slot};
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_is_double_vote_true() { pub fn test_is_double_vote_true() {

View File

@ -25,12 +25,14 @@ macro_rules! impl_into_u32 {
($main: ident) => { ($main: ident) => {
impl Into<u32> for $main { impl Into<u32> for $main {
fn into(self) -> u32 { fn into(self) -> u32 {
assert!(self.0 < u64::from(std::u32::MAX), "Lossy conversion to u32");
self.0 as u32 self.0 as u32
} }
} }
impl $main { impl $main {
pub fn as_u32(&self) -> u32 { pub fn as_u32(&self) -> u32 {
assert!(self.0 < u64::from(std::u32::MAX), "Lossy conversion to u32");
self.0 as u32 self.0 as u32
} }
} }

View File

@ -2,10 +2,11 @@ use crate::{test_utils::TestRandom, Hash256, Slot};
use bls::PublicKey; use bls::PublicKey;
use rand::RngCore; use rand::RngCore;
use serde_derive::Serialize; use serde_derive::Serialize;
use ssz::{hash, Decodable, DecodeError, Encodable, SszStream, TreeHash}; use ssz::{hash, TreeHash};
use ssz_derive::{Decode, Encode};
// The information gathered from the PoW chain validator registration function. // The information gathered from the PoW chain validator registration function.
#[derive(Debug, Clone, PartialEq, Serialize)] #[derive(Debug, Clone, PartialEq, Serialize, Encode, Decode)]
pub struct ValidatorRegistryDeltaBlock { pub struct ValidatorRegistryDeltaBlock {
pub latest_registry_delta_root: Hash256, pub latest_registry_delta_root: Hash256,
pub validator_index: u32, pub validator_index: u32,
@ -27,37 +28,6 @@ impl Default for ValidatorRegistryDeltaBlock {
} }
} }
impl Encodable for ValidatorRegistryDeltaBlock {
fn ssz_append(&self, s: &mut SszStream) {
s.append(&self.latest_registry_delta_root);
s.append(&self.validator_index);
s.append(&self.pubkey);
s.append(&self.slot);
s.append(&self.flag);
}
}
impl Decodable for ValidatorRegistryDeltaBlock {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), DecodeError> {
let (latest_registry_delta_root, i) = <_>::ssz_decode(bytes, i)?;
let (validator_index, i) = <_>::ssz_decode(bytes, i)?;
let (pubkey, i) = <_>::ssz_decode(bytes, i)?;
let (slot, i) = <_>::ssz_decode(bytes, i)?;
let (flag, i) = <_>::ssz_decode(bytes, i)?;
Ok((
Self {
latest_registry_delta_root,
validator_index,
pubkey,
slot,
flag,
},
i,
))
}
}
impl TreeHash for ValidatorRegistryDeltaBlock { impl TreeHash for ValidatorRegistryDeltaBlock {
fn hash_tree_root_internal(&self) -> Vec<u8> { fn hash_tree_root_internal(&self) -> Vec<u8> {
let mut result: Vec<u8> = vec![]; let mut result: Vec<u8> = vec![];
@ -86,7 +56,7 @@ impl<T: RngCore> TestRandom<T> for ValidatorRegistryDeltaBlock {
mod tests { mod tests {
use super::*; use super::*;
use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng}; use crate::test_utils::{SeedableRng, TestRandom, XorShiftRng};
use ssz::ssz_encode; use ssz::{ssz_encode, Decodable};
#[test] #[test]
pub fn test_ssz_round_trip() { pub fn test_ssz_round_trip() {

View File

@ -39,6 +39,21 @@ impl Decodable for u8 {
} }
} }
impl Decodable for bool {
fn ssz_decode(bytes: &[u8], index: usize) -> Result<(Self, usize), DecodeError> {
if index >= bytes.len() {
Err(DecodeError::TooShort)
} else {
let result = match bytes[index] {
0b0000_0000 => false,
0b1000_0000 => true,
_ => return Err(DecodeError::Invalid),
};
Ok((result, index + 1))
}
}
}
impl Decodable for H256 { impl Decodable for H256 {
fn ssz_decode(bytes: &[u8], index: usize) -> Result<(Self, usize), DecodeError> { fn ssz_decode(bytes: &[u8], index: usize) -> Result<(Self, usize), DecodeError> {
if bytes.len() < 32 || bytes.len() - 32 < index { if bytes.len() < 32 || bytes.len() - 32 < index {
@ -215,4 +230,20 @@ mod tests {
let result: u16 = decode_ssz(&vec![0, 0, 0, 0, 1], 3).unwrap().0; let result: u16 = decode_ssz(&vec![0, 0, 0, 0, 1], 3).unwrap().0;
assert_eq!(result, 1); assert_eq!(result, 1);
} }
#[test]
fn test_decode_ssz_bool() {
let ssz = vec![0b0000_0000, 0b1000_0000];
let (result, index): (bool, usize) = decode_ssz(&ssz, 0).unwrap();
assert_eq!(index, 1);
assert_eq!(result, false);
let (result, index): (bool, usize) = decode_ssz(&ssz, 1).unwrap();
assert_eq!(index, 2);
assert_eq!(result, true);
let ssz = vec![0b0100_0000];
let result: Result<(bool, usize), DecodeError> = decode_ssz(&ssz, 0);
assert_eq!(result, Err(DecodeError::Invalid));
}
} }

View File

@ -46,6 +46,13 @@ impl_encodable_for_uint!(u32, 32);
impl_encodable_for_uint!(u64, 64); impl_encodable_for_uint!(u64, 64);
impl_encodable_for_uint!(usize, 64); impl_encodable_for_uint!(usize, 64);
impl Encodable for bool {
fn ssz_append(&self, s: &mut SszStream) {
let byte = if *self { 0b1000_0000 } else { 0b0000_0000 };
s.append_encoded_raw(&[byte]);
}
}
impl Encodable for H256 { impl Encodable for H256 {
fn ssz_append(&self, s: &mut SszStream) { fn ssz_append(&self, s: &mut SszStream) {
s.append_encoded_raw(&self.to_vec()); s.append_encoded_raw(&self.to_vec());
@ -206,4 +213,17 @@ mod tests {
ssz.append(&x); ssz.append(&x);
assert_eq!(ssz.drain(), vec![255, 255, 255, 255, 255, 255, 255, 255]); assert_eq!(ssz.drain(), vec![255, 255, 255, 255, 255, 255, 255, 255]);
} }
#[test]
fn test_ssz_encode_bool() {
let x: bool = false;
let mut ssz = SszStream::new();
ssz.append(&x);
assert_eq!(ssz.drain(), vec![0b0000_0000]);
let x: bool = true;
let mut ssz = SszStream::new();
ssz.append(&x);
assert_eq!(ssz.drain(), vec![0b1000_0000]);
}
} }

View File

@ -0,0 +1,14 @@
[package]
name = "ssz_derive"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"
description = "Procedural derive macros for SSZ encoding and decoding."
[lib]
proc-macro = true
[dependencies]
syn = "0.15"
quote = "0.6"
ssz = { path = "../ssz" }

View File

@ -0,0 +1,128 @@
//! Provides the following procedural derive macros:
//!
//! - `#[derive(Encode)]`
//! - `#[derive(Decode)]`
//!
//! These macros provide SSZ encoding/decoding for a `struct`. Fields are encoded/decoded in the
//! order they are defined.
//!
//! Presently, only `structs` with named fields are supported. `enum`s and tuple-structs are
//! unsupported.
//!
//! Example:
//! ```
//! use ssz::{ssz_encode, Decodable};
//! use ssz_derive::{Encode, Decode};
//!
//! #[derive(Encode, Decode)]
//! struct Foo {
//! pub bar: bool,
//! pub baz: u64,
//! }
//!
//! fn main() {
//! let foo = Foo {
//! bar: true,
//! baz: 42,
//! };
//!
//! let bytes = ssz_encode(&foo);
//!
//! let (decoded_foo, _i) = Foo::ssz_decode(&bytes, 0).unwrap();
//!
//! assert_eq!(foo.baz, decoded_foo.baz);
//! }
//! ```
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
/// Returns a Vec of `syn::Ident` for each named field in the struct.
///
/// # Panics
/// Any unnamed struct field (like in a tuple struct) will raise a panic at compile time.
fn get_named_field_idents<'a>(struct_data: &'a syn::DataStruct) -> Vec<&'a syn::Ident> {
struct_data
.fields
.iter()
.map(|f| match &f.ident {
Some(ref ident) => ident,
_ => panic!("ssz_derive only supports named struct fields."),
})
.collect()
}
/// Implements `ssz::Encodable` for some `struct`.
///
/// Fields are encoded in the order they are defined.
#[proc_macro_derive(Encode)]
pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
let item = parse_macro_input!(input as DeriveInput);
let name = &item.ident;
let struct_data = match &item.data {
syn::Data::Struct(s) => s,
_ => panic!("ssz_derive only supports structs."),
};
let field_idents = get_named_field_idents(&struct_data);
let output = quote! {
impl ssz::Encodable for #name {
fn ssz_append(&self, s: &mut ssz::SszStream) {
#(
s.append(&self.#field_idents);
)*
}
}
};
output.into()
}
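// For a hypothetical struct `struct Foo { bar: bool, baz: u64 }`, the derive
// above roughly expands to (sketch only, fields in definition order):
//
// impl ssz::Encodable for Foo {
//     fn ssz_append(&self, s: &mut ssz::SszStream) {
//         s.append(&self.bar);
//         s.append(&self.baz);
//     }
// }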
/// Implements `ssz::Decodable` for some `struct`.
///
/// Fields are decoded in the order they are defined.
#[proc_macro_derive(Decode)]
pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
let item = parse_macro_input!(input as DeriveInput);
let name = &item.ident;
let struct_data = match &item.data {
syn::Data::Struct(s) => s,
_ => panic!("ssz_derive only supports structs."),
};
let field_idents = get_named_field_idents(&struct_data);
// Using a var in an iteration always consumes the var, therefore we must make a `fields_a` and
// a `fields_b` in order to perform two loops.
//
// https://github.com/dtolnay/quote/issues/8
let field_idents_a = &field_idents;
let field_idents_b = &field_idents;
let output = quote! {
impl ssz::Decodable for #name {
fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), ssz::DecodeError> {
#(
let (#field_idents_a, i) = <_>::ssz_decode(bytes, i)?;
)*
Ok((
Self {
#(
#field_idents_b,
)*
},
i
))
}
}
};
output.into()
}
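// Likewise, for the same hypothetical `Foo`, the decode derive roughly
// expands to (sketch only): each field is decoded in definition order and the
// running byte index `i` is threaded through.
//
// impl ssz::Decodable for Foo {
//     fn ssz_decode(bytes: &[u8], i: usize) -> Result<(Self, usize), ssz::DecodeError> {
//         let (bar, i) = <_>::ssz_decode(bytes, i)?;
//         let (baz, i) = <_>::ssz_decode(bytes, i)?;
//         Ok((Self { bar, baz }, i))
//     }
// }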