Use checked arithmetic in types and state proc (#1009)

This commit is contained in:
Michael Sproul 2020-04-20 12:35:11 +10:00 committed by GitHub
parent 50ef0d7fbf
commit 32074f0d09
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
49 changed files with 525 additions and 169 deletions

View File

@ -65,3 +65,10 @@ jobs:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
- name: Typecheck benchmark code without running it - name: Typecheck benchmark code without running it
run: make check-benches run: make check-benches
clippy:
runs-on: ubuntu-latest
needs: cargo-fmt
steps:
- uses: actions/checkout@v1
- name: Lint code for quality and style with Clippy
run: make lint

8
Cargo.lock generated
View File

@ -226,6 +226,7 @@ dependencies = [
"proto_array_fork_choice 0.1.0", "proto_array_fork_choice 0.1.0",
"rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"safe_arith 0.1.0",
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2498,6 +2499,7 @@ dependencies = [
"lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"quickcheck 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)", "quickcheck 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
"quickcheck_macros 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "quickcheck_macros 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"safe_arith 0.1.0",
] ]
[[package]] [[package]]
@ -3524,6 +3526,10 @@ name = "ryu"
version = "1.0.2" version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "safe_arith"
version = "0.1.0"
[[package]] [[package]]
name = "safemem" name = "safemem"
version = "0.3.3" version = "0.3.3"
@ -3941,6 +3947,7 @@ dependencies = [
"log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
"merkle_proof 0.1.0", "merkle_proof 0.1.0",
"rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"safe_arith 0.1.0",
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)", "serde_yaml 0.8.11 (registry+https://github.com/rust-lang/crates.io-index)",
@ -4543,6 +4550,7 @@ dependencies = [
"rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_xorshift 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand_xorshift 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"safe_arith 0.1.0",
"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.44 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@ -16,6 +16,7 @@ members = [
"eth2/utils/lighthouse_metrics", "eth2/utils/lighthouse_metrics",
"eth2/utils/merkle_proof", "eth2/utils/merkle_proof",
"eth2/utils/int_to_bytes", "eth2/utils/int_to_bytes",
"eth2/utils/safe_arith",
"eth2/utils/serde_hex", "eth2/utils/serde_hex",
"eth2/utils/slot_clock", "eth2/utils/slot_clock",
"eth2/utils/ssz", "eth2/utils/ssz",

View File

@ -41,6 +41,11 @@ test: test-release
# Runs the entire test suite, downloading test vectors if required. # Runs the entire test suite, downloading test vectors if required.
test-full: cargo-fmt test-release test-debug test-ef test-full: cargo-fmt test-release test-debug test-ef
# Lints the code for bad style and potentially unsafe arithmetic using Clippy.
# Clippy lints are opt-in per-crate for now, which is why we allow all by default.
lint:
cargo clippy --all -- -A clippy::all
# Runs the makefile in the `ef_tests` repo. # Runs the makefile in the `ef_tests` repo.
# #
# May download and extract an archive of test vectors from the ethereum # May download and extract an archive of test vectors from the ethereum

View File

@ -42,6 +42,7 @@ rand = "0.7.2"
proto_array_fork_choice = { path = "../../eth2/proto_array_fork_choice" } proto_array_fork_choice = { path = "../../eth2/proto_array_fork_choice" }
lru = "0.4.3" lru = "0.4.3"
tempfile = "3.1.0" tempfile = "3.1.0"
safe_arith = { path = "../../eth2/utils/safe_arith" }
[dev-dependencies] [dev-dependencies]
lazy_static = "1.4.0" lazy_static = "1.4.0"

View File

@ -45,6 +45,14 @@ pub enum Error {
/// ///
/// The eth1 caches are stale, or a junk value was voted into the chain. /// The eth1 caches are stale, or a junk value was voted into the chain.
UnknownPreviousEth1BlockHash, UnknownPreviousEth1BlockHash,
/// An arithmetic error occurred.
ArithError(safe_arith::ArithError),
}
impl From<safe_arith::ArithError> for Error {
fn from(e: safe_arith::ArithError) -> Self {
Self::ArithError(e)
}
} }
#[derive(Encode, Decode, Clone)] #[derive(Encode, Decode, Clone)]
@ -369,7 +377,7 @@ impl<T: EthSpec, S: Store<T>> Eth1ChainBackend<T, S> for CachingEth1Backend<T, S
_spec: &ChainSpec, _spec: &ChainSpec,
) -> Result<Vec<Deposit>, Error> { ) -> Result<Vec<Deposit>, Error> {
let deposit_index = state.eth1_deposit_index; let deposit_index = state.eth1_deposit_index;
let deposit_count = if let Some(new_eth1_data) = get_new_eth1_data(state, eth1_data_vote) { let deposit_count = if let Some(new_eth1_data) = get_new_eth1_data(state, eth1_data_vote)? {
new_eth1_data.deposit_count new_eth1_data.deposit_count
} else { } else {
state.eth1_data.deposit_count state.eth1_data.deposit_count

View File

@ -383,7 +383,8 @@ impl Eth1GenesisService {
.map_err(|e| format!("Error whilst processing deposit: {:?}", e)) .map_err(|e| format!("Error whilst processing deposit: {:?}", e))
})?; })?;
process_activations(&mut local_state, spec); process_activations(&mut local_state, spec)
.map_err(|e| format!("Error whilst processing activations: {:?}", e))?;
let is_valid = is_valid_genesis_state(&local_state, spec); let is_valid = is_valid_genesis_state(&local_state, spec);
trace!( trace!(

View File

@ -27,6 +27,7 @@ eth2_ssz = "0.1.2"
eth2_ssz_types = { path = "../utils/ssz_types" } eth2_ssz_types = { path = "../utils/ssz_types" }
merkle_proof = { path = "../utils/merkle_proof" } merkle_proof = { path = "../utils/merkle_proof" }
log = "0.4.8" log = "0.4.8"
safe_arith = { path = "../utils/safe_arith" }
tree_hash = "0.1.0" tree_hash = "0.1.0"
tree_hash_derive = "0.2" tree_hash_derive = "0.2"
types = { path = "../types" } types = { path = "../types" }

View File

@ -1,6 +1,7 @@
use eth2_hashing::hash; use eth2_hashing::hash;
use int_to_bytes::int_to_bytes32; use int_to_bytes::int_to_bytes32;
use merkle_proof::{MerkleTree, MerkleTreeError}; use merkle_proof::{MerkleTree, MerkleTreeError};
use safe_arith::SafeArith;
use types::Hash256; use types::Hash256;
/// Emulates the eth1 deposit contract merkle tree. /// Emulates the eth1 deposit contract merkle tree.
@ -46,7 +47,7 @@ impl DepositDataTree {
/// Add a deposit to the merkle tree. /// Add a deposit to the merkle tree.
pub fn push_leaf(&mut self, leaf: Hash256) -> Result<(), MerkleTreeError> { pub fn push_leaf(&mut self, leaf: Hash256) -> Result<(), MerkleTreeError> {
self.tree.push_leaf(leaf, self.depth)?; self.tree.push_leaf(leaf, self.depth)?;
self.mix_in_length += 1; self.mix_in_length.increment()?;
Ok(()) Ok(())
} }
} }

View File

@ -1,4 +1,5 @@
use integer_sqrt::IntegerSquareRoot; use integer_sqrt::IntegerSquareRoot;
use safe_arith::SafeArith;
use types::*; use types::*;
/// Returns the base reward for some validator. /// Returns the base reward for some validator.
@ -14,10 +15,10 @@ pub fn get_base_reward<T: EthSpec>(
if total_active_balance == 0 { if total_active_balance == 0 {
Ok(0) Ok(0)
} else { } else {
Ok( Ok(state
state.get_effective_balance(index, spec)? * spec.base_reward_factor .get_effective_balance(index, spec)?
/ total_active_balance.integer_sqrt() .safe_mul(spec.base_reward_factor)?
/ spec.base_rewards_per_epoch, .safe_div(total_active_balance.integer_sqrt())?
) .safe_div(spec.base_rewards_per_epoch)?)
} }
} }

View File

@ -1,4 +1,5 @@
use crate::common::initiate_validator_exit; use crate::common::initiate_validator_exit;
use safe_arith::SafeArith;
use std::cmp; use std::cmp;
use types::{BeaconStateError as Error, *}; use types::{BeaconStateError as Error, *};
@ -27,18 +28,21 @@ pub fn slash_validator<T: EthSpec>(
let validator_effective_balance = state.get_effective_balance(slashed_index, spec)?; let validator_effective_balance = state.get_effective_balance(slashed_index, spec)?;
state.set_slashings( state.set_slashings(
epoch, epoch,
state.get_slashings(epoch)? + validator_effective_balance, state
.get_slashings(epoch)?
.safe_add(validator_effective_balance)?,
)?; )?;
safe_sub_assign!( safe_sub_assign!(
state.balances[slashed_index], state.balances[slashed_index],
validator_effective_balance / spec.min_slashing_penalty_quotient validator_effective_balance.safe_div(spec.min_slashing_penalty_quotient)?
); );
// Apply proposer and whistleblower rewards // Apply proposer and whistleblower rewards
let proposer_index = state.get_beacon_proposer_index(state.slot, spec)?; let proposer_index = state.get_beacon_proposer_index(state.slot, spec)?;
let whistleblower_index = opt_whistleblower_index.unwrap_or(proposer_index); let whistleblower_index = opt_whistleblower_index.unwrap_or(proposer_index);
let whistleblower_reward = validator_effective_balance / spec.whistleblower_reward_quotient; let whistleblower_reward =
let proposer_reward = whistleblower_reward / spec.proposer_reward_quotient; validator_effective_balance.safe_div(spec.whistleblower_reward_quotient)?;
let proposer_reward = whistleblower_reward.safe_div(spec.proposer_reward_quotient)?;
safe_add_assign!(state.balances[proposer_index], proposer_reward); safe_add_assign!(state.balances[proposer_index], proposer_reward);
safe_add_assign!( safe_add_assign!(

View File

@ -1,5 +1,6 @@
use super::per_block_processing::{errors::BlockProcessingError, process_deposit}; use super::per_block_processing::{errors::BlockProcessingError, process_deposit};
use crate::common::DepositDataTree; use crate::common::DepositDataTree;
use safe_arith::SafeArith;
use tree_hash::TreeHash; use tree_hash::TreeHash;
use types::DEPOSIT_TREE_DEPTH; use types::DEPOSIT_TREE_DEPTH;
use types::*; use types::*;
@ -14,8 +15,9 @@ pub fn initialize_beacon_state_from_eth1<T: EthSpec>(
deposits: Vec<Deposit>, deposits: Vec<Deposit>,
spec: &ChainSpec, spec: &ChainSpec,
) -> Result<BeaconState<T>, BlockProcessingError> { ) -> Result<BeaconState<T>, BlockProcessingError> {
let genesis_time = let genesis_time = eth1_timestamp
eth1_timestamp - eth1_timestamp % spec.min_genesis_delay + 2 * spec.min_genesis_delay; .safe_sub(eth1_timestamp.safe_rem(spec.min_genesis_delay)?)?
.safe_add(2.safe_mul(spec.min_genesis_delay)?)?;
let eth1_data = Eth1Data { let eth1_data = Eth1Data {
// Temporary deposit root // Temporary deposit root
deposit_root: Hash256::zero(), deposit_root: Hash256::zero(),
@ -37,7 +39,7 @@ pub fn initialize_beacon_state_from_eth1<T: EthSpec>(
process_deposit(&mut state, &deposit, spec, true)?; process_deposit(&mut state, &deposit, spec, true)?;
} }
process_activations(&mut state, spec); process_activations(&mut state, spec)?;
// Now that we have our validators, initialize the caches (including the committees) // Now that we have our validators, initialize the caches (including the committees)
state.build_all_caches(spec)?; state.build_all_caches(spec)?;
@ -60,11 +62,14 @@ pub fn is_valid_genesis_state<T: EthSpec>(state: &BeaconState<T>, spec: &ChainSp
/// Activate genesis validators, if their balance is acceptable. /// Activate genesis validators, if their balance is acceptable.
/// ///
/// Spec v0.11.1 /// Spec v0.11.1
pub fn process_activations<T: EthSpec>(state: &mut BeaconState<T>, spec: &ChainSpec) { pub fn process_activations<T: EthSpec>(
state: &mut BeaconState<T>,
spec: &ChainSpec,
) -> Result<(), Error> {
for (index, validator) in state.validators.iter_mut().enumerate() { for (index, validator) in state.validators.iter_mut().enumerate() {
let balance = state.balances[index]; let balance = state.balances[index];
validator.effective_balance = std::cmp::min( validator.effective_balance = std::cmp::min(
balance - balance % spec.effective_balance_increment, balance.safe_sub(balance.safe_rem(spec.effective_balance_increment)?)?,
spec.max_effective_balance, spec.max_effective_balance,
); );
if validator.effective_balance == spec.max_effective_balance { if validator.effective_balance == spec.max_effective_balance {
@ -72,4 +77,5 @@ pub fn process_activations<T: EthSpec>(state: &mut BeaconState<T>, spec: &ChainS
validator.activation_epoch = T::genesis_epoch(); validator.activation_epoch = T::genesis_epoch();
} }
} }
Ok(())
} }

View File

@ -1,3 +1,5 @@
#![deny(clippy::integer_arithmetic)]
#[macro_use] #[macro_use]
mod macros; mod macros;

View File

@ -1,6 +1,7 @@
use crate::common::{initiate_validator_exit, slash_validator}; use crate::common::{initiate_validator_exit, slash_validator};
use errors::{BlockOperationError, BlockProcessingError, HeaderInvalid, IntoWithIndex}; use errors::{BlockOperationError, BlockProcessingError, HeaderInvalid, IntoWithIndex};
use rayon::prelude::*; use rayon::prelude::*;
use safe_arith::{ArithError, SafeArith};
use signature_sets::{block_proposal_signature_set, get_pubkey_from_state, randao_signature_set}; use signature_sets::{block_proposal_signature_set, get_pubkey_from_state, randao_signature_set};
use std::convert::TryInto; use std::convert::TryInto;
use tree_hash::TreeHash; use tree_hash::TreeHash;
@ -239,7 +240,7 @@ pub fn process_eth1_data<T: EthSpec>(
state: &mut BeaconState<T>, state: &mut BeaconState<T>,
eth1_data: &Eth1Data, eth1_data: &Eth1Data,
) -> Result<(), Error> { ) -> Result<(), Error> {
if let Some(new_eth1_data) = get_new_eth1_data(state, eth1_data) { if let Some(new_eth1_data) = get_new_eth1_data(state, eth1_data)? {
state.eth1_data = new_eth1_data; state.eth1_data = new_eth1_data;
} }
@ -248,14 +249,14 @@ pub fn process_eth1_data<T: EthSpec>(
Ok(()) Ok(())
} }
/// Returns `Some(eth1_data)` if adding the given `eth1_data` to `state.eth1_data_votes` would /// Returns `Ok(Some(eth1_data))` if adding the given `eth1_data` to `state.eth1_data_votes` would
/// result in a change to `state.eth1_data`. /// result in a change to `state.eth1_data`.
/// ///
/// Spec v0.11.1 /// Spec v0.11.1
pub fn get_new_eth1_data<T: EthSpec>( pub fn get_new_eth1_data<T: EthSpec>(
state: &BeaconState<T>, state: &BeaconState<T>,
eth1_data: &Eth1Data, eth1_data: &Eth1Data,
) -> Option<Eth1Data> { ) -> Result<Option<Eth1Data>, ArithError> {
let num_votes = state let num_votes = state
.eth1_data_votes .eth1_data_votes
.iter() .iter()
@ -263,10 +264,10 @@ pub fn get_new_eth1_data<T: EthSpec>(
.count(); .count();
// The +1 is to account for the `eth1_data` supplied to the function. // The +1 is to account for the `eth1_data` supplied to the function.
if 2 * (num_votes + 1) > T::SlotsPerEth1VotingPeriod::to_usize() { if num_votes.safe_add(1)?.safe_mul(2)? > T::SlotsPerEth1VotingPeriod::to_usize() {
Some(eth1_data.clone()) Ok(Some(eth1_data.clone()))
} else { } else {
None Ok(None)
} }
} }
@ -318,7 +319,8 @@ pub fn process_attester_slashings<T: EthSpec>(
) -> Result<(), BlockProcessingError> { ) -> Result<(), BlockProcessingError> {
// Verify the `IndexedAttestation`s in parallel (these are the resource-consuming objects, not // Verify the `IndexedAttestation`s in parallel (these are the resource-consuming objects, not
// the `AttesterSlashing`s themselves). // the `AttesterSlashing`s themselves).
let mut indexed_attestations: Vec<&_> = Vec::with_capacity(attester_slashings.len() * 2); let mut indexed_attestations: Vec<&_> =
Vec::with_capacity(attester_slashings.len().safe_mul(2)?);
for attester_slashing in attester_slashings { for attester_slashing in attester_slashings {
indexed_attestations.push(&attester_slashing.attestation_1); indexed_attestations.push(&attester_slashing.attestation_1);
indexed_attestations.push(&attester_slashing.attestation_2); indexed_attestations.push(&attester_slashing.attestation_2);
@ -432,8 +434,13 @@ pub fn process_deposits<T: EthSpec>(
.par_iter() .par_iter()
.enumerate() .enumerate()
.try_for_each(|(i, deposit)| { .try_for_each(|(i, deposit)| {
verify_deposit_merkle_proof(state, deposit, state.eth1_deposit_index + i as u64, spec) verify_deposit_merkle_proof(
.map_err(|e| e.into_with_index(i)) state,
deposit,
state.eth1_deposit_index.safe_add(i as u64)?,
spec,
)
.map_err(|e| e.into_with_index(i))
})?; })?;
// Update the state in series. // Update the state in series.
@ -459,7 +466,7 @@ pub fn process_deposit<T: EthSpec>(
.map_err(|e| e.into_with_index(deposit_index))?; .map_err(|e| e.into_with_index(deposit_index))?;
} }
state.eth1_deposit_index += 1; state.eth1_deposit_index.increment()?;
// Ensure the state's pubkey cache is fully up-to-date, it will be used to check to see if the // Ensure the state's pubkey cache is fully up-to-date, it will be used to check to see if the
// depositing validator already exists in the registry. // depositing validator already exists in the registry.
@ -495,7 +502,7 @@ pub fn process_deposit<T: EthSpec>(
exit_epoch: spec.far_future_epoch, exit_epoch: spec.far_future_epoch,
withdrawable_epoch: spec.far_future_epoch, withdrawable_epoch: spec.far_future_epoch,
effective_balance: std::cmp::min( effective_balance: std::cmp::min(
amount - amount % spec.effective_balance_increment, amount.safe_sub(amount.safe_rem(spec.effective_balance_increment)?)?,
spec.max_effective_balance, spec.max_effective_balance,
), ),
slashed: false, slashed: false,

View File

@ -1,3 +1,5 @@
#![allow(clippy::integer_arithmetic)]
use super::signature_sets::{Error as SignatureSetError, Result as SignatureSetResult, *}; use super::signature_sets::{Error as SignatureSetError, Result as SignatureSetResult, *};
use crate::common::get_indexed_attestation; use crate::common::get_indexed_attestation;

View File

@ -1,9 +1,10 @@
use super::signature_sets::Error as SignatureSetError; use super::signature_sets::Error as SignatureSetError;
use merkle_proof::MerkleTreeError; use merkle_proof::MerkleTreeError;
use safe_arith::ArithError;
use types::*; use types::*;
/// The error returned from the `per_block_processing` function. Indicates that a block is either /// The error returned from the `per_block_processing` function. Indicates that a block is either
/// invalid, or we were unable to determine it's validity (we encountered an unexpected error). /// invalid, or we were unable to determine its validity (we encountered an unexpected error).
/// ///
/// Any of the `...Error` variants indicate that at some point during block (and block operation) /// Any of the `...Error` variants indicate that at some point during block (and block operation)
/// verification, there was an error. There is no indication as to _where_ that error happened /// verification, there was an error. There is no indication as to _where_ that error happened
@ -48,6 +49,7 @@ pub enum BlockProcessingError {
SignatureSetError(SignatureSetError), SignatureSetError(SignatureSetError),
SszTypesError(ssz_types::Error), SszTypesError(ssz_types::Error),
MerkleTreeError(MerkleTreeError), MerkleTreeError(MerkleTreeError),
ArithError(ArithError),
} }
impl From<BeaconStateError> for BlockProcessingError { impl From<BeaconStateError> for BlockProcessingError {
@ -68,6 +70,12 @@ impl From<ssz_types::Error> for BlockProcessingError {
} }
} }
impl From<ArithError> for BlockProcessingError {
fn from(e: ArithError) -> Self {
BlockProcessingError::ArithError(e)
}
}
impl From<BlockOperationError<HeaderInvalid>> for BlockProcessingError { impl From<BlockOperationError<HeaderInvalid>> for BlockProcessingError {
fn from(e: BlockOperationError<HeaderInvalid>) -> BlockProcessingError { fn from(e: BlockOperationError<HeaderInvalid>) -> BlockProcessingError {
match e { match e {
@ -75,6 +83,7 @@ impl From<BlockOperationError<HeaderInvalid>> for BlockProcessingError {
BlockOperationError::BeaconStateError(e) => BlockProcessingError::BeaconStateError(e), BlockOperationError::BeaconStateError(e) => BlockProcessingError::BeaconStateError(e),
BlockOperationError::SignatureSetError(e) => BlockProcessingError::SignatureSetError(e), BlockOperationError::SignatureSetError(e) => BlockProcessingError::SignatureSetError(e),
BlockOperationError::SszTypesError(e) => BlockProcessingError::SszTypesError(e), BlockOperationError::SszTypesError(e) => BlockProcessingError::SszTypesError(e),
BlockOperationError::ArithError(e) => BlockProcessingError::ArithError(e),
} }
} }
} }
@ -101,6 +110,7 @@ macro_rules! impl_into_block_processing_error_with_index {
BlockOperationError::BeaconStateError(e) => BlockProcessingError::BeaconStateError(e), BlockOperationError::BeaconStateError(e) => BlockProcessingError::BeaconStateError(e),
BlockOperationError::SignatureSetError(e) => BlockProcessingError::SignatureSetError(e), BlockOperationError::SignatureSetError(e) => BlockProcessingError::SignatureSetError(e),
BlockOperationError::SszTypesError(e) => BlockProcessingError::SszTypesError(e), BlockOperationError::SszTypesError(e) => BlockProcessingError::SszTypesError(e),
BlockOperationError::ArithError(e) => BlockProcessingError::ArithError(e),
} }
} }
} }
@ -130,6 +140,7 @@ pub enum BlockOperationError<T> {
BeaconStateError(BeaconStateError), BeaconStateError(BeaconStateError),
SignatureSetError(SignatureSetError), SignatureSetError(SignatureSetError),
SszTypesError(ssz_types::Error), SszTypesError(ssz_types::Error),
ArithError(ArithError),
} }
impl<T> BlockOperationError<T> { impl<T> BlockOperationError<T> {
@ -155,6 +166,12 @@ impl<T> From<ssz_types::Error> for BlockOperationError<T> {
} }
} }
impl<T> From<ArithError> for BlockOperationError<T> {
fn from(e: ArithError) -> Self {
BlockOperationError::ArithError(e)
}
}
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub enum HeaderInvalid { pub enum HeaderInvalid {
ProposalSignatureInvalid, ProposalSignatureInvalid,
@ -265,6 +282,7 @@ impl From<BlockOperationError<IndexedAttestationInvalid>>
BlockOperationError::BeaconStateError(e) => BlockOperationError::BeaconStateError(e), BlockOperationError::BeaconStateError(e) => BlockOperationError::BeaconStateError(e),
BlockOperationError::SignatureSetError(e) => BlockOperationError::SignatureSetError(e), BlockOperationError::SignatureSetError(e) => BlockOperationError::SignatureSetError(e),
BlockOperationError::SszTypesError(e) => BlockOperationError::SszTypesError(e), BlockOperationError::SszTypesError(e) => BlockOperationError::SszTypesError(e),
BlockOperationError::ArithError(e) => BlockOperationError::ArithError(e),
} }
} }
} }

View File

@ -3,6 +3,7 @@ use crate::per_block_processing::signature_sets::{
deposit_pubkey_signature_message, deposit_signature_set, deposit_pubkey_signature_message, deposit_signature_set,
}; };
use merkle_proof::verify_merkle_proof; use merkle_proof::verify_merkle_proof;
use safe_arith::SafeArith;
use tree_hash::TreeHash; use tree_hash::TreeHash;
use types::*; use types::*;
@ -59,7 +60,7 @@ pub fn verify_deposit_merkle_proof<T: EthSpec>(
verify_merkle_proof( verify_merkle_proof(
leaf, leaf,
&deposit.proof[..], &deposit.proof[..],
spec.deposit_contract_tree_depth as usize + 1, spec.deposit_contract_tree_depth.safe_add(1)? as usize,
deposit_index as usize, deposit_index as usize,
state.eth1_data.deposit_root, state.eth1_data.deposit_root,
), ),

View File

@ -1,4 +1,5 @@
use errors::EpochProcessingError as Error; use errors::EpochProcessingError as Error;
use safe_arith::SafeArith;
use tree_hash::TreeHash; use tree_hash::TreeHash;
use types::*; use types::*;
@ -90,7 +91,11 @@ pub fn process_justification_and_finalization<T: EthSpec>(
state.previous_justified_checkpoint = state.current_justified_checkpoint.clone(); state.previous_justified_checkpoint = state.current_justified_checkpoint.clone();
state.justification_bits.shift_up(1)?; state.justification_bits.shift_up(1)?;
if total_balances.previous_epoch_target_attesters() * 3 >= total_balances.current_epoch() * 2 { if total_balances
.previous_epoch_target_attesters()
.safe_mul(3)?
>= total_balances.current_epoch().safe_mul(2)?
{
state.current_justified_checkpoint = Checkpoint { state.current_justified_checkpoint = Checkpoint {
epoch: previous_epoch, epoch: previous_epoch,
root: *state.get_block_root_at_epoch(previous_epoch)?, root: *state.get_block_root_at_epoch(previous_epoch)?,
@ -98,7 +103,11 @@ pub fn process_justification_and_finalization<T: EthSpec>(
state.justification_bits.set(1, true)?; state.justification_bits.set(1, true)?;
} }
// If the current epoch gets justified, fill the last bit. // If the current epoch gets justified, fill the last bit.
if total_balances.current_epoch_target_attesters() * 3 >= total_balances.current_epoch() * 2 { if total_balances
.current_epoch_target_attesters()
.safe_mul(3)?
>= total_balances.current_epoch().safe_mul(2)?
{
state.current_justified_checkpoint = Checkpoint { state.current_justified_checkpoint = Checkpoint {
epoch: current_epoch, epoch: current_epoch,
root: *state.get_block_root_at_epoch(current_epoch)?, root: *state.get_block_root_at_epoch(current_epoch)?,
@ -152,17 +161,19 @@ pub fn process_final_updates<T: EthSpec>(
} }
// Update effective balances with hysteresis (lag). // Update effective balances with hysteresis (lag).
let hysteresis_increment = spec.effective_balance_increment / spec.hysteresis_quotient; let hysteresis_increment = spec
let downward_threshold = hysteresis_increment * spec.hysteresis_downward_multiplier; .effective_balance_increment
let upward_threshold = hysteresis_increment * spec.hysteresis_upward_multiplier; .safe_div(spec.hysteresis_quotient)?;
let downward_threshold = hysteresis_increment.safe_mul(spec.hysteresis_downward_multiplier)?;
let upward_threshold = hysteresis_increment.safe_mul(spec.hysteresis_upward_multiplier)?;
for (index, validator) in state.validators.iter_mut().enumerate() { for (index, validator) in state.validators.iter_mut().enumerate() {
let balance = state.balances[index]; let balance = state.balances[index];
if balance + downward_threshold < validator.effective_balance if balance.safe_add(downward_threshold)? < validator.effective_balance
|| validator.effective_balance + upward_threshold < balance || validator.effective_balance.safe_add(upward_threshold)? < balance
{ {
validator.effective_balance = std::cmp::min( validator.effective_balance = std::cmp::min(
balance - balance % spec.effective_balance_increment, balance.safe_sub(balance.safe_rem(spec.effective_balance_increment)?)?,
spec.max_effective_balance, spec.max_effective_balance,
); );
} }
@ -175,7 +186,11 @@ pub fn process_final_updates<T: EthSpec>(
state.set_randao_mix(next_epoch, *state.get_randao_mix(current_epoch)?)?; state.set_randao_mix(next_epoch, *state.get_randao_mix(current_epoch)?)?;
// Set historical root accumulator // Set historical root accumulator
if next_epoch.as_u64() % (T::SlotsPerHistoricalRoot::to_u64() / T::slots_per_epoch()) == 0 { if next_epoch
.as_u64()
.safe_rem(T::SlotsPerHistoricalRoot::to_u64().safe_div(T::slots_per_epoch())?)?
== 0
{
let historical_batch = state.historical_batch(); let historical_batch = state.historical_batch();
state state
.historical_roots .historical_roots

View File

@ -1,6 +1,7 @@
use super::super::common::get_base_reward; use super::super::common::get_base_reward;
use super::validator_statuses::{TotalBalances, ValidatorStatus, ValidatorStatuses}; use super::validator_statuses::{TotalBalances, ValidatorStatus, ValidatorStatuses};
use super::Error; use super::Error;
use safe_arith::SafeArith;
use types::*; use types::*;
@ -13,21 +14,21 @@ pub struct Delta {
impl Delta { impl Delta {
/// Reward the validator with the `reward`. /// Reward the validator with the `reward`.
pub fn reward(&mut self, reward: u64) { pub fn reward(&mut self, reward: u64) -> Result<(), Error> {
self.rewards += reward; self.rewards = self.rewards.safe_add(reward)?;
Ok(())
} }
/// Penalize the validator with the `penalty`. /// Penalize the validator with the `penalty`.
pub fn penalize(&mut self, penalty: u64) { pub fn penalize(&mut self, penalty: u64) -> Result<(), Error> {
self.penalties += penalty; self.penalties = self.penalties.safe_add(penalty)?;
Ok(())
} }
}
impl std::ops::AddAssign for Delta { /// Combine two deltas.
/// Use wrapping addition as that is how it's defined in the spec. fn combine(&mut self, other: Delta) -> Result<(), Error> {
fn add_assign(&mut self, other: Delta) { self.reward(other.rewards)?;
self.rewards += other.rewards; self.penalize(other.penalties)
self.penalties += other.penalties;
} }
} }
@ -56,9 +57,10 @@ pub fn process_rewards_and_penalties<T: EthSpec>(
get_proposer_deltas(&mut deltas, state, validator_statuses, spec)?; get_proposer_deltas(&mut deltas, state, validator_statuses, spec)?;
// Apply the deltas, over-flowing but not under-flowing (saturating at 0 instead). // Apply the deltas, erroring on overflow above but not on overflow below (saturating at 0
// instead).
for (i, delta) in deltas.iter().enumerate() { for (i, delta) in deltas.iter().enumerate() {
state.balances[i] += delta.rewards; state.balances[i] = state.balances[i].safe_add(delta.rewards)?;
state.balances[i] = state.balances[i].saturating_sub(delta.penalties); state.balances[i] = state.balances[i].saturating_sub(delta.penalties);
} }
@ -91,7 +93,8 @@ fn get_proposer_deltas<T: EthSpec>(
return Err(Error::ValidatorStatusesInconsistent); return Err(Error::ValidatorStatusesInconsistent);
} }
deltas[inclusion.proposer_index].reward(base_reward / spec.proposer_reward_quotient); deltas[inclusion.proposer_index]
.reward(base_reward.safe_div(spec.proposer_reward_quotient)?)?;
} }
} }
@ -123,9 +126,9 @@ fn get_attestation_deltas<T: EthSpec>(
base_reward, base_reward,
finality_delay, finality_delay,
spec, spec,
); )?;
deltas[index] += delta; deltas[index].combine(delta)?;
} }
Ok(()) Ok(())
@ -140,7 +143,7 @@ fn get_attestation_delta<T: EthSpec>(
base_reward: u64, base_reward: u64,
finality_delay: u64, finality_delay: u64,
spec: &ChainSpec, spec: &ChainSpec,
) -> Delta { ) -> Result<Delta, Error> {
let mut delta = Delta::default(); let mut delta = Delta::default();
// Is this validator eligible to be rewarded or penalized? // Is this validator eligible to be rewarded or penalized?
@ -149,7 +152,7 @@ fn get_attestation_delta<T: EthSpec>(
|| (validator.is_slashed && !validator.is_withdrawable_in_current_epoch); || (validator.is_slashed && !validator.is_withdrawable_in_current_epoch);
if !is_eligible { if !is_eligible {
return delta; return Ok(delta);
} }
// Handle integer overflow by dividing these quantities by EFFECTIVE_BALANCE_INCREMENT // Handle integer overflow by dividing these quantities by EFFECTIVE_BALANCE_INCREMENT
@ -157,59 +160,78 @@ fn get_attestation_delta<T: EthSpec>(
// - increment = EFFECTIVE_BALANCE_INCREMENT // - increment = EFFECTIVE_BALANCE_INCREMENT
// - reward_numerator = get_base_reward(state, index) * (attesting_balance // increment) // - reward_numerator = get_base_reward(state, index) * (attesting_balance // increment)
// - rewards[index] = reward_numerator // (total_balance // increment) // - rewards[index] = reward_numerator // (total_balance // increment)
let total_balance_ebi = total_balances.current_epoch() / spec.effective_balance_increment; let total_balance_ebi = total_balances
let total_attesting_balance_ebi = .current_epoch()
total_balances.previous_epoch_attesters() / spec.effective_balance_increment; .safe_div(spec.effective_balance_increment)?;
let matching_target_balance_ebi = let total_attesting_balance_ebi = total_balances
total_balances.previous_epoch_target_attesters() / spec.effective_balance_increment; .previous_epoch_attesters()
let matching_head_balance_ebi = .safe_div(spec.effective_balance_increment)?;
total_balances.previous_epoch_head_attesters() / spec.effective_balance_increment; let matching_target_balance_ebi = total_balances
.previous_epoch_target_attesters()
.safe_div(spec.effective_balance_increment)?;
let matching_head_balance_ebi = total_balances
.previous_epoch_head_attesters()
.safe_div(spec.effective_balance_increment)?;
// Expected FFG source. // Expected FFG source.
// Spec: // Spec:
// - validator index in `get_unslashed_attesting_indices(state, matching_source_attestations)` // - validator index in `get_unslashed_attesting_indices(state, matching_source_attestations)`
if validator.is_previous_epoch_attester && !validator.is_slashed { if validator.is_previous_epoch_attester && !validator.is_slashed {
delta.reward(base_reward * total_attesting_balance_ebi / total_balance_ebi); delta.reward(
base_reward
.safe_mul(total_attesting_balance_ebi)?
.safe_div(total_balance_ebi)?,
)?;
// Inclusion speed bonus // Inclusion speed bonus
let proposer_reward = base_reward / spec.proposer_reward_quotient; let proposer_reward = base_reward.safe_div(spec.proposer_reward_quotient)?;
let max_attester_reward = base_reward - proposer_reward; let max_attester_reward = base_reward.safe_sub(proposer_reward)?;
let inclusion = validator let inclusion = validator
.inclusion_info .inclusion_info
.expect("It is a logic error for an attester not to have an inclusion delay."); .expect("It is a logic error for an attester not to have an inclusion delay.");
delta.reward(max_attester_reward / inclusion.delay); delta.reward(max_attester_reward.safe_div(inclusion.delay)?)?;
} else { } else {
delta.penalize(base_reward); delta.penalize(base_reward)?;
} }
// Expected FFG target. // Expected FFG target.
// Spec: // Spec:
// - validator index in `get_unslashed_attesting_indices(state, matching_target_attestations)` // - validator index in `get_unslashed_attesting_indices(state, matching_target_attestations)`
if validator.is_previous_epoch_target_attester && !validator.is_slashed { if validator.is_previous_epoch_target_attester && !validator.is_slashed {
delta.reward(base_reward * matching_target_balance_ebi / total_balance_ebi); delta.reward(
base_reward
.safe_mul(matching_target_balance_ebi)?
.safe_div(total_balance_ebi)?,
)?;
} else { } else {
delta.penalize(base_reward); delta.penalize(base_reward)?;
} }
// Expected head. // Expected head.
// Spec: // Spec:
// - validator index in `get_unslashed_attesting_indices(state, matching_head_attestations)` // - validator index in `get_unslashed_attesting_indices(state, matching_head_attestations)`
if validator.is_previous_epoch_head_attester && !validator.is_slashed { if validator.is_previous_epoch_head_attester && !validator.is_slashed {
delta.reward(base_reward * matching_head_balance_ebi / total_balance_ebi); delta.reward(
base_reward
.safe_mul(matching_head_balance_ebi)?
.safe_div(total_balance_ebi)?,
)?;
} else { } else {
delta.penalize(base_reward); delta.penalize(base_reward)?;
} }
// Inactivity penalty // Inactivity penalty
if finality_delay > spec.min_epochs_to_inactivity_penalty { if finality_delay > spec.min_epochs_to_inactivity_penalty {
// All eligible validators are penalized // All eligible validators are penalized
delta.penalize(spec.base_rewards_per_epoch * base_reward); delta.penalize(spec.base_rewards_per_epoch.safe_mul(base_reward)?)?;
// Additionally, all validators whose FFG target didn't match are penalized extra // Additionally, all validators whose FFG target didn't match are penalized extra
if !validator.is_previous_epoch_target_attester { if !validator.is_previous_epoch_target_attester {
delta.penalize( delta.penalize(
validator.current_epoch_effective_balance * finality_delay validator
/ spec.inactivity_penalty_quotient, .current_epoch_effective_balance
); .safe_mul(finality_delay)?
.safe_div(spec.inactivity_penalty_quotient)?,
)?;
} }
} }
@ -218,5 +240,5 @@ fn get_attestation_delta<T: EthSpec>(
// This function only computes the delta for a single validator, so it cannot also return a // This function only computes the delta for a single validator, so it cannot also return a
// delta for a validator. // delta for a validator.
delta Ok(delta)
} }

View File

@ -18,6 +18,7 @@ pub enum EpochProcessingError {
BeaconStateError(BeaconStateError), BeaconStateError(BeaconStateError),
InclusionError(InclusionError), InclusionError(InclusionError),
SszTypesError(ssz_types::Error), SszTypesError(ssz_types::Error),
ArithError(safe_arith::ArithError),
} }
impl From<InclusionError> for EpochProcessingError { impl From<InclusionError> for EpochProcessingError {
@ -38,6 +39,12 @@ impl From<ssz_types::Error> for EpochProcessingError {
} }
} }
impl From<safe_arith::ArithError> for EpochProcessingError {
fn from(e: safe_arith::ArithError) -> EpochProcessingError {
EpochProcessingError::ArithError(e)
}
}
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub enum InclusionError { pub enum InclusionError {
/// The validator did not participate in an attestation in this period. /// The validator did not participate in an attestation in this period.

View File

@ -1,3 +1,4 @@
use safe_arith::SafeArith;
use types::{BeaconStateError as Error, *}; use types::{BeaconStateError as Error, *};
/// Process slashings. /// Process slashings.
@ -13,12 +14,17 @@ pub fn process_slashings<T: EthSpec>(
for (index, validator) in state.validators.iter().enumerate() { for (index, validator) in state.validators.iter().enumerate() {
if validator.slashed if validator.slashed
&& epoch + T::EpochsPerSlashingsVector::to_u64() / 2 == validator.withdrawable_epoch && epoch + T::EpochsPerSlashingsVector::to_u64().safe_div(2)?
== validator.withdrawable_epoch
{ {
let increment = spec.effective_balance_increment; let increment = spec.effective_balance_increment;
let penalty_numerator = validator.effective_balance / increment let penalty_numerator = validator
* std::cmp::min(sum_slashings * 3, total_balance); .effective_balance
let penalty = penalty_numerator / total_balance * increment; .safe_div(increment)?
.safe_mul(std::cmp::min(sum_slashings.safe_mul(3)?, total_balance))?;
let penalty = penalty_numerator
.safe_div(total_balance)?
.safe_mul(increment)?;
safe_sub_assign!(state.balances[index], penalty); safe_sub_assign!(state.balances[index], penalty);
} }

View File

@ -1,4 +1,5 @@
use crate::common::get_attesting_indices; use crate::common::get_attesting_indices;
use safe_arith::SafeArith;
use types::*; use types::*;
/// Sets the boolean `var` on `self` to be true if it is true on `other`. Otherwise leaves `self` /// Sets the boolean `var` on `self` to be true if it is true on `other`. Otherwise leaves `self`
@ -198,12 +199,16 @@ impl ValidatorStatuses {
if validator.is_active_at(state.current_epoch()) { if validator.is_active_at(state.current_epoch()) {
status.is_active_in_current_epoch = true; status.is_active_in_current_epoch = true;
total_balances.current_epoch += effective_balance; total_balances
.current_epoch
.safe_add_assign(effective_balance)?;
} }
if validator.is_active_at(state.previous_epoch()) { if validator.is_active_at(state.previous_epoch()) {
status.is_active_in_previous_epoch = true; status.is_active_in_previous_epoch = true;
total_balances.previous_epoch += effective_balance; total_balances
.previous_epoch
.safe_add_assign(effective_balance)?;
} }
statuses.push(status); statuses.push(status);
@ -275,19 +280,29 @@ impl ValidatorStatuses {
let validator_balance = state.get_effective_balance(index, spec)?; let validator_balance = state.get_effective_balance(index, spec)?;
if v.is_current_epoch_attester { if v.is_current_epoch_attester {
self.total_balances.current_epoch_attesters += validator_balance; self.total_balances
.current_epoch_attesters
.safe_add_assign(validator_balance)?;
} }
if v.is_current_epoch_target_attester { if v.is_current_epoch_target_attester {
self.total_balances.current_epoch_target_attesters += validator_balance; self.total_balances
.current_epoch_target_attesters
.safe_add_assign(validator_balance)?;
} }
if v.is_previous_epoch_attester { if v.is_previous_epoch_attester {
self.total_balances.previous_epoch_attesters += validator_balance; self.total_balances
.previous_epoch_attesters
.safe_add_assign(validator_balance)?;
} }
if v.is_previous_epoch_target_attester { if v.is_previous_epoch_target_attester {
self.total_balances.previous_epoch_target_attesters += validator_balance; self.total_balances
.previous_epoch_target_attesters
.safe_add_assign(validator_balance)?;
} }
if v.is_previous_epoch_head_attester { if v.is_previous_epoch_head_attester {
self.total_balances.previous_epoch_head_attesters += validator_balance; self.total_balances
.previous_epoch_head_attesters
.safe_add_assign(validator_balance)?;
} }
} }
} }

View File

@ -23,6 +23,7 @@ log = "0.4.8"
merkle_proof = { path = "../utils/merkle_proof" } merkle_proof = { path = "../utils/merkle_proof" }
rayon = "1.2.0" rayon = "1.2.0"
rand = "0.7.2" rand = "0.7.2"
safe_arith = { path = "../utils/safe_arith" }
serde = "1.0.102" serde = "1.0.102"
serde_derive = "1.0.102" serde_derive = "1.0.102"
slog = "2.5.2" slog = "2.5.2"

View File

@ -7,6 +7,7 @@ use compare_fields_derive::CompareFields;
use eth2_hashing::hash; use eth2_hashing::hash;
use int_to_bytes::{int_to_bytes4, int_to_bytes8}; use int_to_bytes::{int_to_bytes4, int_to_bytes8};
use pubkey_cache::PubkeyCache; use pubkey_cache::PubkeyCache;
use safe_arith::{ArithError, SafeArith};
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use ssz::ssz_encode; use ssz::ssz_encode;
use ssz_derive::{Decode, Encode}; use ssz_derive::{Decode, Encode};
@ -75,6 +76,10 @@ pub enum Error {
deposit_count: u64, deposit_count: u64,
deposit_index: u64, deposit_index: u64,
}, },
/// An arithmetic operation occurred which would have overflowed or divided by 0.
///
/// This represents a serious bug in either the spec or Lighthouse!
ArithError(ArithError),
} }
/// Control whether an epoch-indexed field can be indexed at the next epoch or not. /// Control whether an epoch-indexed field can be indexed at the next epoch or not.
@ -411,7 +416,7 @@ impl<T: EthSpec> BeaconState<T> {
let mut i = 0; let mut i = 0;
loop { loop {
let candidate_index = indices[compute_shuffled_index( let candidate_index = indices[compute_shuffled_index(
i % indices.len(), i.safe_rem(indices.len())?,
indices.len(), indices.len(),
seed, seed,
spec.shuffle_round_count, spec.shuffle_round_count,
@ -419,17 +424,19 @@ impl<T: EthSpec> BeaconState<T> {
.ok_or(Error::UnableToShuffle)?]; .ok_or(Error::UnableToShuffle)?];
let random_byte = { let random_byte = {
let mut preimage = seed.to_vec(); let mut preimage = seed.to_vec();
preimage.append(&mut int_to_bytes8((i / 32) as u64)); preimage.append(&mut int_to_bytes8(i.safe_div(32)? as u64));
let hash = hash(&preimage); let hash = hash(&preimage);
hash[i % 32] hash[i.safe_rem(32)?]
}; };
let effective_balance = self.validators[candidate_index].effective_balance; let effective_balance = self.validators[candidate_index].effective_balance;
if effective_balance * MAX_RANDOM_BYTE if effective_balance.safe_mul(MAX_RANDOM_BYTE)?
>= spec.max_effective_balance * u64::from(random_byte) >= spec
.max_effective_balance
.safe_mul(u64::from(random_byte))?
{ {
return Ok(candidate_index); return Ok(candidate_index);
} }
i += 1; i.increment()?;
} }
} }
@ -476,8 +483,8 @@ impl<T: EthSpec> BeaconState<T> {
/// ///
/// Spec v0.11.1 /// Spec v0.11.1
fn get_latest_block_roots_index(&self, slot: Slot) -> Result<usize, Error> { fn get_latest_block_roots_index(&self, slot: Slot) -> Result<usize, Error> {
if (slot < self.slot) && (self.slot <= slot + self.block_roots.len() as u64) { if slot < self.slot && self.slot <= slot + self.block_roots.len() as u64 {
Ok(slot.as_usize() % self.block_roots.len()) Ok(slot.as_usize().safe_rem(self.block_roots.len())?)
} else { } else {
Err(BeaconStateError::SlotOutOfBounds) Err(BeaconStateError::SlotOutOfBounds)
} }
@ -529,7 +536,7 @@ impl<T: EthSpec> BeaconState<T> {
let len = T::EpochsPerHistoricalVector::to_u64(); let len = T::EpochsPerHistoricalVector::to_u64();
if current_epoch < epoch + len && epoch <= allow_next_epoch.upper_bound_of(current_epoch) { if current_epoch < epoch + len && epoch <= allow_next_epoch.upper_bound_of(current_epoch) {
Ok(epoch.as_usize() % len as usize) Ok(epoch.as_usize().safe_rem(len as usize)?)
} else { } else {
Err(Error::EpochOutOfBounds) Err(Error::EpochOutOfBounds)
} }
@ -543,7 +550,9 @@ impl<T: EthSpec> BeaconState<T> {
/// ///
/// Spec v0.11.1 /// Spec v0.11.1
pub fn update_randao_mix(&mut self, epoch: Epoch, signature: &Signature) -> Result<(), Error> { pub fn update_randao_mix(&mut self, epoch: Epoch, signature: &Signature) -> Result<(), Error> {
let i = epoch.as_usize() % T::EpochsPerHistoricalVector::to_usize(); let i = epoch
.as_usize()
.safe_rem(T::EpochsPerHistoricalVector::to_usize())?;
let signature_hash = Hash256::from_slice(&hash(&ssz_encode(signature))); let signature_hash = Hash256::from_slice(&hash(&ssz_encode(signature)));
@ -573,8 +582,8 @@ impl<T: EthSpec> BeaconState<T> {
/// ///
/// Spec v0.11.1 /// Spec v0.11.1
fn get_latest_state_roots_index(&self, slot: Slot) -> Result<usize, Error> { fn get_latest_state_roots_index(&self, slot: Slot) -> Result<usize, Error> {
if (slot < self.slot) && (self.slot <= slot + Slot::from(self.state_roots.len())) { if slot < self.slot && self.slot <= slot + self.state_roots.len() as u64 {
Ok(slot.as_usize() % self.state_roots.len()) Ok(slot.as_usize().safe_rem(self.state_roots.len())?)
} else { } else {
Err(BeaconStateError::SlotOutOfBounds) Err(BeaconStateError::SlotOutOfBounds)
} }
@ -628,7 +637,9 @@ impl<T: EthSpec> BeaconState<T> {
if current_epoch < epoch + T::EpochsPerSlashingsVector::to_u64() if current_epoch < epoch + T::EpochsPerSlashingsVector::to_u64()
&& epoch <= allow_next_epoch.upper_bound_of(current_epoch) && epoch <= allow_next_epoch.upper_bound_of(current_epoch)
{ {
Ok(epoch.as_usize() % T::EpochsPerSlashingsVector::to_usize()) Ok(epoch
.as_usize()
.safe_rem(T::EpochsPerSlashingsVector::to_usize())?)
} else { } else {
Err(Error::EpochOutOfBounds) Err(Error::EpochOutOfBounds)
} }
@ -687,20 +698,20 @@ impl<T: EthSpec> BeaconState<T> {
// == 0`. // == 0`.
let mix = { let mix = {
let i = epoch + T::EpochsPerHistoricalVector::to_u64() - spec.min_seed_lookahead - 1; let i = epoch + T::EpochsPerHistoricalVector::to_u64() - spec.min_seed_lookahead - 1;
self.randao_mixes[i.as_usize() % self.randao_mixes.len()] self.randao_mixes[i.as_usize().safe_rem(self.randao_mixes.len())?]
}; };
let domain_bytes = int_to_bytes4(spec.get_domain_constant(domain_type)); let domain_bytes = int_to_bytes4(spec.get_domain_constant(domain_type));
let epoch_bytes = int_to_bytes8(epoch.as_u64()); let epoch_bytes = int_to_bytes8(epoch.as_u64());
const NUM_DOMAIN_BYTES: usize = 4; const NUM_DOMAIN_BYTES: usize = 4;
const NUM_EPOCH_BYTES: usize = 8; const NUM_EPOCH_BYTES: usize = 8;
const MIX_OFFSET: usize = NUM_DOMAIN_BYTES + NUM_EPOCH_BYTES;
const NUM_MIX_BYTES: usize = 32; const NUM_MIX_BYTES: usize = 32;
let mut preimage = [0; NUM_DOMAIN_BYTES + NUM_EPOCH_BYTES + NUM_MIX_BYTES]; let mut preimage = [0; NUM_DOMAIN_BYTES + NUM_EPOCH_BYTES + NUM_MIX_BYTES];
preimage[0..NUM_DOMAIN_BYTES].copy_from_slice(&domain_bytes); preimage[0..NUM_DOMAIN_BYTES].copy_from_slice(&domain_bytes);
preimage[NUM_DOMAIN_BYTES..NUM_DOMAIN_BYTES + NUM_EPOCH_BYTES] preimage[NUM_DOMAIN_BYTES..MIX_OFFSET].copy_from_slice(&epoch_bytes);
.copy_from_slice(&epoch_bytes); preimage[MIX_OFFSET..].copy_from_slice(mix.as_bytes());
preimage[NUM_DOMAIN_BYTES + NUM_EPOCH_BYTES..].copy_from_slice(mix.as_bytes());
Ok(Hash256::from_slice(&hash(&preimage))) Ok(Hash256::from_slice(&hash(&preimage)))
} }
@ -734,9 +745,10 @@ impl<T: EthSpec> BeaconState<T> {
pub fn get_churn_limit(&self, spec: &ChainSpec) -> Result<u64, Error> { pub fn get_churn_limit(&self, spec: &ChainSpec) -> Result<u64, Error> {
Ok(std::cmp::max( Ok(std::cmp::max(
spec.min_per_epoch_churn_limit, spec.min_per_epoch_churn_limit,
self.committee_cache(RelativeEpoch::Current)? (self
.active_validator_count() as u64 .committee_cache(RelativeEpoch::Current)?
/ spec.churn_limit_quotient, .active_validator_count() as u64)
.safe_div(spec.churn_limit_quotient)?,
)) ))
} }
@ -766,7 +778,7 @@ impl<T: EthSpec> BeaconState<T> {
) -> Result<u64, Error> { ) -> Result<u64, Error> {
validator_indices.iter().try_fold(0_u64, |acc, i| { validator_indices.iter().try_fold(0_u64, |acc, i| {
self.get_effective_balance(*i, spec) self.get_effective_balance(*i, spec)
.and_then(|bal| Ok(bal + acc)) .and_then(|bal| Ok(acc.safe_add(bal)?))
}) })
} }
@ -1072,3 +1084,9 @@ impl From<tree_hash::Error> for Error {
Error::TreeHashError(e) Error::TreeHashError(e)
} }
} }
impl From<ArithError> for Error {
fn from(e: ArithError) -> Error {
Error::ArithError(e)
}
}

View File

@ -1,3 +1,5 @@
#![allow(clippy::integer_arithmetic)]
use super::BeaconState; use super::BeaconState;
use crate::*; use crate::*;
use core::num::NonZeroUsize; use core::num::NonZeroUsize;
@ -43,7 +45,7 @@ impl CommitteeCache {
} }
let committees_per_slot = let committees_per_slot =
T::get_committee_count_per_slot(active_validator_indices.len(), spec) as u64; T::get_committee_count_per_slot(active_validator_indices.len(), spec)? as u64;
let seed = state.get_seed(epoch, Domain::BeaconAttester, spec)?; let seed = state.get_seed(epoch, Domain::BeaconAttester, spec)?;
@ -56,7 +58,7 @@ impl CommitteeCache {
.ok_or_else(|| Error::UnableToShuffle)?; .ok_or_else(|| Error::UnableToShuffle)?;
// The use of `NonZeroUsize` reduces the maximum number of possible validators by one. // The use of `NonZeroUsize` reduces the maximum number of possible validators by one.
if state.validators.len() > usize::max_value() - 1 { if state.validators.len() == usize::max_value() {
return Err(Error::TooManyValidators); return Err(Error::TooManyValidators);
} }

View File

@ -1,4 +1,5 @@
use super::{BeaconStateError, ChainSpec, Epoch, Validator}; use super::{BeaconStateError, ChainSpec, Epoch, Validator};
use safe_arith::SafeArith;
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
@ -50,7 +51,10 @@ impl ExitCache {
/// Must only be called once per exiting validator. /// Must only be called once per exiting validator.
pub fn record_validator_exit(&mut self, exit_epoch: Epoch) -> Result<(), BeaconStateError> { pub fn record_validator_exit(&mut self, exit_epoch: Epoch) -> Result<(), BeaconStateError> {
self.check_initialized()?; self.check_initialized()?;
*self.exits_per_epoch.entry(exit_epoch).or_insert(0) += 1; self.exits_per_epoch
.entry(exit_epoch)
.or_insert(0)
.increment()?;
Ok(()) Ok(())
} }

View File

@ -23,6 +23,7 @@ impl PubkeyCache {
/// ///
/// The added index must equal the number of validators already added to the map. This ensures /// The added index must equal the number of validators already added to the map. This ensures
/// that an index is never skipped. /// that an index is never skipped.
#[allow(clippy::integer_arithmetic)]
pub fn insert(&mut self, pubkey: PublicKeyBytes, index: ValidatorIndex) -> bool { pub fn insert(&mut self, pubkey: PublicKeyBytes, index: ValidatorIndex) -> bool {
if index == self.len { if index == self.len {
self.map.insert(pubkey, index); self.map.insert(pubkey, index);

View File

@ -1,3 +1,5 @@
#![allow(clippy::integer_arithmetic)]
use super::Error; use super::Error;
use crate::{BeaconState, EthSpec, Hash256, Unsigned, Validator}; use crate::{BeaconState, EthSpec, Hash256, Unsigned, Validator};
use cached_tree_hash::{int_log, CacheArena, CachedTreeHash, TreeHashCache}; use cached_tree_hash::{int_log, CacheArena, CachedTreeHash, TreeHashCache};
@ -195,7 +197,7 @@ impl ValidatorsListTreeHashCache {
/// This function makes assumptions that the `validators` list will only change in accordance /// This function makes assumptions that the `validators` list will only change in accordance
/// with valid per-block/per-slot state transitions. /// with valid per-block/per-slot state transitions.
fn recalculate_tree_hash_root(&mut self, validators: &[Validator]) -> Result<Hash256, Error> { fn recalculate_tree_hash_root(&mut self, validators: &[Validator]) -> Result<Hash256, Error> {
let mut list_arena = std::mem::replace(&mut self.list_arena, CacheArena::default()); let mut list_arena = std::mem::take(&mut self.list_arena);
let leaves = self let leaves = self
.values .values

View File

@ -238,10 +238,10 @@ impl ChainSpec {
/* /*
* Gwei values * Gwei values
*/ */
min_deposit_amount: u64::pow(2, 0) * u64::pow(10, 9), min_deposit_amount: u64::pow(2, 0).saturating_mul(u64::pow(10, 9)),
max_effective_balance: u64::pow(2, 5) * u64::pow(10, 9), max_effective_balance: u64::pow(2, 5).saturating_mul(u64::pow(10, 9)),
ejection_balance: u64::pow(2, 4) * u64::pow(10, 9), ejection_balance: u64::pow(2, 4).saturating_mul(u64::pow(10, 9)),
effective_balance_increment: u64::pow(2, 0) * u64::pow(10, 9), effective_balance_increment: u64::pow(2, 0).saturating_mul(u64::pow(10, 9)),
/* /*
* Initial Values * Initial Values
@ -522,6 +522,7 @@ impl Default for YamlConfig {
/// Spec v0.11.1 /// Spec v0.11.1
impl YamlConfig { impl YamlConfig {
#[allow(clippy::integer_arithmetic)]
pub fn from_spec<T: EthSpec>(spec: &ChainSpec) -> Self { pub fn from_spec<T: EthSpec>(spec: &ChainSpec) -> Self {
Self { Self {
// ChainSpec // ChainSpec
@ -557,7 +558,7 @@ impl YamlConfig {
proposer_reward_quotient: spec.proposer_reward_quotient, proposer_reward_quotient: spec.proposer_reward_quotient,
inactivity_penalty_quotient: spec.inactivity_penalty_quotient, inactivity_penalty_quotient: spec.inactivity_penalty_quotient,
min_slashing_penalty_quotient: spec.min_slashing_penalty_quotient, min_slashing_penalty_quotient: spec.min_slashing_penalty_quotient,
genesis_fork_version: spec.genesis_fork_version.clone(), genesis_fork_version: spec.genesis_fork_version,
safe_slots_to_update_justified: spec.safe_slots_to_update_justified, safe_slots_to_update_justified: spec.safe_slots_to_update_justified,
domain_beacon_proposer: spec.domain_beacon_proposer, domain_beacon_proposer: spec.domain_beacon_proposer,
domain_beacon_attester: spec.domain_beacon_attester, domain_beacon_attester: spec.domain_beacon_attester,
@ -642,7 +643,7 @@ impl YamlConfig {
effective_balance_increment: self.effective_balance_increment, effective_balance_increment: self.effective_balance_increment,
genesis_slot: Slot::from(self.genesis_slot), genesis_slot: Slot::from(self.genesis_slot),
bls_withdrawal_prefix_byte: self.bls_withdrawal_prefix, bls_withdrawal_prefix_byte: self.bls_withdrawal_prefix,
milliseconds_per_slot: self.seconds_per_slot * 1000, milliseconds_per_slot: self.seconds_per_slot.saturating_mul(1000),
min_attestation_inclusion_delay: self.min_attestation_inclusion_delay, min_attestation_inclusion_delay: self.min_attestation_inclusion_delay,
min_seed_lookahead: Epoch::from(self.min_seed_lookahead), min_seed_lookahead: Epoch::from(self.min_seed_lookahead),
max_seed_lookahead: Epoch::from(self.max_seed_lookahead), max_seed_lookahead: Epoch::from(self.max_seed_lookahead),
@ -662,7 +663,7 @@ impl YamlConfig {
domain_deposit: self.domain_deposit, domain_deposit: self.domain_deposit,
domain_voluntary_exit: self.domain_voluntary_exit, domain_voluntary_exit: self.domain_voluntary_exit,
boot_nodes: chain_spec.boot_nodes.clone(), boot_nodes: chain_spec.boot_nodes.clone(),
genesis_fork_version: self.genesis_fork_version.clone(), genesis_fork_version: self.genesis_fork_version,
eth1_follow_distance: self.eth1_follow_distance, eth1_follow_distance: self.eth1_follow_distance,
..*chain_spec ..*chain_spec
}) })

View File

@ -1,4 +1,5 @@
use crate::*; use crate::*;
use safe_arith::SafeArith;
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use ssz_types::typenum::{ use ssz_types::typenum::{
Unsigned, U0, U1, U1024, U1099511627776, U128, U16, U16777216, U2, U2048, U32, U4, U4096, U64, Unsigned, U0, U1, U1024, U1099511627776, U128, U16, U16777216, U2, U2048, U32, U4, U4096, U64,
@ -63,16 +64,21 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq {
/// the `active_validator_count` during the slot's epoch. /// the `active_validator_count` during the slot's epoch.
/// ///
/// Spec v0.11.1 /// Spec v0.11.1
fn get_committee_count_per_slot(active_validator_count: usize, spec: &ChainSpec) -> usize { fn get_committee_count_per_slot(
active_validator_count: usize,
spec: &ChainSpec,
) -> Result<usize, Error> {
let slots_per_epoch = Self::SlotsPerEpoch::to_usize(); let slots_per_epoch = Self::SlotsPerEpoch::to_usize();
std::cmp::max( Ok(std::cmp::max(
1, 1,
std::cmp::min( std::cmp::min(
spec.max_committees_per_slot, spec.max_committees_per_slot,
active_validator_count / slots_per_epoch / spec.target_committee_size, active_validator_count
.safe_div(slots_per_epoch)?
.safe_div(spec.target_committee_size)?,
), ),
) ))
} }
/// Returns the minimum number of validators required for this spec. /// Returns the minimum number of validators required for this spec.

View File

@ -2,6 +2,8 @@
// Required for big type-level numbers // Required for big type-level numbers
#![recursion_limit = "128"] #![recursion_limit = "128"]
// Clippy lint set up
#![deny(clippy::integer_arithmetic)]
#[macro_use] #[macro_use]
pub mod test_utils; pub mod test_utils;

View File

@ -14,7 +14,6 @@ use crate::test_utils::TestRandom;
use crate::SignedRoot; use crate::SignedRoot;
use rand::RngCore; use rand::RngCore;
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
use slog;
use ssz::{ssz_encode, Decode, DecodeError, Encode}; use ssz::{ssz_encode, Decode, DecodeError, Encode};
use std::cmp::{Ord, Ordering}; use std::cmp::{Ord, Ordering};
use std::fmt; use std::fmt;
@ -38,7 +37,7 @@ impl Slot {
} }
pub fn epoch(self, slots_per_epoch: u64) -> Epoch { pub fn epoch(self, slots_per_epoch: u64) -> Epoch {
Epoch::from(self.0 / slots_per_epoch) Epoch::from(self.0) / Epoch::from(slots_per_epoch)
} }
pub fn max_value() -> Slot { pub fn max_value() -> Slot {
@ -77,8 +76,8 @@ impl Epoch {
let start = self.start_slot(slots_per_epoch); let start = self.start_slot(slots_per_epoch);
let end = self.end_slot(slots_per_epoch); let end = self.end_slot(slots_per_epoch);
if (slot >= start) && (slot <= end) { if slot >= start && slot <= end {
Some(slot.as_usize() - start.as_usize()) slot.as_usize().checked_sub(start.as_usize())
} else { } else {
None None
} }
@ -110,7 +109,7 @@ impl<'a> Iterator for SlotIter<'a> {
} else { } else {
let start_slot = self.epoch.start_slot(self.slots_per_epoch); let start_slot = self.epoch.start_slot(self.slots_per_epoch);
let previous = self.current_iteration; let previous = self.current_iteration;
self.current_iteration += 1; self.current_iteration = self.current_iteration.checked_add(1)?;
Some(start_slot + previous) Some(start_slot + previous)
} }
} }

View File

@ -107,20 +107,21 @@ macro_rules! impl_math_between {
fn div(self, rhs: $other) -> $main { fn div(self, rhs: $other) -> $main {
let rhs: u64 = rhs.into(); let rhs: u64 = rhs.into();
if rhs == 0 { $main::from(
panic!("Cannot divide by zero-valued Slot/Epoch") self.0
} .checked_div(rhs)
$main::from(self.0 / rhs) .expect("Cannot divide by zero-valued Slot/Epoch"),
)
} }
} }
impl DivAssign<$other> for $main { impl DivAssign<$other> for $main {
fn div_assign(&mut self, rhs: $other) { fn div_assign(&mut self, rhs: $other) {
let rhs: u64 = rhs.into(); let rhs: u64 = rhs.into();
if rhs == 0 { self.0 = self
panic!("Cannot divide by zero-valued Slot/Epoch") .0
} .checked_div(rhs)
self.0 = self.0 / rhs .expect("Cannot divide by zero-valued Slot/Epoch");
} }
} }
@ -129,7 +130,11 @@ macro_rules! impl_math_between {
fn rem(self, modulus: $other) -> $main { fn rem(self, modulus: $other) -> $main {
let modulus: u64 = modulus.into(); let modulus: u64 = modulus.into();
$main::from(self.0 % modulus) $main::from(
self.0
.checked_rem(modulus)
.expect("Cannot divide by zero-valued Slot/Epoch"),
)
} }
} }
}; };
@ -234,7 +239,7 @@ macro_rules! impl_ssz {
} }
fn tree_hash_packing_factor() -> usize { fn tree_hash_packing_factor() -> usize {
32 / 8 32usize.wrapping_div(8)
} }
fn tree_hash_root(&self) -> tree_hash::Hash256 { fn tree_hash_root(&self) -> tree_hash::Hash256 {

View File

@ -2,7 +2,6 @@ use super::super::{generate_deterministic_keypairs, KeypairsFile};
use crate::test_utils::{AttestationTestTask, TestingPendingAttestationBuilder}; use crate::test_utils::{AttestationTestTask, TestingPendingAttestationBuilder};
use crate::*; use crate::*;
use bls::get_withdrawal_credentials; use bls::get_withdrawal_credentials;
use dirs;
use log::debug; use log::debug;
use rayon::prelude::*; use rayon::prelude::*;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};

View File

@ -36,7 +36,7 @@ impl TestingDepositBuilder {
let mut secret_key = keypair.sk.clone(); let mut secret_key = keypair.sk.clone();
match test_task { match test_task {
DepositTestTask::BadPubKey => pubkeybytes = PublicKeyBytes::from(new_key.pk.clone()), DepositTestTask::BadPubKey => pubkeybytes = PublicKeyBytes::from(new_key.pk),
DepositTestTask::InvalidPubKey => { DepositTestTask::InvalidPubKey => {
// Creating invalid public key bytes // Creating invalid public key bytes
let mut public_key_bytes: Vec<u8> = vec![0; 48]; let mut public_key_bytes: Vec<u8> = vec![0; 48];

View File

@ -50,7 +50,7 @@ impl TestingProposerSlashingBuilder {
message: BeaconBlockHeader { message: BeaconBlockHeader {
parent_root: hash_2, parent_root: hash_2,
slot: slot_2, slot: slot_2,
..signed_header_1.message.clone() ..signed_header_1.message
}, },
signature: Signature::empty_signature(), signature: Signature::empty_signature(),
}; };

View File

@ -1,3 +1,5 @@
#![allow(clippy::integer_arithmetic)]
#[macro_use] #[macro_use]
mod macros; mod macros;
mod builders; mod builders;

View File

@ -32,12 +32,10 @@ impl CachedTreeHash<TreeHashCache> for Validator {
// Fields pubkey and withdrawal_credentials are constant // Fields pubkey and withdrawal_credentials are constant
if (i == 0 || i == 1) && cache.initialized { if (i == 0 || i == 1) && cache.initialized {
None None
} else if process_field_by_index(self, i, leaf, !cache.initialized) {
Some(i)
} else { } else {
if process_field_by_index(self, i, leaf, !cache.initialized) { None
Some(i)
} else {
None
}
} }
}) })
.collect(); .collect();

View File

@ -1,4 +1,3 @@
use hex;
use serde::de::Error; use serde::de::Error;
use serde::{Deserialize, Deserializer, Serializer}; use serde::{Deserialize, Deserializer, Serializer};

View File

@ -19,9 +19,7 @@ impl AggregatePublicKey {
pub fn from_bytes(bytes: &[u8]) -> Result<Self, DecodeError> { pub fn from_bytes(bytes: &[u8]) -> Result<Self, DecodeError> {
let pubkey = RawAggregatePublicKey::from_bytes(&bytes).map_err(|_| { let pubkey = RawAggregatePublicKey::from_bytes(&bytes).map_err(|_| {
DecodeError::BytesInvalid( DecodeError::BytesInvalid(format!("Invalid AggregatePublicKey bytes: {:?}", bytes))
format!("Invalid AggregatePublicKey bytes: {:?}", bytes).to_string(),
)
})?; })?;
Ok(AggregatePublicKey(pubkey)) Ok(AggregatePublicKey(pubkey))

View File

@ -164,8 +164,8 @@ impl TreeHashCache {
fn lift_dirty(dirty_indices: &[usize]) -> SmallVec8<usize> { fn lift_dirty(dirty_indices: &[usize]) -> SmallVec8<usize> {
let mut new_dirty = SmallVec8::with_capacity(dirty_indices.len()); let mut new_dirty = SmallVec8::with_capacity(dirty_indices.len());
for i in 0..dirty_indices.len() { for index in dirty_indices {
new_dirty.push(dirty_indices[i] / 2) new_dirty.push(index / 2)
} }
new_dirty.dedup(); new_dirty.dedup();

View File

@ -89,6 +89,7 @@ impl<T: Encode + Decode> CacheArena<T> {
/// To reiterate, the given `range` should be relative to the given `alloc_id`, not /// To reiterate, the given `range` should be relative to the given `alloc_id`, not
/// `self.backing`. E.g., if the allocation has an offset of `20` and the range is `0..1`, then /// `self.backing`. E.g., if the allocation has an offset of `20` and the range is `0..1`, then
/// the splice will translate to `self.backing[20..21]`. /// the splice will translate to `self.backing[20..21]`.
#[allow(clippy::comparison_chain)]
fn splice_forgetful<I: IntoIterator<Item = T>>( fn splice_forgetful<I: IntoIterator<Item = T>>(
&mut self, &mut self,
alloc_id: usize, alloc_id: usize,

View File

@ -8,6 +8,7 @@ edition = "2018"
ethereum-types = "0.8.0" ethereum-types = "0.8.0"
eth2_hashing = "0.1.0" eth2_hashing = "0.1.0"
lazy_static = "1.4.0" lazy_static = "1.4.0"
safe_arith = { path = "../safe_arith" }
[dev-dependencies] [dev-dependencies]
quickcheck = "0.9.0" quickcheck = "0.9.0"

View File

@ -1,6 +1,7 @@
use eth2_hashing::{hash, hash32_concat, ZERO_HASHES}; use eth2_hashing::{hash, hash32_concat, ZERO_HASHES};
use ethereum_types::H256; use ethereum_types::H256;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use safe_arith::ArithError;
const MAX_TREE_DEPTH: usize = 32; const MAX_TREE_DEPTH: usize = 32;
const EMPTY_SLICE: &[H256] = &[]; const EMPTY_SLICE: &[H256] = &[];
@ -38,6 +39,8 @@ pub enum MerkleTreeError {
Invalid, Invalid,
// Incorrect Depth provided // Incorrect Depth provided
DepthTooSmall, DepthTooSmall,
// Overflow occurred
ArithError,
} }
impl MerkleTree { impl MerkleTree {
@ -232,6 +235,12 @@ fn merkle_root_from_branch(leaf: H256, branch: &[H256], depth: usize, index: usi
H256::from_slice(&merkle_root) H256::from_slice(&merkle_root)
} }
impl From<ArithError> for MerkleTreeError {
fn from(_: ArithError) -> Self {
MerkleTreeError::ArithError
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;

View File

@ -0,0 +1,9 @@
[package]
name = "safe_arith"
version = "0.1.0"
authors = ["Michael Sproul <michael@sigmaprime.io>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

View File

@ -0,0 +1,161 @@
//! Library for safe arithmetic on integers, avoiding overflow and division by zero.
/// Error representing the failure of an arithmetic operation.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum ArithError {
    /// The result did not fit in the operand type's representable range.
    Overflow,
    /// A `/` or `%` was attempted with a divisor of zero.
    DivisionByZero,
}

/// Shorthand result type used by every `SafeArith` method.
type Result<T> = std::result::Result<T, ArithError>;
// Generates an in-place (`op=`) wrapper around one of the fallible `safe_*`
// methods: it assigns on success and forwards the `ArithError` on failure.
macro_rules! assign_method {
    // Common case: the right-hand side has the same type as `Self`.
    ($name:ident, $op:ident, $doc_op:expr) => {
        assign_method!($name, $op, Self, $doc_op);
    };
    // General case: explicit right-hand-side type (e.g. `u32` for shift amounts).
    // `$doc_op` is spliced into the doc comment via `#[doc = ...]`, which is the
    // only way to interpolate a macro argument into documentation text.
    ($name:ident, $op:ident, $rhs_ty:ty, $doc_op:expr) => {
        #[doc = "Safe variant of `"]
        #[doc = $doc_op]
        #[doc = "`."]
        fn $name(&mut self, other: $rhs_ty) -> Result<()> {
            // `?` returns before the assignment, so `self` is untouched on error.
            *self = self.$op(other)?;
            Ok(())
        }
    };
}
/// Trait providing safe arithmetic operations for built-in types.
///
/// Every method returns `Err(ArithError)` instead of panicking or wrapping,
/// making arithmetic failures explicit at the call site.
pub trait SafeArith: Sized + Copy {
    /// The additive identity (`0`) of this type.
    const ZERO: Self;
    /// The multiplicative identity (`1`) of this type.
    const ONE: Self;

    /// Safe variant of `+` that guards against overflow.
    fn safe_add(&self, other: Self) -> Result<Self>;

    /// Safe variant of `-` that guards against overflow.
    fn safe_sub(&self, other: Self) -> Result<Self>;

    /// Safe variant of `*` that guards against overflow.
    fn safe_mul(&self, other: Self) -> Result<Self>;

    /// Safe variant of `/` that guards against division by 0.
    fn safe_div(&self, other: Self) -> Result<Self>;

    /// Safe variant of `%` that guards against division by 0.
    fn safe_rem(&self, other: Self) -> Result<Self>;

    /// Safe variant of `<<` that guards against overflow.
    fn safe_shl(&self, other: u32) -> Result<Self>;

    /// Safe variant of `>>` that guards against overflow.
    fn safe_shr(&self, other: u32) -> Result<Self>;

    // In-place `op=` variants generated from the fallible methods above;
    // each assigns on success and leaves `self` unchanged on error.
    assign_method!(safe_add_assign, safe_add, "+=");
    assign_method!(safe_sub_assign, safe_sub, "-=");
    assign_method!(safe_mul_assign, safe_mul, "*=");
    assign_method!(safe_div_assign, safe_div, "/=");
    assign_method!(safe_rem_assign, safe_rem, "%=");
    assign_method!(safe_shl_assign, safe_shl, u32, "<<=");
    assign_method!(safe_shr_assign, safe_shr, u32, ">>=");

    /// Mutate `self` by adding 1, erroring on overflow.
    fn increment(&mut self) -> Result<()> {
        self.safe_add_assign(Self::ONE)
    }
}
// Implements `SafeArith` for a primitive integer type by delegating to the
// standard library's `checked_*` operations and mapping `None` to `ArithError`.
macro_rules! impl_safe_arith {
    ($typ:ty) => {
        impl SafeArith for $typ {
            const ZERO: Self = 0;
            const ONE: Self = 1;

            fn safe_add(&self, other: Self) -> Result<Self> {
                self.checked_add(other).ok_or(ArithError::Overflow)
            }

            fn safe_sub(&self, other: Self) -> Result<Self> {
                self.checked_sub(other).ok_or(ArithError::Overflow)
            }

            fn safe_mul(&self, other: Self) -> Result<Self> {
                self.checked_mul(other).ok_or(ArithError::Overflow)
            }

            fn safe_div(&self, other: Self) -> Result<Self> {
                // `checked_div` yields `None` both for a zero divisor and for
                // signed overflow (`MIN / -1`); report the accurate cause
                // instead of labelling every failure `DivisionByZero`.
                self.checked_div(other).ok_or_else(|| {
                    if other == 0 {
                        ArithError::DivisionByZero
                    } else {
                        ArithError::Overflow
                    }
                })
            }

            fn safe_rem(&self, other: Self) -> Result<Self> {
                // Same two `None` cases as `checked_div`.
                self.checked_rem(other).ok_or_else(|| {
                    if other == 0 {
                        ArithError::DivisionByZero
                    } else {
                        ArithError::Overflow
                    }
                })
            }

            fn safe_shl(&self, other: u32) -> Result<Self> {
                self.checked_shl(other).ok_or(ArithError::Overflow)
            }

            fn safe_shr(&self, other: u32) -> Result<Self> {
                self.checked_shr(other).ok_or(ArithError::Overflow)
            }
        }
    };
}
// Provide `SafeArith` for every built-in integer width, signed and unsigned.
impl_safe_arith!(u8);
impl_safe_arith!(u16);
impl_safe_arith!(u32);
impl_safe_arith!(u64);
impl_safe_arith!(usize);
impl_safe_arith!(i8);
impl_safe_arith!(i16);
impl_safe_arith!(i32);
impl_safe_arith!(i64);
impl_safe_arith!(isize);
#[cfg(test)]
mod test {
    use super::*;

    /// Each safe operation agrees with its unchecked counterpart when no
    /// overflow or zero divisor is involved.
    #[test]
    fn basic() {
        let (a, b) = (10u32, 11u32);
        assert_eq!(a.safe_add(b), Ok(a + b));
        assert_eq!(b.safe_sub(a), Ok(b - a));
        assert_eq!(a.safe_mul(b), Ok(a * b));
        assert_eq!(a.safe_div(b), Ok(a / b));
        assert_eq!(a.safe_rem(b), Ok(a % b));
        assert_eq!(a.safe_shl(1), Ok(a << 1));
        assert_eq!(a.safe_shr(1), Ok(a >> 1));
    }

    /// The `_assign` variants and `increment` mutate in place on success.
    #[test]
    fn mutate() {
        let mut v = 0u8;
        v.increment().unwrap();
        v.increment().unwrap();
        assert_eq!(v, 2);
        v.safe_sub_assign(1).unwrap();
        assert_eq!(v, 1);
        v.safe_shl_assign(1).unwrap();
        assert_eq!(v, 2);
        v.safe_mul_assign(3).unwrap();
        assert_eq!(v, 6);
        v.safe_div_assign(4).unwrap();
        assert_eq!(v, 1);
        v.safe_shr_assign(1).unwrap();
        assert_eq!(v, 0);
    }

    /// Out-of-range operands must produce an `Err` rather than panicking.
    #[test]
    fn errors() {
        let max = u32::max_value();
        assert!(max.safe_add(1).is_err());
        assert!(0u32.safe_sub(1).is_err());
        assert!(max.safe_mul(2).is_err());
        assert!(max.safe_div(0).is_err());
        assert!(max.safe_rem(0).is_err());
        assert!(max.safe_shl(32).is_err());
        assert!(max.safe_shr(32).is_err());
    }
}

View File

@ -1,4 +1,3 @@
use hex;
use hex::ToHex; use hex::ToHex;
use serde::de::{self, Visitor}; use serde::de::{self, Visitor};
use std::fmt; use std::fmt;

View File

@ -86,6 +86,7 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
let field_types_f = field_types_a.clone(); let field_types_f = field_types_a.clone();
let output = quote! { let output = quote! {
#[allow(clippy::integer_arithmetic)]
impl #impl_generics ssz::Encode for #name #ty_generics #where_clause { impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
fn is_ssz_fixed_len() -> bool { fn is_ssz_fixed_len() -> bool {
#( #(
@ -221,6 +222,7 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
} }
let output = quote! { let output = quote! {
#[allow(clippy::integer_arithmetic)]
impl #impl_generics ssz::Decode for #name #ty_generics #where_clause { impl #impl_generics ssz::Decode for #name #ty_generics #where_clause {
fn is_ssz_fixed_len() -> bool { fn is_ssz_fixed_len() -> bool {
#( #(

View File

@ -274,22 +274,20 @@ impl MerkleHasher {
loop { loop {
if let Some(root) = self.root { if let Some(root) = self.root {
break Ok(root); break Ok(root);
} else if let Some(node) = self.half_nodes.last() {
let right_child = node.id * 2 + 1;
self.process_right_node(right_child, self.zero_hash(right_child));
} else if self.next_leaf == 1 {
// The next_leaf can only be 1 if the tree has a depth of one. If have been no
// leaves supplied, assume a root of zero.
break Ok(Hash256::zero());
} else { } else {
if let Some(node) = self.half_nodes.last() { // The only scenario where there are (a) no half nodes and (b) a tree of depth
let right_child = node.id * 2 + 1; // two or more is where no leaves have been supplied at all.
self.process_right_node(right_child, self.zero_hash(right_child)); //
} else if self.next_leaf == 1 { // Once we supply this first zero-hash leaf then all future operations will be
// The next_leaf can only be 1 if the tree has a depth of one. If have been no // triggered via the `process_right_node` branch.
// leaves supplied, assume a root of zero. self.process_left_node(self.next_leaf, self.zero_hash(self.next_leaf))
break Ok(Hash256::zero());
} else {
// The only scenario where there are (a) no half nodes and (b) a tree of depth
// two or more is where no leaves have been supplied at all.
//
// Once we supply this first zero-hash leaf then all future operations will be
// triggered via the `process_right_node` branch.
self.process_left_node(self.next_leaf, self.zero_hash(self.next_leaf))
}
} }
} }
} }