diff --git a/beacon_node/src/config.rs b/beacon_node/src/config.rs
index 83757fc7c..f0721bce3 100644
--- a/beacon_node/src/config.rs
+++ b/beacon_node/src/config.rs
@@ -21,7 +21,6 @@ use types::{ChainSpec, Checkpoint, Epoch, EthSpec, Hash256, GRAFFITI_BYTES_LEN};
 /// The output of this function depends primarily upon the given `cli_args`, however it's behaviour
 /// may be influenced by other external services like the contents of the file system or the
 /// response of some remote server.
-#[allow(clippy::cognitive_complexity)]
 pub fn get_config(
     cli_args: &ArgMatches,
     spec_constants: &str,
diff --git a/consensus/cached_tree_hash/src/cache_arena.rs b/consensus/cached_tree_hash/src/cache_arena.rs
index b6ade1474..17a23ed0c 100644
--- a/consensus/cached_tree_hash/src/cache_arena.rs
+++ b/consensus/cached_tree_hash/src/cache_arena.rs
@@ -1,6 +1,7 @@
 use crate::SmallVec8;
 use ssz::{Decode, Encode};
 use ssz_derive::{Decode, Encode};
+use std::cmp::Ordering;
 use std::marker::PhantomData;
 use std::ops::Range;
 
@@ -89,7 +90,6 @@ impl CacheArena {
     /// To reiterate, the given `range` should be relative to the given `alloc_id`, not
     /// `self.backing`. E.g., if the allocation has an offset of `20` and the range is `0..1`, then
     /// the splice will translate to `self.backing[20..21]`.
-    #[allow(clippy::comparison_chain)]
     fn splice_forgetful<I: IntoIterator<Item = T>>(
         &mut self,
         alloc_id: usize,
@@ -113,10 +113,10 @@ impl CacheArena {
 
         self.backing.splice(start..end, replace_with);
 
-        if prev_len < self.backing.len() {
-            self.grow(alloc_id, self.backing.len() - prev_len)?;
-        } else if prev_len > self.backing.len() {
-            self.shrink(alloc_id, prev_len - self.backing.len())?;
+        match prev_len.cmp(&self.backing.len()) {
+            Ordering::Greater => self.shrink(alloc_id, prev_len - self.backing.len())?,
+            Ordering::Less => self.grow(alloc_id, self.backing.len() - prev_len)?,
+            Ordering::Equal => {}
         }
 
         Ok(())
diff --git a/consensus/fork_choice/src/fork_choice.rs b/consensus/fork_choice/src/fork_choice.rs
index f6c43ae42..ae4d06a60 100644
--- a/consensus/fork_choice/src/fork_choice.rs
+++ b/consensus/fork_choice/src/fork_choice.rs
@@ -300,7 +300,6 @@ where
     /// Equivalent to:
     ///
    /// https://github.com/ethereum/eth2.0-specs/blob/v0.12.1/specs/phase0/fork-choice.md#get_ancestor
-    #[allow(clippy::if_same_then_else)]
     fn get_ancestor(
         &self,
         block_root: Hash256,
diff --git a/consensus/ssz/tests/tests.rs b/consensus/ssz/tests/tests.rs
index 2eada5c51..049ee30c8 100644
--- a/consensus/ssz/tests/tests.rs
+++ b/consensus/ssz/tests/tests.rs
@@ -2,7 +2,6 @@
 use ethereum_types::H256;
 use ssz::{Decode, DecodeError, Encode};
 use ssz_derive::{Decode, Encode};
 
-#[allow(clippy::zero_prefixed_literal)]
 mod round_trip {
     use super::*;
diff --git a/consensus/ssz_derive/src/lib.rs b/consensus/ssz_derive/src/lib.rs
index cf3a2399a..cee167012 100644
--- a/consensus/ssz_derive/src/lib.rs
+++ b/consensus/ssz_derive/src/lib.rs
@@ -87,7 +87,6 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
     let field_types_f = field_types_a.clone();
 
     let output = quote! {
-        #[allow(clippy::integer_arithmetic)]
         impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
             fn is_ssz_fixed_len() -> bool {
                 #(
@@ -98,10 +97,13 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
 
             fn ssz_fixed_len() -> usize {
                 if <Self as ssz::Encode>::is_ssz_fixed_len() {
+                    let mut len: usize = 0;
                     #(
-                        <#field_types_b as ssz::Encode>::ssz_fixed_len() +
+                        len = len
+                            .checked_add(<#field_types_b as ssz::Encode>::ssz_fixed_len())
+                            .expect("encode ssz_fixed_len length overflow");
                     )*
-                    0
+                    len
                 } else {
                     ssz::BYTES_PER_LENGTH_OFFSET
                 }
@@ -111,13 +113,19 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
                 if <Self as ssz::Encode>::is_ssz_fixed_len() {
                     <Self as ssz::Encode>::ssz_fixed_len()
                 } else {
-                    let mut len = 0;
+                    let mut len: usize = 0;
                     #(
                         if <#field_types_d as ssz::Encode>::is_ssz_fixed_len() {
-                            len += <#field_types_e as ssz::Encode>::ssz_fixed_len();
+                            len = len
+                                .checked_add(<#field_types_e as ssz::Encode>::ssz_fixed_len())
+                                .expect("encode ssz_bytes_len length overflow");
                         } else {
-                            len += ssz::BYTES_PER_LENGTH_OFFSET;
-                            len += self.#field_idents_a.ssz_bytes_len();
+                            len = len
+                                .checked_add(ssz::BYTES_PER_LENGTH_OFFSET)
+                                .expect("encode ssz_bytes_len length overflow for offset");
+                            len = len
+                                .checked_add(self.#field_idents_a.ssz_bytes_len())
+                                .expect("encode ssz_bytes_len length overflow for bytes");
                         }
                     )*
 
@@ -126,10 +134,12 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
             }
 
             fn ssz_append(&self, buf: &mut Vec<u8>) {
-                let offset = #(
-                    <#field_types_f as ssz::Encode>::ssz_fixed_len() +
-                )*
-                    0;
+                let mut offset: usize = 0;
+                #(
+                    offset = offset
+                        .checked_add(<#field_types_f as ssz::Encode>::ssz_fixed_len())
+                        .expect("encode ssz_append offset overflow");
+                )*
 
                 let mut encoder = ssz::SszEncoder::container(buf, offset);
 
@@ -229,7 +239,6 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
     }
 
     let output = quote! {
-        #[allow(clippy::integer_arithmetic)]
         impl #impl_generics ssz::Decode for #name #ty_generics #where_clause {
             fn is_ssz_fixed_len() -> bool {
                 #(
@@ -240,10 +249,13 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
 
             fn ssz_fixed_len() -> usize {
                 if <Self as ssz::Decode>::is_ssz_fixed_len() {
+                    let mut len: usize = 0;
                     #(
-                        #fixed_lens +
+                        len = len
+                            .checked_add(#fixed_lens)
+                            .expect("decode ssz_fixed_len overflow");
                     )*
-                    0
+                    len
                 } else {
                     ssz::BYTES_PER_LENGTH_OFFSET
                 }
@@ -258,13 +270,17 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
                         });
                     }
 
-                    let mut start = 0;
+                    let mut start: usize = 0;
                     let mut end = start;
 
                     macro_rules! decode_field {
                         ($type: ty) => {{
                             start = end;
-                            end += <$type as ssz::Decode>::ssz_fixed_len();
+                            end = end
+                                .checked_add(<$type as ssz::Decode>::ssz_fixed_len())
+                                .ok_or_else(|| ssz::DecodeError::OutOfBoundsByte {
+                                    i: usize::max_value()
+                                })?;
                             let slice = bytes.get(start..end)
                                 .ok_or_else(|| ssz::DecodeError::InvalidByteLength {
                                     len: bytes.len(),
diff --git a/consensus/tree_hash/src/impls.rs b/consensus/tree_hash/src/impls.rs
index ebd7e1be7..7fdcfceb7 100644
--- a/consensus/tree_hash/src/impls.rs
+++ b/consensus/tree_hash/src/impls.rs
@@ -22,7 +22,7 @@ macro_rules! impl_for_bitsize {
                 HASHSIZE / ($bit_size / 8)
             }
 
-            #[allow(clippy::cast_lossless)]
+            #[allow(clippy::cast_lossless)] // Lint does not apply to all uses of this macro.
             fn tree_hash_root(&self) -> Hash256 {
                 int_to_hash256(*self as u64)
             }
diff --git a/consensus/types/src/beacon_state/pubkey_cache.rs b/consensus/types/src/beacon_state/pubkey_cache.rs
index 6b9f8e838..42fc6cc95 100644
--- a/consensus/types/src/beacon_state/pubkey_cache.rs
+++ b/consensus/types/src/beacon_state/pubkey_cache.rs
@@ -23,11 +23,13 @@ impl PubkeyCache {
     ///
     /// The added index must equal the number of validators already added to the map. This ensures
     /// that an index is never skipped.
-    #[allow(clippy::integer_arithmetic)]
     pub fn insert(&mut self, pubkey: PublicKeyBytes, index: ValidatorIndex) -> bool {
         if index == self.len {
             self.map.insert(pubkey, index);
-            self.len += 1;
+            self.len = self
+                .len
+                .checked_add(1)
+                .expect("map length cannot exceed usize");
             true
         } else {
             false
diff --git a/consensus/types/src/chain_spec.rs b/consensus/types/src/chain_spec.rs
index 4cf56afe0..985b30ea1 100644
--- a/consensus/types/src/chain_spec.rs
+++ b/consensus/types/src/chain_spec.rs
@@ -587,9 +587,13 @@ impl Default for YamlConfig {
     }
 }
 
+#[allow(clippy::integer_arithmetic)] // Arith cannot overflow or panic.
+fn milliseconds_to_seconds(millis: u64) -> u64 {
+    millis / 1000
+}
+
 /// Spec v0.12.1
 impl YamlConfig {
-    #[allow(clippy::integer_arithmetic)]
     pub fn from_spec<T: EthSpec>(spec: &ChainSpec) -> Self {
         Self {
             config_name: T::spec_name().to_string(),
@@ -611,7 +615,7 @@ impl YamlConfig {
             hysteresis_upward_multiplier: spec.hysteresis_upward_multiplier,
             proportional_slashing_multiplier: spec.proportional_slashing_multiplier,
             bls_withdrawal_prefix: spec.bls_withdrawal_prefix_byte,
-            seconds_per_slot: spec.milliseconds_per_slot / 1000,
+            seconds_per_slot: milliseconds_to_seconds(spec.milliseconds_per_slot),
             min_attestation_inclusion_delay: spec.min_attestation_inclusion_delay,
             min_seed_lookahead: spec.min_seed_lookahead.into(),
             max_seed_lookahead: spec.max_seed_lookahead.into(),
diff --git a/consensus/types/src/graffiti.rs b/consensus/types/src/graffiti.rs
index f35df9383..15cf089c5 100644
--- a/consensus/types/src/graffiti.rs
+++ b/consensus/types/src/graffiti.rs
@@ -18,7 +18,7 @@ pub struct Graffiti(#[serde(with = "serde_graffiti")] pub [u8; GRAFFITI_BYTES_LE
 
 impl Graffiti {
     pub fn as_utf8_lossy(&self) -> String {
-        #[allow(clippy::invalid_regex)]
+        #[allow(clippy::invalid_regex)] // This is a false positive, this regex is valid.
         let re = Regex::new("\\p{C}").expect("graffiti regex is valid");
         String::from_utf8_lossy(&re.replace_all(&self.0[..], &b""[..])).to_string()
     }
diff --git a/validator_client/src/initialized_validators.rs b/validator_client/src/initialized_validators.rs
index 09fd2ae9d..93736e22e 100644
--- a/validator_client/src/initialized_validators.rs
+++ b/validator_client/src/initialized_validators.rs
@@ -414,7 +414,6 @@ impl InitializedValidators {
     /// validator will be removed from `self.validators`.
     ///
     /// Saves the `ValidatorDefinitions` to file, even if no definitions were changed.
-    #[allow(dead_code)] // Will be used once VC API is enabled.
     pub async fn set_validator_status(
         &mut self,
         voting_public_key: &PublicKey,
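
For readers unfamiliar with the two idioms this patch standardises on, the standalone sketch below (not part of the change; the function names are hypothetical) illustrates overflow-checked accumulation via `checked_add`/`expect`, as used in the derive macros and caches above, and the three-way `match` on `std::cmp::Ordering` that replaces the `if`/`else if` comparison chain in `splice_forgetful`.

use std::cmp::Ordering;

// Accumulate a total length, panicking with a descriptive message on overflow
// instead of silently wrapping.
fn total_fixed_len(field_lens: &[usize]) -> usize {
    let mut len: usize = 0;
    for &field_len in field_lens {
        len = len
            .checked_add(field_len)
            .expect("length overflow");
    }
    len
}

// Compare old and new lengths via `Ordering` rather than `<` / `>` branches,
// which is the shape clippy's `comparison_chain` lint prefers.
fn resize_action(prev_len: usize, new_len: usize) -> &'static str {
    match prev_len.cmp(&new_len) {
        Ordering::Greater => "shrink",
        Ordering::Less => "grow",
        Ordering::Equal => "no-op",
    }
}

fn main() {
    assert_eq!(total_fixed_len(&[32, 8, 4]), 44);
    assert_eq!(resize_action(10, 12), "grow");
    assert_eq!(resize_action(12, 10), "shrink");
}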