Address clippy lints, panic in ssz_derive on overflow (#1714)
## Issue Addressed

NA

## Proposed Changes

- Panic or return an error if we overflow `usize` in the SSZ decoding/encoding derive macros (sketched below).
- I claim that the panics can only be triggered by a faulty type definition in Lighthouse; they cannot be triggered externally on a validly defined struct.
- Use `Ordering` instead of some `if` statements, as demanded by clippy.
- Remove some old clippy `allow`s that no longer seem to be required.
- Add comments to the interesting clippy lints that we're going to continue to ignore.
- Create #1713.

## Additional Info

NA
parent eba51f0973
commit f157d61cc7
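The gist of the change, before the per-file hunks below: encoding-side length arithmetic now panics on `usize` overflow (reachable only via a faulty type definition), while decoding-side arithmetic returns a `DecodeError` because it handles untrusted input. A minimal, self-contained sketch of that pattern — `FakeDecodeError`, `fixed_len_sum` and `advance_offset` are illustrative stand-ins, not Lighthouse code or real macro output:

```rust
// Sketch of the overflow-checked length arithmetic this commit makes the
// derive macros generate. Everything here is a stand-in for illustration.
#[derive(Debug)]
enum FakeDecodeError {
    OutOfBoundsByte { i: usize },
}

/// Encoding side: a length overflow indicates a faulty type definition,
/// so panicking via `expect` is acceptable.
fn fixed_len_sum(field_lens: &[usize]) -> usize {
    field_lens.iter().fold(0usize, |len, field| {
        len.checked_add(*field)
            .expect("encode ssz_fixed_len length overflow")
    })
}

/// Decoding side: input is untrusted, so return an error instead of panicking.
fn advance_offset(end: usize, field_len: usize) -> Result<usize, FakeDecodeError> {
    end.checked_add(field_len)
        .ok_or(FakeDecodeError::OutOfBoundsByte {
            i: usize::max_value(),
        })
}

fn main() {
    assert_eq!(fixed_len_sum(&[8, 32, 4]), 44);
    if let Err(FakeDecodeError::OutOfBoundsByte { i }) = advance_offset(usize::max_value(), 1) {
        println!("decode offset overflow reported near index {}", i);
    }
}
```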
@@ -21,7 +21,6 @@ use types::{ChainSpec, Checkpoint, Epoch, EthSpec, Hash256, GRAFFITI_BYTES_LEN};
 /// The output of this function depends primarily upon the given `cli_args`, however it's behaviour
 /// may be influenced by other external services like the contents of the file system or the
 /// response of some remote server.
-#[allow(clippy::cognitive_complexity)]
 pub fn get_config<E: EthSpec>(
     cli_args: &ArgMatches,
     spec_constants: &str,
@@ -1,6 +1,7 @@
 use crate::SmallVec8;
 use ssz::{Decode, Encode};
 use ssz_derive::{Decode, Encode};
+use std::cmp::Ordering;
 use std::marker::PhantomData;
 use std::ops::Range;
 
@@ -89,7 +90,6 @@ impl<T: Encode + Decode> CacheArena<T> {
     /// To reiterate, the given `range` should be relative to the given `alloc_id`, not
     /// `self.backing`. E.g., if the allocation has an offset of `20` and the range is `0..1`, then
     /// the splice will translate to `self.backing[20..21]`.
-    #[allow(clippy::comparison_chain)]
     fn splice_forgetful<I: IntoIterator<Item = T>>(
         &mut self,
         alloc_id: usize,
@@ -113,10 +113,10 @@ impl<T: Encode + Decode> CacheArena<T> {
 
         self.backing.splice(start..end, replace_with);
 
-        if prev_len < self.backing.len() {
-            self.grow(alloc_id, self.backing.len() - prev_len)?;
-        } else if prev_len > self.backing.len() {
-            self.shrink(alloc_id, prev_len - self.backing.len())?;
+        match prev_len.cmp(&self.backing.len()) {
+            Ordering::Greater => self.shrink(alloc_id, prev_len - self.backing.len())?,
+            Ordering::Less => self.grow(alloc_id, self.backing.len() - prev_len)?,
+            Ordering::Equal => {}
         }
 
         Ok(())
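For reference, the `clippy::comparison_chain` lint asks for exactly this shape: a single `match` on `Ordering` instead of an `if`/`else if` comparison chain. A standalone sketch of the pattern — the function and strings are illustrative, not from the repo:

```rust
use std::cmp::Ordering;

// Same `match x.cmp(&y)` shape that replaces the comparison chain in
// `splice_forgetful` above.
fn resize_description(prev_len: usize, new_len: usize) -> String {
    match prev_len.cmp(&new_len) {
        Ordering::Greater => format!("shrink by {}", prev_len - new_len),
        Ordering::Less => format!("grow by {}", new_len - prev_len),
        Ordering::Equal => "no change".to_string(),
    }
}

fn main() {
    assert_eq!(resize_description(10, 7), "shrink by 3");
    assert_eq!(resize_description(7, 10), "grow by 3");
    assert_eq!(resize_description(5, 5), "no change");
}
```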
@@ -300,7 +300,6 @@ where
     /// Equivalent to:
     ///
    /// https://github.com/ethereum/eth2.0-specs/blob/v0.12.1/specs/phase0/fork-choice.md#get_ancestor
-   #[allow(clippy::if_same_then_else)]
    fn get_ancestor(
        &self,
        block_root: Hash256,
@@ -2,7 +2,6 @@ use ethereum_types::H256;
 use ssz::{Decode, DecodeError, Encode};
 use ssz_derive::{Decode, Encode};
 
-#[allow(clippy::zero_prefixed_literal)]
 mod round_trip {
     use super::*;
 
@@ -87,7 +87,6 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
     let field_types_f = field_types_a.clone();
 
     let output = quote! {
-        #[allow(clippy::integer_arithmetic)]
         impl #impl_generics ssz::Encode for #name #ty_generics #where_clause {
             fn is_ssz_fixed_len() -> bool {
                 #(
@@ -98,10 +97,13 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
 
             fn ssz_fixed_len() -> usize {
                 if <Self as ssz::Encode>::is_ssz_fixed_len() {
+                    let mut len: usize = 0;
                     #(
-                        <#field_types_b as ssz::Encode>::ssz_fixed_len() +
+                        len = len
+                            .checked_add(<#field_types_b as ssz::Encode>::ssz_fixed_len())
+                            .expect("encode ssz_fixed_len length overflow");
                     )*
-                    0
+                    len
                 } else {
                     ssz::BYTES_PER_LENGTH_OFFSET
                 }
@@ -111,13 +113,19 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
                 if <Self as ssz::Encode>::is_ssz_fixed_len() {
                     <Self as ssz::Encode>::ssz_fixed_len()
                 } else {
-                    let mut len = 0;
+                    let mut len: usize = 0;
                     #(
                         if <#field_types_d as ssz::Encode>::is_ssz_fixed_len() {
-                            len += <#field_types_e as ssz::Encode>::ssz_fixed_len();
+                            len = len
+                                .checked_add(<#field_types_e as ssz::Encode>::ssz_fixed_len())
+                                .expect("encode ssz_bytes_len length overflow");
                         } else {
-                            len += ssz::BYTES_PER_LENGTH_OFFSET;
-                            len += self.#field_idents_a.ssz_bytes_len();
+                            len = len
+                                .checked_add(ssz::BYTES_PER_LENGTH_OFFSET)
+                                .expect("encode ssz_bytes_len length overflow for offset");
+                            len = len
+                                .checked_add(self.#field_idents_a.ssz_bytes_len())
+                                .expect("encode ssz_bytes_len length overflow for bytes");
                         }
                     )*
 
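To see roughly what the generated `ssz_bytes_len` expands to after this hunk, here is a hand-written sketch for a hypothetical two-field struct; the `Example` struct, its field names, and the local `BYTES_PER_LENGTH_OFFSET` constant are stand-ins, not real macro output:

```rust
// Fixed-length fields contribute their fixed size; variable-length fields
// contribute a 4-byte length offset plus their actual bytes. Every addition
// is overflow-checked.
const BYTES_PER_LENGTH_OFFSET: usize = 4;

struct Example {
    slot: u64,     // fixed-length field
    data: Vec<u8>, // variable-length field
}

impl Example {
    fn ssz_bytes_len(&self) -> usize {
        let mut len: usize = 0;
        // Fixed-length field: just its fixed size.
        len = len
            .checked_add(std::mem::size_of::<u64>())
            .expect("encode ssz_bytes_len length overflow");
        // Variable-length field: a length offset plus the bytes themselves.
        len = len
            .checked_add(BYTES_PER_LENGTH_OFFSET)
            .expect("encode ssz_bytes_len length overflow for offset");
        len = len
            .checked_add(self.data.len())
            .expect("encode ssz_bytes_len length overflow for bytes");
        len
    }
}

fn main() {
    let e = Example { slot: 1, data: vec![0; 10] };
    assert_eq!(e.ssz_bytes_len(), 8 + 4 + 10);
    println!("slot {} occupies {} SSZ bytes", e.slot, e.ssz_bytes_len());
}
```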
@@ -126,10 +134,12 @@ pub fn ssz_encode_derive(input: TokenStream) -> TokenStream {
             }
 
             fn ssz_append(&self, buf: &mut Vec<u8>) {
-                let offset = #(
-                    <#field_types_f as ssz::Encode>::ssz_fixed_len() +
+                let mut offset: usize = 0;
+                #(
+                    offset = offset
+                        .checked_add(<#field_types_f as ssz::Encode>::ssz_fixed_len())
+                        .expect("encode ssz_append offset overflow");
                 )*
-                0;
 
                 let mut encoder = ssz::SszEncoder::container(buf, offset);
 
@@ -229,7 +239,6 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
     }
 
     let output = quote! {
-        #[allow(clippy::integer_arithmetic)]
         impl #impl_generics ssz::Decode for #name #ty_generics #where_clause {
             fn is_ssz_fixed_len() -> bool {
                 #(
@@ -240,10 +249,13 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
 
             fn ssz_fixed_len() -> usize {
                 if <Self as ssz::Decode>::is_ssz_fixed_len() {
+                    let mut len: usize = 0;
                     #(
-                        #fixed_lens +
+                        len = len
+                            .checked_add(#fixed_lens)
+                            .expect("decode ssz_fixed_len overflow");
                     )*
-                    0
+                    len
                 } else {
                     ssz::BYTES_PER_LENGTH_OFFSET
                 }
@@ -258,13 +270,17 @@ pub fn ssz_decode_derive(input: TokenStream) -> TokenStream {
                     });
                 }
 
-                let mut start = 0;
+                let mut start: usize = 0;
                 let mut end = start;
 
                 macro_rules! decode_field {
                     ($type: ty) => {{
                         start = end;
-                        end += <$type as ssz::Decode>::ssz_fixed_len();
+                        end = end
+                            .checked_add(<$type as ssz::Decode>::ssz_fixed_len())
+                            .ok_or_else(|| ssz::DecodeError::OutOfBoundsByte {
+                                i: usize::max_value()
+                            })?;
                         let slice = bytes.get(start..end)
                             .ok_or_else(|| ssz::DecodeError::InvalidByteLength {
                                 len: bytes.len(),
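The decoding side cannot panic on untrusted input, so the cursor advance returns an error instead. A self-contained sketch of the same bounds-checked pattern, with a stand-in `Error` enum and `read_u64` helper in place of the real `ssz::DecodeError` and macro output:

```rust
#[derive(Debug, PartialEq)]
enum Error {
    OutOfBoundsByte { i: usize },
    InvalidByteLength { len: usize, expected: usize },
}

fn read_u64(bytes: &[u8], start: &mut usize) -> Result<u64, Error> {
    // Advance the cursor with a checked add; overflow becomes an error.
    let end = (*start)
        .checked_add(8)
        .ok_or(Error::OutOfBoundsByte { i: usize::max_value() })?;
    // Bounds-checked slice; short input becomes an error, not a panic.
    let slice = bytes
        .get(*start..end)
        .ok_or(Error::InvalidByteLength { len: bytes.len(), expected: end })?;
    *start = end;
    let mut buf = [0u8; 8];
    buf.copy_from_slice(slice);
    Ok(u64::from_le_bytes(buf))
}

fn main() {
    let bytes = 42u64.to_le_bytes();
    let mut cursor = 0;
    assert_eq!(read_u64(&bytes, &mut cursor), Ok(42));
    // A second read runs past the end of the input and reports the shortfall.
    match read_u64(&bytes, &mut cursor) {
        Err(Error::InvalidByteLength { len, expected }) => {
            println!("needed {} bytes, only had {}", expected, len)
        }
        Err(Error::OutOfBoundsByte { i }) => println!("offset overflow near index {}", i),
        Ok(v) => println!("unexpectedly read {}", v),
    }
}
```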
@@ -22,7 +22,7 @@ macro_rules! impl_for_bitsize {
                 HASHSIZE / ($bit_size / 8)
             }
 
-            #[allow(clippy::cast_lossless)]
+            #[allow(clippy::cast_lossless)] // Lint does not apply to all uses of this macro.
             fn tree_hash_root(&self) -> Hash256 {
                 int_to_hash256(*self as u64)
             }
@@ -23,11 +23,13 @@ impl PubkeyCache {
     ///
     /// The added index must equal the number of validators already added to the map. This ensures
     /// that an index is never skipped.
-    #[allow(clippy::integer_arithmetic)]
     pub fn insert(&mut self, pubkey: PublicKeyBytes, index: ValidatorIndex) -> bool {
         if index == self.len {
             self.map.insert(pubkey, index);
-            self.len += 1;
+            self.len = self
+                .len
+                .checked_add(1)
+                .expect("map length cannot exceed usize");
             true
         } else {
             false
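A minimal stand-in for the `PubkeyCache::insert` contract shown above, assuming a simple `HashMap`-backed cache (the `Cache` type, key type, and field names are illustrative): an insert only succeeds when `index` equals the current length, so indices are never skipped, and the length is bumped with a checked add.

```rust
use std::collections::HashMap;

struct Cache {
    len: usize,
    map: HashMap<String, usize>,
}

impl Cache {
    fn insert(&mut self, pubkey: String, index: usize) -> bool {
        if index == self.len {
            self.map.insert(pubkey, index);
            // Checked increment: the map cannot hold more than `usize::MAX`
            // entries, so this expect can never fire in practice.
            self.len = self
                .len
                .checked_add(1)
                .expect("map length cannot exceed usize");
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut cache = Cache { len: 0, map: HashMap::new() };
    assert!(cache.insert("validator-0".into(), 0));
    assert!(!cache.insert("validator-2".into(), 2)); // index 1 would be skipped
    assert_eq!(cache.map.len(), 1);
}
```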
@@ -587,9 +587,13 @@ impl Default for YamlConfig {
     }
 }
 
+#[allow(clippy::integer_arithmetic)] // Arith cannot overflow or panic.
+fn milliseconds_to_seconds(millis: u64) -> u64 {
+    millis / 1000
+}
+
 /// Spec v0.12.1
 impl YamlConfig {
-    #[allow(clippy::integer_arithmetic)]
     pub fn from_spec<T: EthSpec>(spec: &ChainSpec) -> Self {
         Self {
             config_name: T::spec_name().to_string(),
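The point of the helper is scoping: division by a non-zero constant cannot overflow or panic, so the `#[allow]` now covers one trivial function instead of the whole `from_spec` method. A sketch of the helper in isolation, with a throwaway `main` for illustration:

```rust
#[allow(clippy::integer_arithmetic)] // Division by a non-zero constant cannot overflow or panic.
fn milliseconds_to_seconds(millis: u64) -> u64 {
    millis / 1000
}

fn main() {
    assert_eq!(milliseconds_to_seconds(12_000), 12);
    assert_eq!(milliseconds_to_seconds(500), 0); // truncating division
}
```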
@@ -611,7 +615,7 @@ impl YamlConfig {
             hysteresis_upward_multiplier: spec.hysteresis_upward_multiplier,
             proportional_slashing_multiplier: spec.proportional_slashing_multiplier,
             bls_withdrawal_prefix: spec.bls_withdrawal_prefix_byte,
-            seconds_per_slot: spec.milliseconds_per_slot / 1000,
+            seconds_per_slot: milliseconds_to_seconds(spec.milliseconds_per_slot),
             min_attestation_inclusion_delay: spec.min_attestation_inclusion_delay,
             min_seed_lookahead: spec.min_seed_lookahead.into(),
             max_seed_lookahead: spec.max_seed_lookahead.into(),
@@ -18,7 +18,7 @@ pub struct Graffiti(#[serde(with = "serde_graffiti")] pub [u8; GRAFFITI_BYTES_LE
 
 impl Graffiti {
     pub fn as_utf8_lossy(&self) -> String {
-        #[allow(clippy::invalid_regex)]
+        #[allow(clippy::invalid_regex)] // This is a false positive, this regex is valid.
         let re = Regex::new("\\p{C}").expect("graffiti regex is valid");
         String::from_utf8_lossy(&re.replace_all(&self.0[..], &b""[..])).to_string()
     }
@@ -414,7 +414,6 @@ impl InitializedValidators {
     /// validator will be removed from `self.validators`.
     ///
     /// Saves the `ValidatorDefinitions` to file, even if no definitions were changed.
-    #[allow(dead_code)] // Will be used once VC API is enabled.
     pub async fn set_validator_status(
         &mut self,
         voting_public_key: &PublicKey,