diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs
index 6036ce397..bfe7ca143 100644
--- a/beacon_node/beacon_chain/src/beacon_chain.rs
+++ b/beacon_node/beacon_chain/src/beacon_chain.rs
@@ -1602,7 +1602,8 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
         // This method is called for API and gossip attestations, so this covers all unaggregated attestation events
         if let Some(event_handler) = self.event_handler.as_ref() {
             if event_handler.has_attestation_subscribers() {
-                event_handler.register(EventKind::Attestation(v.attestation().clone()));
+                event_handler
+                    .register(EventKind::Attestation(Box::new(v.attestation().clone())));
             }
         }
         metrics::inc_counter(&metrics::UNAGGREGATED_ATTESTATION_PROCESSING_SUCCESSES);
@@ -1638,7 +1639,8 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
         // This method is called for API and gossip attestations, so this covers all aggregated attestation events
         if let Some(event_handler) = self.event_handler.as_ref() {
             if event_handler.has_attestation_subscribers() {
-                event_handler.register(EventKind::Attestation(v.attestation().clone()));
+                event_handler
+                    .register(EventKind::Attestation(Box::new(v.attestation().clone())));
             }
         }
         metrics::inc_counter(&metrics::AGGREGATED_ATTESTATION_PROCESSING_SUCCESSES);
diff --git a/beacon_node/beacon_chain/src/state_advance_timer.rs b/beacon_node/beacon_chain/src/state_advance_timer.rs
index 947e8c38e..6a3c3ea00 100644
--- a/beacon_node/beacon_chain/src/state_advance_timer.rs
+++ b/beacon_node/beacon_chain/src/state_advance_timer.rs
@@ -41,9 +41,17 @@ const MAX_ADVANCE_DISTANCE: u64 = 4;
 enum Error {
     BeaconChain(BeaconChainError),
     HeadMissingFromSnapshotCache(Hash256),
-    MaxDistanceExceeded { current_slot: Slot, head_slot: Slot },
-    StateAlreadyAdvanced { block_root: Hash256 },
-    BadStateSlot { state_slot: Slot, block_slot: Slot },
+    MaxDistanceExceeded {
+        current_slot: Slot,
+        head_slot: Slot,
+    },
+    StateAlreadyAdvanced {
+        block_root: Hash256,
+    },
+    BadStateSlot {
+        _state_slot: Slot,
+        _block_slot: Slot,
+    },
 }
 
 impl From<BeaconChainError> for Error {
@@ -224,8 +232,8 @@ fn advance_head<T: BeaconChainTypes>(
         // Advancing more than one slot without storing the intermediate state would corrupt the
         // database. Future works might store temporary, intermediate states inside this function.
         return Err(Error::BadStateSlot {
-            block_slot: head_slot,
-            state_slot: state.slot(),
+            _block_slot: head_slot,
+            _state_slot: state.slot(),
         });
     };
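Note on the `_state_slot` / `_block_slot` renames above: a leading underscore tells rustc the field is intentionally never read, which silences the dead_code warning while `#[derive(Debug)]` still prints the values when the error is logged. Deleting the fields instead would drop the slot numbers from the error output, which is the main reason they exist. A minimal standalone sketch of the pattern (illustrative types, not the Lighthouse ones):

```rust
// Fields that are never read directly normally trigger `dead_code`; a leading underscore
// opts them out while `#[derive(Debug)]` still includes their values.
#[derive(Debug)]
enum Error {
    BadStateSlot {
        _state_slot: u64,
        _block_slot: u64,
    },
}

fn main() {
    let e = Error::BadStateSlot {
        _state_slot: 32,
        _block_slot: 31,
    };
    // The values still show up when the error is logged or formatted.
    println!("{:?}", e);
}
```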
diff --git a/beacon_node/beacon_chain/src/validator_pubkey_cache.rs b/beacon_node/beacon_chain/src/validator_pubkey_cache.rs
index 2dbe8ce7b..da877cf4e 100644
--- a/beacon_node/beacon_chain/src/validator_pubkey_cache.rs
+++ b/beacon_node/beacon_chain/src/validator_pubkey_cache.rs
@@ -243,8 +243,8 @@ enum Error {
     /// The file read from disk does not have a contiguous list of validator public keys. The file
     /// has become corrupted.
     InconsistentIndex {
-        expected: Option<usize>,
-        found: usize,
+        _expected: Option<usize>,
+        _found: usize,
     },
 }
 
@@ -296,8 +296,8 @@ impl ValidatorPubkeyCacheFile {
             indices.insert(pubkey, index);
         } else {
             return Err(Error::InconsistentIndex {
-                expected,
-                found: index,
+                _expected: expected,
+                _found: index,
             });
         }
     }
diff --git a/beacon_node/http_api/tests/tests.rs b/beacon_node/http_api/tests/tests.rs
index 95f087130..878af7a03 100644
--- a/beacon_node/http_api/tests/tests.rs
+++ b/beacon_node/http_api/tests/tests.rs
@@ -2310,7 +2310,7 @@ impl ApiTester {
             self.attestations
                 .clone()
                 .into_iter()
-                .map(|attestation| EventKind::Attestation(attestation))
+                .map(|attestation| EventKind::Attestation(Box::new(attestation)))
                 .collect::<Vec<_>>()
                 .as_slice()
         );
diff --git a/beacon_node/network/src/beacon_processor/mod.rs b/beacon_node/network/src/beacon_processor/mod.rs
index c9b4bfa34..7c3d482fa 100644
--- a/beacon_node/network/src/beacon_processor/mod.rs
+++ b/beacon_node/network/src/beacon_processor/mod.rs
@@ -63,7 +63,7 @@ use types::{
     SyncCommitteeMessage, SyncSubnetId,
 };
 use work_reprocessing_queue::{
-    spawn_reprocess_scheduler, QueuedAggregate, QueuedBlock, QueuedUnaggregate, ReadyWork,
+    spawn_reprocess_scheduler, QueuedAggregate, QueuedUnaggregate, ReadyWork,
 };
 use worker::{Toolbox, Worker};
 
@@ -72,6 +72,7 @@ mod tests;
 mod work_reprocessing_queue;
 mod worker;
 
+use crate::beacon_processor::work_reprocessing_queue::QueuedBlock;
 pub use worker::{GossipAggregatePackage, GossipAttestationPackage, ProcessId};
 
 /// The maximum size of the channel for work events to the `BeaconProcessor`.
@@ -574,7 +575,7 @@ impl<T: BeaconChainTypes> std::convert::From<ReadyWork<T>> for WorkEvent<T> {
                 drop_during_sync: false,
                 work: Work::DelayedImportBlock {
                     peer_id,
-                    block: Box::new(block),
+                    block,
                     seen_timestamp,
                 },
             },
diff --git a/beacon_node/network/src/beacon_processor/work_reprocessing_queue.rs b/beacon_node/network/src/beacon_processor/work_reprocessing_queue.rs
index 00b5c009a..299e71c8d 100644
--- a/beacon_node/network/src/beacon_processor/work_reprocessing_queue.rs
+++ b/beacon_node/network/src/beacon_processor/work_reprocessing_queue.rs
@@ -91,7 +91,7 @@ pub struct QueuedAggregate {
 
 /// A block that arrived early and has been queued for later import.
 pub struct QueuedBlock<T: BeaconChainTypes> {
     pub peer_id: PeerId,
-    pub block: GossipVerifiedBlock<T>,
+    pub block: Box<GossipVerifiedBlock<T>>,
     pub seen_timestamp: Duration,
 }
diff --git a/beacon_node/network/src/beacon_processor/worker/gossip_methods.rs b/beacon_node/network/src/beacon_processor/worker/gossip_methods.rs
index 21a8c7618..365d53f49 100644
--- a/beacon_node/network/src/beacon_processor/worker/gossip_methods.rs
+++ b/beacon_node/network/src/beacon_processor/worker/gossip_methods.rs
@@ -121,7 +121,6 @@ pub struct GossipAttestationPackage<E: EthSpec> {
     peer_id: PeerId,
     attestation: Box<Attestation<E>>,
     subnet_id: SubnetId,
-    beacon_block_root: Hash256,
     should_import: bool,
     seen_timestamp: Duration,
 }
@@ -138,7 +137,6 @@ impl<E: EthSpec> GossipAttestationPackage<E> {
         Self {
             message_id,
             peer_id,
-            beacon_block_root: attestation.data.beacon_block_root,
             attestation,
             subnet_id,
             should_import,
@@ -830,7 +828,7 @@ impl<T: BeaconChainTypes> Worker<T> {
         if reprocess_tx
             .try_send(ReprocessQueueMessage::EarlyBlock(QueuedBlock {
                 peer_id,
-                block: verified_block,
+                block: Box::new(verified_block),
                 seen_timestamp: seen_duration,
             }))
             .is_err()
diff --git a/beacon_node/src/config.rs b/beacon_node/src/config.rs
index ab51c218b..ac2ba9d47 100644
--- a/beacon_node/src/config.rs
+++ b/beacon_node/src/config.rs
@@ -226,7 +226,7 @@ pub fn get_config<E: EthSpec>(
         client_config.sync_eth1_chain = true;
         client_config.eth1.endpoints = endpoints
             .split(',')
-            .map(|s| SensitiveUrl::parse(s))
+            .map(SensitiveUrl::parse)
             .collect::<Result<_, _>>()
             .map_err(|e| format!("eth1-endpoints contains an invalid URL {:?}", e))?;
     }
@@ -245,7 +245,7 @@ pub fn get_config<E: EthSpec>(
         client_config.sync_eth1_chain = true;
         client_config.execution_endpoints = endpoints
             .split(',')
-            .map(|s| SensitiveUrl::parse(s))
+            .map(SensitiveUrl::parse)
             .collect::<Result<_, _>>()
             .map(Some)
             .map_err(|e| format!("execution-endpoints contains an invalid URL {:?}", e))?;
diff --git a/common/eth2/src/types.rs b/common/eth2/src/types.rs
index 42131b49c..be65dd877 100644
--- a/common/eth2/src/types.rs
+++ b/common/eth2/src/types.rs
@@ -777,7 +777,7 @@ pub struct SseLateHead {
 #[derive(PartialEq, Debug, Serialize, Clone)]
 #[serde(bound = "T: EthSpec", untagged)]
 pub enum EventKind<T: EthSpec> {
-    Attestation(Attestation<T>),
+    Attestation(Box<Attestation<T>>),
     Block(SseBlock),
     FinalizedCheckpoint(SseFinalizedCheckpoint),
     Head(SseHead),
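The `EventKind::Attestation` change above is the usual fix for clippy's large_enum_variant lint: an enum is as large as its biggest inline variant, so a bulky `Attestation` payload inflates every `EventKind` value, and every channel message or `Vec` that carries one. Boxing moves the payload to the heap and keeps the enum itself small. The same reasoning applies to `QueuedBlock`, where the `GossipVerifiedBlock` is now boxed once as it enters the reprocess queue. A minimal sketch with a made-up payload size, not the real `Attestation` type:

```rust
// Sketch of why clippy::large_enum_variant suggests boxing (hypothetical 1 KiB payload).
#[allow(dead_code)]
struct LargePayload([u8; 1024]);

#[allow(dead_code)]
enum Inline {
    Big(LargePayload),
    Small(u8),
}

#[allow(dead_code)]
enum Boxed {
    Big(Box<LargePayload>),
    Small(u8),
}

fn main() {
    // Every `Inline` value reserves space for the largest variant, even `Small`.
    assert!(std::mem::size_of::<Inline>() >= 1024);
    // Boxing moves the payload to the heap, so the enum stays roughly pointer-sized.
    assert!(std::mem::size_of::<Boxed>() <= 16);
    println!(
        "inline: {} bytes, boxed: {} bytes",
        std::mem::size_of::<Inline>(),
        std::mem::size_of::<Boxed>()
    );
}
```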
diff --git a/common/lockfile/src/lib.rs b/common/lockfile/src/lib.rs
index 82e28256f..adb8be7bb 100644
--- a/common/lockfile/src/lib.rs
+++ b/common/lockfile/src/lib.rs
@@ -11,7 +11,7 @@ use std::path::{Path, PathBuf};
 /// outage) caused the lockfile not to be deleted.
 #[derive(Debug)]
 pub struct Lockfile {
-    file: File,
+    _file: File,
     path: PathBuf,
     file_existed: bool,
 }
@@ -43,7 +43,7 @@ impl Lockfile {
             _ => LockfileError::IoError(path.clone(), e),
         })?;
         Ok(Self {
-            file,
+            _file: file,
             path,
             file_existed,
         })
diff --git a/common/logging/src/lib.rs b/common/logging/src/lib.rs
index 6cbf7e00b..eab8e326b 100644
--- a/common/logging/src/lib.rs
+++ b/common/logging/src/lib.rs
@@ -99,7 +99,7 @@ impl<'a> AlignedRecordDecorator<'a> {
 
 impl<'a> Write for AlignedRecordDecorator<'a> {
     fn write(&mut self, buf: &[u8]) -> Result<usize> {
-        if buf.iter().any(|c| is_ascii_control(c)) {
+        if buf.iter().any(u8::is_ascii_control) {
             let filtered = buf
                 .iter()
                 .cloned()
diff --git a/common/monitoring_api/src/lib.rs b/common/monitoring_api/src/lib.rs
index 5eb7ea719..03cdf87c2 100644
--- a/common/monitoring_api/src/lib.rs
+++ b/common/monitoring_api/src/lib.rs
@@ -128,7 +128,7 @@ impl MonitoringHttpClient {
             Error::BeaconMetricsFailed("Beacon metrics require db path".to_string())
         })?;
 
-        let freezer_db_path = self.db_path.as_ref().ok_or_else(|| {
+        let freezer_db_path = self.freezer_db_path.as_ref().ok_or_else(|| {
             Error::BeaconMetricsFailed("Beacon metrics require freezer db path".to_string())
         })?;
         let metrics =
diff --git a/common/validator_dir/src/validator_dir.rs b/common/validator_dir/src/validator_dir.rs
index bfa3e2553..2fabebc74 100644
--- a/common/validator_dir/src/validator_dir.rs
+++ b/common/validator_dir/src/validator_dir.rs
@@ -63,7 +63,7 @@ pub struct Eth1DepositData {
 pub struct ValidatorDir {
     dir: PathBuf,
     #[derivative(PartialEq = "ignore")]
-    lockfile: Lockfile,
+    _lockfile: Lockfile,
 }
 
 impl ValidatorDir {
@@ -85,7 +85,10 @@ impl ValidatorDir {
         let lockfile_path = dir.join(format!("{}.lock", VOTING_KEYSTORE_FILE));
         let lockfile = Lockfile::new(lockfile_path).map_err(Error::LockfileError)?;
 
-        Ok(Self { dir, lockfile })
+        Ok(Self {
+            dir,
+            _lockfile: lockfile,
+        })
     }
 
     /// Returns the `dir` provided to `Self::open`.
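In the `Lockfile` change above the `file` handle is renamed rather than removed: the open `File` is what actually holds the lock, so it must stay stored in the struct even though nothing reads it. An underscore-prefixed field is still kept alive and dropped with the struct, unlike a `let _ = file;` binding, which would close the handle immediately. The same applies to `_lockfile` in `ValidatorDir`. A rough sketch of the RAII idea (a hypothetical `FileGuard`, not the lockfile crate's API):

```rust
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};

// The handle is never read, but it must be *stored* so it stays open for as long as the
// guard exists. A `_`-prefixed field silences dead_code yet is still dropped with the struct.
struct FileGuard {
    _file: File,
    path: PathBuf,
}

impl FileGuard {
    fn new(path: &Path) -> io::Result<Self> {
        let file = File::create(path)?;
        Ok(Self {
            _file: file,
            path: path.to_path_buf(),
        })
    }
}

impl Drop for FileGuard {
    fn drop(&mut self) {
        // The handle in `_file` is closed as this struct goes away; clean up the marker file.
        let _ = std::fs::remove_file(&self.path);
    }
}

fn main() -> io::Result<()> {
    let guard = FileGuard::new(Path::new("example.lock"))?;
    // ... exclusive section ...
    drop(guard); // file handle closed and lock file removed here
    Ok(())
}
```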
diff --git a/consensus/cached_tree_hash/src/cache_arena.rs b/consensus/cached_tree_hash/src/cache_arena.rs
index 9e11134aa..a938d4826 100644
--- a/consensus/cached_tree_hash/src/cache_arena.rs
+++ b/consensus/cached_tree_hash/src/cache_arena.rs
@@ -491,8 +491,8 @@ mod tests {
             subs.push(sub);
         }
 
-        for mut sub in subs.iter_mut() {
-            test_routine(arena, &mut sub);
+        for sub in subs.iter_mut() {
+            test_routine(arena, sub);
         }
     }
 }
diff --git a/consensus/ssz/src/encode.rs b/consensus/ssz/src/encode.rs
index cecd615a8..a46ef80e0 100644
--- a/consensus/ssz/src/encode.rs
+++ b/consensus/ssz/src/encode.rs
@@ -113,7 +113,7 @@ impl<'a> SszEncoder<'a> {
         F: Fn(&mut Vec<u8>),
     {
         if is_ssz_fixed_len {
-            ssz_append(&mut self.buf);
+            ssz_append(self.buf);
         } else {
             self.buf
                 .extend_from_slice(&encode_length(self.offset + self.variable_bytes.len()));
@@ -129,7 +129,7 @@ impl<'a> SszEncoder<'a> {
     pub fn finalize(&mut self) -> &mut Vec<u8> {
         self.buf.append(&mut self.variable_bytes);
 
-        &mut self.buf
+        self.buf
     }
 }
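The `SszEncoder` and `cache_arena` edits above are reborrow fixes: the value in hand is already a mutable reference, so taking `&mut` again produced a `&mut &mut _` that only worked through auto-deref. Passing or returning the reference directly reborrows it instead. A simplified sketch, assuming a buffer-holding struct similar in shape to the encoder (not the real `SszEncoder`):

```rust
// Sketch of the reborrow fix: `buf` is already `&'a mut Vec<u8>`.
struct Encoder<'a> {
    buf: &'a mut Vec<u8>,
}

impl<'a> Encoder<'a> {
    fn append_with<F>(&mut self, f: F)
    where
        F: Fn(&mut Vec<u8>),
    {
        // Writing `f(&mut self.buf)` would pass a `&mut &mut Vec<u8>` that only works via
        // auto-deref; reborrowing the existing mutable reference is enough.
        f(self.buf);
    }

    fn finalize(&mut self) -> &mut Vec<u8> {
        // Likewise, `&mut self.buf` would be a reference to the reference.
        self.buf
    }
}

fn main() {
    let mut bytes = Vec::new();
    let mut enc = Encoder { buf: &mut bytes };
    enc.append_with(|b| b.extend_from_slice(&[1, 2, 3]));
    assert_eq!(enc.finalize().len(), 3);
}
```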
diff --git a/consensus/state_processing/src/per_block_processing/tests.rs b/consensus/state_processing/src/per_block_processing/tests.rs
index 78c034caa..f04b0ca90 100644
--- a/consensus/state_processing/src/per_block_processing/tests.rs
+++ b/consensus/state_processing/src/per_block_processing/tests.rs
@@ -187,14 +187,13 @@ fn valid_4_deposits() {
     let harness = get_harness::<MainnetEthSpec>(EPOCH_OFFSET, VALIDATOR_COUNT);
     let mut state = harness.get_current_state();
 
-    let (deposits, mut state) = harness.make_deposits(&mut state, 4, None, None);
+    let (deposits, state) = harness.make_deposits(&mut state, 4, None, None);
     let deposits = VariableList::from(deposits);
 
     let mut head_block = harness.chain.head_beacon_block().unwrap().deconstruct().0;
     *head_block.to_mut().body_mut().deposits_mut() = deposits;
 
-    let result =
-        process_operations::process_deposits(&mut state, head_block.body().deposits(), &spec);
+    let result = process_operations::process_deposits(state, head_block.body().deposits(), &spec);
 
     // Expecting Ok because these are valid deposits.
     assert_eq!(result, Ok(()));
@@ -206,7 +205,7 @@ fn invalid_deposit_deposit_count_too_big() {
     let harness = get_harness::<MainnetEthSpec>(EPOCH_OFFSET, VALIDATOR_COUNT);
     let mut state = harness.get_current_state();
 
-    let (deposits, mut state) = harness.make_deposits(&mut state, 1, None, None);
+    let (deposits, state) = harness.make_deposits(&mut state, 1, None, None);
     let deposits = VariableList::from(deposits);
 
     let mut head_block = harness.chain.head_beacon_block().unwrap().deconstruct().0;
@@ -214,8 +213,7 @@
     let big_deposit_count = NUM_DEPOSITS + 1;
     state.eth1_data_mut().deposit_count = big_deposit_count;
 
-    let result =
-        process_operations::process_deposits(&mut state, head_block.body().deposits(), &spec);
+    let result = process_operations::process_deposits(state, head_block.body().deposits(), &spec);
 
     // Expecting DepositCountInvalid because we incremented the deposit_count
     assert_eq!(
@@ -233,7 +231,7 @@ fn invalid_deposit_count_too_small() {
     let harness = get_harness::<MainnetEthSpec>(EPOCH_OFFSET, VALIDATOR_COUNT);
     let mut state = harness.get_current_state();
 
-    let (deposits, mut state) = harness.make_deposits(&mut state, 1, None, None);
+    let (deposits, state) = harness.make_deposits(&mut state, 1, None, None);
     let deposits = VariableList::from(deposits);
 
     let mut head_block = harness.chain.head_beacon_block().unwrap().deconstruct().0;
@@ -241,8 +239,7 @@
     let small_deposit_count = NUM_DEPOSITS - 1;
     state.eth1_data_mut().deposit_count = small_deposit_count;
 
-    let result =
-        process_operations::process_deposits(&mut state, head_block.body().deposits(), &spec);
+    let result = process_operations::process_deposits(state, head_block.body().deposits(), &spec);
 
     // Expecting DepositCountInvalid because we decremented the deposit_count
     assert_eq!(
@@ -260,7 +257,7 @@ fn invalid_deposit_bad_merkle_proof() {
     let harness = get_harness::<MainnetEthSpec>(EPOCH_OFFSET, VALIDATOR_COUNT);
     let mut state = harness.get_current_state();
 
-    let (deposits, mut state) = harness.make_deposits(&mut state, 1, None, None);
+    let (deposits, state) = harness.make_deposits(&mut state, 1, None, None);
     let deposits = VariableList::from(deposits);
 
     let mut head_block = harness.chain.head_beacon_block().unwrap().deconstruct().0;
@@ -270,8 +267,7 @@
     // Manually offsetting deposit count and index to trigger bad merkle proof
     state.eth1_data_mut().deposit_count += 1;
     *state.eth1_deposit_index_mut() += 1;
-    let result =
-        process_operations::process_deposits(&mut state, head_block.body().deposits(), &spec);
+    let result = process_operations::process_deposits(state, head_block.body().deposits(), &spec);
 
     // Expecting BadMerkleProof because the proofs were created with different indices
     assert_eq!(
@@ -289,15 +285,14 @@ fn invalid_deposit_wrong_sig() {
     let harness = get_harness::<MainnetEthSpec>(EPOCH_OFFSET, VALIDATOR_COUNT);
     let mut state = harness.get_current_state();
 
-    let (deposits, mut state) =
+    let (deposits, state) =
        harness.make_deposits(&mut state, 1, None, Some(SignatureBytes::empty()));
     let deposits = VariableList::from(deposits);
 
     let mut head_block = harness.chain.head_beacon_block().unwrap().deconstruct().0;
     *head_block.to_mut().body_mut().deposits_mut() = deposits;
 
-    let result =
-        process_operations::process_deposits(&mut state, head_block.body().deposits(), &spec);
+    let result = process_operations::process_deposits(state, head_block.body().deposits(), &spec);
 
     // Expecting Ok(()) even though the block signature does not correspond to the correct public
     // key
     assert_eq!(result, Ok(()));
 }
@@ -308,15 +303,14 @@ fn invalid_deposit_invalid_pub_key() {
     let harness = get_harness::<MainnetEthSpec>(EPOCH_OFFSET, VALIDATOR_COUNT);
     let mut state = harness.get_current_state();
 
-    let (deposits, mut state) =
+    let (deposits, state) =
        harness.make_deposits(&mut state, 1, Some(PublicKeyBytes::empty()), None);
     let deposits = VariableList::from(deposits);
 
     let mut head_block = harness.chain.head_beacon_block().unwrap().deconstruct().0;
     *head_block.to_mut().body_mut().deposits_mut() = deposits;
 
-    let result =
-        process_operations::process_deposits(&mut state, head_block.body().deposits(), &spec);
+    let result = process_operations::process_deposits(state, head_block.body().deposits(), &spec);
 
     // Expecting Ok(()) even though we passed in invalid publickeybytes in the public key field of the deposit data.
     assert_eq!(result, Ok(()));
diff --git a/testing/ef_tests/src/cases/fork_choice.rs b/testing/ef_tests/src/cases/fork_choice.rs
index dc1190466..682fa8146 100644
--- a/testing/ef_tests/src/cases/fork_choice.rs
+++ b/testing/ef_tests/src/cases/fork_choice.rs
@@ -62,7 +62,8 @@ pub enum Step {
 #[derive(Debug, Clone, Deserialize)]
 #[serde(deny_unknown_fields)]
 pub struct Meta {
-    description: String,
+    #[serde(rename(deserialize = "description"))]
+    _description: String,
 }
 
 #[derive(Debug)]
diff --git a/testing/ef_tests/src/cases/genesis_validity.rs b/testing/ef_tests/src/cases/genesis_validity.rs
index e645d69ad..abdc1ed4a 100644
--- a/testing/ef_tests/src/cases/genesis_validity.rs
+++ b/testing/ef_tests/src/cases/genesis_validity.rs
@@ -7,7 +7,8 @@ use types::{BeaconState, EthSpec, ForkName};
 
 #[derive(Debug, Clone, Deserialize)]
 pub struct Metadata {
-    description: String,
+    #[serde(rename(deserialize = "description"))]
+    _description: String,
 }
 
 #[derive(Debug, Clone, Deserialize)]
diff --git a/testing/ef_tests/src/cases/ssz_generic.rs b/testing/ef_tests/src/cases/ssz_generic.rs
index 022da9223..2374ead88 100644
--- a/testing/ef_tests/src/cases/ssz_generic.rs
+++ b/testing/ef_tests/src/cases/ssz_generic.rs
@@ -15,7 +15,8 @@ use types::{BitList, BitVector, FixedVector, ForkName, VariableList};
 #[derive(Debug, Clone, Deserialize)]
 struct Metadata {
     root: String,
-    signing_root: Option<String>,
+    #[serde(rename(deserialize = "signing_root"))]
+    _signing_root: Option<String>,
 }
 
 #[derive(Debug, Clone)]
diff --git a/testing/ef_tests/src/cases/ssz_static.rs b/testing/ef_tests/src/cases/ssz_static.rs
index 732a7d851..d0cc5f9ea 100644
--- a/testing/ef_tests/src/cases/ssz_static.rs
+++ b/testing/ef_tests/src/cases/ssz_static.rs
@@ -10,7 +10,8 @@ use types::{BeaconBlock, BeaconState, ForkName, Hash256, SignedBeaconBlock};
 #[derive(Debug, Clone, Deserialize)]
 struct SszStaticRoots {
     root: String,
-    signing_root: Option<String>,
+    #[serde(rename(deserialize = "signing_root"))]
+    _signing_root: Option<String>,
 }
 
 /// Runner for types that implement `ssz::Decode`.
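The ef_tests metadata structs above keep their unused `description` / `signing_root` fields because the test vectors on disk still contain those keys (and `Meta` in fork_choice.rs rejects unknown fields), so the fields are renamed with an underscore and mapped back to the on-disk key via `#[serde(rename(deserialize = ...))]`. A small sketch of the pattern, assuming serde and serde_json as dependencies and a made-up input document:

```rust
use serde::Deserialize;

// The on-disk key stays `description`, the Rust field is `_description`, and unknown keys
// are still rejected because the field remains part of the schema.
#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields)]
struct Meta {
    #[serde(rename(deserialize = "description"))]
    _description: String,
}

fn main() -> Result<(), serde_json::Error> {
    let meta: Meta = serde_json::from_str(r#"{ "description": "basic test" }"#)?;
    println!("{:?}", meta);
    Ok(())
}
```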
diff --git a/validator_client/src/config.rs b/validator_client/src/config.rs
index 4b07c72b8..0695012fb 100644
--- a/validator_client/src/config.rs
+++ b/validator_client/src/config.rs
@@ -131,7 +131,7 @@ impl Config {
         if let Some(beacon_nodes) = parse_optional::<String>(cli_args, "beacon-nodes")? {
             config.beacon_nodes = beacon_nodes
                 .split(',')
-                .map(|s| SensitiveUrl::parse(s))
+                .map(SensitiveUrl::parse)
                 .collect::<Result<_, _>>()
                 .map_err(|e| format!("Unable to parse beacon node URL: {:?}", e))?;
         }
diff --git a/validator_client/src/lib.rs b/validator_client/src/lib.rs
index a3ab10316..a721496fc 100644
--- a/validator_client/src/lib.rs
+++ b/validator_client/src/lib.rs
@@ -84,7 +84,6 @@ pub struct ProductionValidatorClient<T: EthSpec> {
     doppelganger_service: Option<Arc<DoppelgangerService>>,
     validator_store: Arc<ValidatorStore<SystemTimeSlotClock, T>>,
     http_api_listen_addr: Option<SocketAddr>,
-    http_metrics_ctx: Option<Arc<http_metrics::Context<T>>>,
     config: Config,
 }
 
@@ -431,7 +430,6 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
             validator_store,
             config,
             http_api_listen_addr: None,
-            http_metrics_ctx,
         })
     }
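Several hunks in this patch (the eth1, execution, and beacon-node endpoint parsing) replace `.map(|s| SensitiveUrl::parse(s))` with `.map(SensitiveUrl::parse)`, clippy's redundant_closure lint: when a closure only forwards its argument, the function item can be passed to `map` directly. Collecting into a `Result<_, _>` then short-circuits on the first URL that fails to parse. A small sketch with a stand-in parser (`parse_port` is made up for illustration; `SensitiveUrl` is the Lighthouse type):

```rust
// Stand-in parser so the sketch is self-contained.
fn parse_port(s: &str) -> Result<u16, String> {
    s.trim().parse::<u16>().map_err(|e| format!("{}: {}", s, e))
}

fn main() -> Result<(), String> {
    let endpoints = "8545,8546,8547";

    // Before: .map(|s| parse_port(s))  (clippy::redundant_closure)
    // After:  .map(parse_port)         (pass the function item directly)
    let ports = endpoints
        .split(',')
        .map(parse_port)
        .collect::<Result<Vec<_>, _>>()?;

    assert_eq!(ports, vec![8545, 8546, 8547]);
    Ok(())
}
```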