From b1c33361ea1559fd32bd23f1c3e5940b320a3ae9 Mon Sep 17 00:00:00 2001
From: ethDreamer <37123614+ethDreamer@users.noreply.github.com>
Date: Tue, 13 Dec 2022 10:50:24 -0600
Subject: [PATCH] Fixed Clippy Complaints & Some Failing Tests (#3791)

* Fixed Clippy Complaints & Some Failing Tests

* Update Dockerfile to Rust-1.65

* EF test file renamed

* Touch up comments based on feedback
---
 Dockerfile                                    |  2 +-
 beacon_node/beacon_chain/src/beacon_chain.rs  | 10 ++--
 .../src/engine_api/json_structures.rs         |  6 +--
 beacon_node/execution_layer/src/engines.rs    |  2 +-
 beacon_node/execution_layer/src/lib.rs        |  2 +-
 .../src/test_utils/handle_rpc.rs              | 27 ++++++++--
 beacon_node/http_api/src/publish_blocks.rs    |  2 +-
 .../lighthouse_network/src/types/pubsub.rs    |  6 +--
 beacon_node/operation_pool/src/lib.rs         |  4 ++
 consensus/types/src/payload.rs                | 49 +++++++++++--------
 lcli/src/new_testnet.rs                       |  4 +-
 .../src/cases/merkle_proof_validity.rs        |  2 +-
 testing/ef_tests/src/cases/operations.rs      |  6 +--
 testing/ef_tests/src/lib.rs                   |  4 +-
 testing/ef_tests/tests/tests.rs               |  2 +
 15 files changed, 79 insertions(+), 49 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 72423b17c..7a0602a22 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM rust:1.62.1-bullseye AS builder
+FROM rust:1.65.0-bullseye AS builder
 RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev protobuf-compiler
 COPY . lighthouse
 ARG FEATURES
diff --git a/beacon_node/beacon_chain/src/beacon_chain.rs b/beacon_node/beacon_chain/src/beacon_chain.rs
index 0bbbe9235..fcd097d4d 100644
--- a/beacon_node/beacon_chain/src/beacon_chain.rs
+++ b/beacon_node/beacon_chain/src/beacon_chain.rs
@@ -2205,8 +2205,10 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
                 .verify_and_observe(bls_to_execution_change, &wall_clock_state, &self.spec)?)
         }

+        // TODO: remove this whole block once withdrawals-processing is removed
         #[cfg(not(feature = "withdrawals-processing"))]
         {
+            #[allow(clippy::drop_non_drop)]
             drop(bls_to_execution_change);
             Ok(ObservationOutcome::AlreadyKnown)
         }
@@ -4342,17 +4344,17 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
                 // Might implement caching here in the future..
                 let prepare_state = self
                     .state_at_slot(prepare_slot, StateSkipConfig::WithoutStateRoots)
-                    .or_else(|e| {
+                    .map_err(|e| {
                         error!(self.log, "State advance for withdrawals failed"; "error" => ?e);
-                        Err(e)
+                        e
                     })?;
                 Some(get_expected_withdrawals(&prepare_state, &self.spec))
             }
         }
         .transpose()
-        .or_else(|e| {
+        .map_err(|e| {
             error!(self.log, "Error preparing beacon proposer"; "error" => ?e);
-            Err(e)
+            e
         })
         .map(|withdrawals_opt| withdrawals_opt.map(|w| w.into()))
         .map_err(Error::PrepareProposerFailed)?;
diff --git a/beacon_node/execution_layer/src/engine_api/json_structures.rs b/beacon_node/execution_layer/src/engine_api/json_structures.rs
index 1b125cde4..ea2bb4941 100644
--- a/beacon_node/execution_layer/src/engine_api/json_structures.rs
+++ b/beacon_node/execution_layer/src/engine_api/json_structures.rs
@@ -176,7 +176,7 @@ impl<T: EthSpec> JsonExecutionPayload<T> {
                             .collect::<Vec<_>>()
                             .into()
                     })
-                    .ok_or(Error::BadConversion("Null withdrawal field converting JsonExecutionPayloadV2 -> ExecutionPayloadCapella".to_string()))?
+                    .ok_or_else(|| Error::BadConversion("Null withdrawal field converting JsonExecutionPayloadV2 -> ExecutionPayloadCapella".to_string()))?
             })),
             ForkName::Eip4844 => Ok(ExecutionPayload::Eip4844(ExecutionPayloadEip4844 {
                 parent_hash: v2.parent_hash,
@@ -191,7 +191,7 @@ impl<T: EthSpec> JsonExecutionPayload<T> {
                 timestamp: v2.timestamp,
                 extra_data: v2.extra_data,
                 base_fee_per_gas: v2.base_fee_per_gas,
-                excess_data_gas: v2.excess_data_gas.ok_or(Error::BadConversion("Null `excess_data_gas` field converting JsonExecutionPayloadV2 -> ExecutionPayloadEip4844".to_string()))?,
+                excess_data_gas: v2.excess_data_gas.ok_or_else(|| Error::BadConversion("Null `excess_data_gas` field converting JsonExecutionPayloadV2 -> ExecutionPayloadEip4844".to_string()))?,
                 block_hash: v2.block_hash,
                 transactions: v2.transactions,
                 #[cfg(feature = "withdrawals")]
@@ -204,7 +204,7 @@ impl<T: EthSpec> JsonExecutionPayload<T> {
                             .collect::<Vec<_>>()
                             .into()
                     })
-                    .ok_or(Error::BadConversion("Null withdrawal field converting JsonExecutionPayloadV2 -> ExecutionPayloadEip4844".to_string()))?
+                    .ok_or_else(|| Error::BadConversion("Null withdrawal field converting JsonExecutionPayloadV2 -> ExecutionPayloadEip4844".to_string()))?
             })),
             _ => Err(Error::UnsupportedForkVariant(format!("Unsupported conversion from JsonExecutionPayloadV2 for {}", fork_name))),
         }
diff --git a/beacon_node/execution_layer/src/engines.rs b/beacon_node/execution_layer/src/engines.rs
index 16562267c..271cca26c 100644
--- a/beacon_node/execution_layer/src/engines.rs
+++ b/beacon_node/execution_layer/src/engines.rs
@@ -342,7 +342,7 @@ impl Engine {
 impl PayloadIdCacheKey {
     fn new(head_block_hash: &ExecutionBlockHash, attributes: &PayloadAttributes) -> Self {
         Self {
-            head_block_hash: head_block_hash.clone(),
+            head_block_hash: *head_block_hash,
             payload_attributes: attributes.clone(),
         }
     }
diff --git a/beacon_node/execution_layer/src/lib.rs b/beacon_node/execution_layer/src/lib.rs
index b6e85f67d..a97bbc4fa 100644
--- a/beacon_node/execution_layer/src/lib.rs
+++ b/beacon_node/execution_layer/src/lib.rs
@@ -1582,7 +1582,7 @@ impl<T: EthSpec> ExecutionLayer<T> {
         let transactions = VariableList::new(
             block
                 .transactions()
-                .into_iter()
+                .iter()
                 .map(|transaction| VariableList::new(transaction.rlp().to_vec()))
                 .collect::<Result<_, _>>()
                 .map_err(ApiError::DeserializeTransaction)?,
diff --git a/beacon_node/execution_layer/src/test_utils/handle_rpc.rs b/beacon_node/execution_layer/src/test_utils/handle_rpc.rs
index fe765cc94..c83aeccdc 100644
--- a/beacon_node/execution_layer/src/test_utils/handle_rpc.rs
+++ b/beacon_node/execution_layer/src/test_utils/handle_rpc.rs
@@ -74,11 +74,29 @@ pub async fn handle_rpc<T: EthSpec>(
                 .unwrap())
             }
         }
-        ENGINE_NEW_PAYLOAD_V1 => {
-            let request: JsonExecutionPayload<T> = get_param(params, 0)?;
+        ENGINE_NEW_PAYLOAD_V1 | ENGINE_NEW_PAYLOAD_V2 => {
+            let request = match method {
+                ENGINE_NEW_PAYLOAD_V1 => {
+                    JsonExecutionPayload::V1(get_param::<JsonExecutionPayloadV1<T>>(params, 0)?)
+                }
+                ENGINE_NEW_PAYLOAD_V2 => {
+                    JsonExecutionPayload::V2(get_param::<JsonExecutionPayloadV2<T>>(params, 0)?)
+                }
+                _ => unreachable!(),
+            };
+            let fork = match request {
+                JsonExecutionPayload::V1(_) => ForkName::Merge,
+                JsonExecutionPayload::V2(ref payload) => {
+                    if payload.withdrawals.is_none() {
+                        ForkName::Merge
+                    } else {
+                        ForkName::Capella
+                    }
+                }
+            };

             // Canned responses set by block hash take priority.
-            if let Some(status) = ctx.get_new_payload_status(&request.block_hash()) {
+            if let Some(status) = ctx.get_new_payload_status(request.block_hash()) {
                 return Ok(serde_json::to_value(JsonPayloadStatusV1::from(status)).unwrap());
             }

@@ -97,8 +115,7 @@ pub async fn handle_rpc<T: EthSpec>(
                 Some(
                     ctx.execution_block_generator
                         .write()
-                        // FIXME: should this worry about other forks?
-                        .new_payload(request.try_into_execution_payload(ForkName::Merge).unwrap()),
+                        .new_payload(request.try_into_execution_payload(fork).unwrap()),
                 )
             } else {
                 None
diff --git a/beacon_node/http_api/src/publish_blocks.rs b/beacon_node/http_api/src/publish_blocks.rs
index fb296168d..c471da7d5 100644
--- a/beacon_node/http_api/src/publish_blocks.rs
+++ b/beacon_node/http_api/src/publish_blocks.rs
@@ -41,7 +41,7 @@ pub async fn publish_block<T: BeaconChainTypes>(
                     ))
                 } else {
                     //TODO(pawan): return an empty sidecar instead
-                    return Err(warp_utils::reject::broadcast_without_import(format!("")));
+                    return Err(warp_utils::reject::broadcast_without_import(String::new()));
                 }
             }
             _ => PubsubMessage::BeaconBlock(block.clone()),
diff --git a/beacon_node/lighthouse_network/src/types/pubsub.rs b/beacon_node/lighthouse_network/src/types/pubsub.rs
index 02f2bfff1..9cce98db9 100644
--- a/beacon_node/lighthouse_network/src/types/pubsub.rs
+++ b/beacon_node/lighthouse_network/src/types/pubsub.rs
@@ -224,12 +224,10 @@ impl<T: EthSpec> PubsubMessage<T> {
                                 | ForkName::Merge
                                 | ForkName::Capella,
                             )
-                            | None => {
-                                return Err(format!(
+                            | None => Err(format!(
                                 "beacon_blobs_and_sidecar topic invalid for given fork digest {:?}",
                                 gossip_topic.fork_digest
-                                ))
-                            }
+                            )),
                         }
                     }
                     GossipKind::VoluntaryExit => {
diff --git a/beacon_node/operation_pool/src/lib.rs b/beacon_node/operation_pool/src/lib.rs
index 159454b9e..37fa68938 100644
--- a/beacon_node/operation_pool/src/lib.rs
+++ b/beacon_node/operation_pool/src/lib.rs
@@ -558,8 +558,10 @@ impl<T: EthSpec> OperationPool<T> {
             )
         }

+        // TODO: remove this whole block once withdrwals-processing is removed
         #[cfg(not(feature = "withdrawals-processing"))]
         {
+            #[allow(clippy::drop_copy)]
             drop((state, spec));
             vec![]
         }
@@ -597,8 +599,10 @@ impl<T: EthSpec> OperationPool<T> {
             );
         }

+        // TODO: remove this whole block once withdrwals-processing is removed
         #[cfg(not(feature = "withdrawals-processing"))]
         {
+            #[allow(clippy::drop_copy)]
             drop((head_block, head_state, spec));
         }
     }
diff --git a/consensus/types/src/payload.rs b/consensus/types/src/payload.rs
index dba94cfd7..2d9e37b81 100644
--- a/consensus/types/src/payload.rs
+++ b/consensus/types/src/payload.rs
@@ -261,7 +261,7 @@ impl<T: EthSpec> ExecPayload<T> for FullPayload<T> {
         })
     }

-    fn is_default_with_empty_roots<'a>(&'a self) -> bool {
+    fn is_default_with_empty_roots(&self) -> bool {
         // For full payloads the empty/zero distinction does not exist.
         self.is_default_with_zero_roots()
     }
@@ -536,7 +536,7 @@ impl<T: EthSpec> ExecPayload<T> for BlindedPayload<T> {
         }
     }

-    fn is_default_with_zero_roots<'a>(&'a self) -> bool {
+    fn is_default_with_zero_roots(&self) -> bool {
         self.to_ref().is_default_with_zero_roots()
     }

@@ -643,13 +643,13 @@ impl<'b, T: EthSpec> ExecPayload<T> for BlindedPayloadRef<'b, T> {
 }

 macro_rules! impl_exec_payload_common {
-    ($wrapper_type:ident,
-    $wrapped_type:ident,
-    $wrapped_type_full:ident,
-    $wrapped_type_header:ident,
-    $wrapped_field:ident,
-    $fork_variant:ident,
-    $block_type_variant:ident,
+    ($wrapper_type:ident,           // BlindedPayloadMerge          |  FullPayloadMerge
+    $wrapped_type:ident,            // ExecutionPayloadHeaderMerge  |  ExecutionPayloadMerge
+    $wrapped_type_full:ident,       // ExecutionPayloadMerge        |  ExecutionPayloadMerge
+    $wrapped_type_header:ident,     // ExecutionPayloadHeaderMerge  |  ExecutionPayloadHeaderMerge
+    $wrapped_field:ident,           // execution_payload_header     |  execution_payload
+    $fork_variant:ident,            // Merge                        |  Merge
+    $block_type_variant:ident,      // Blinded                      |  Full
     $f:block,
     $g:block) => {
         impl<T: EthSpec> ExecPayload<T> for $wrapper_type<T> {
@@ -696,7 +696,15 @@ macro_rules! impl_exec_payload_common {
             }

             fn is_default_with_empty_roots(&self) -> bool {
-                self.$wrapped_field == $wrapped_type::from($wrapped_type_full::default())
+                // FIXME: is there a better way than ignoring this lint?
+                // This is necessary because the first invocation of this macro might expand to:
+                // self.execution_payload_header == ExecutionPayloadHeaderMerge::from(ExecutionPayloadMerge::default())
+                // but the second invocation might expand to:
+                // self.execution_payload == ExecutionPayloadMerge::from(ExecutionPayloadMerge::default())
+                #[allow(clippy::cmp_owned)]
+                {
+                    self.$wrapped_field == $wrapped_type::from($wrapped_type_full::default())
+                }
             }

             fn transactions(&self) -> Option<&Transactions<T>> {
@@ -720,16 +728,17 @@ macro_rules! impl_exec_payload_common {
 }

 macro_rules! impl_exec_payload_for_fork {
+    // BlindedPayloadMerge, FullPayloadMerge, ExecutionPayloadHeaderMerge, ExecutionPayloadMerge, Merge
     ($wrapper_type_header:ident, $wrapper_type_full:ident, $wrapped_type_header:ident, $wrapped_type_full:ident, $fork_variant:ident) => {
         //*************** Blinded payload implementations ******************//

         impl_exec_payload_common!(
-            $wrapper_type_header,
-            $wrapped_type_header,
-            $wrapped_type_full,
-            $wrapped_type_header,
+            $wrapper_type_header, // BlindedPayloadMerge
+            $wrapped_type_header, // ExecutionPayloadHeaderMerge
+            $wrapped_type_full, // ExecutionPayloadMerge
+            $wrapped_type_header, // ExecutionPayloadHeaderMerge
             execution_payload_header,
-            $fork_variant,
+            $fork_variant, // Merge
             Blinded,
             { |_| { None } },
             {
@@ -794,12 +803,12 @@ macro_rules! impl_exec_payload_for_fork {
         //*************** Full payload implementations ******************//

         impl_exec_payload_common!(
-            $wrapper_type_full,
-            $wrapped_type_full,
-            $wrapped_type_full,
-            $wrapped_type_header,
+            $wrapper_type_full, // FullPayloadMerge
+            $wrapped_type_full, // ExecutionPayloadMerge
+            $wrapped_type_full, // ExecutionPayloadMerge
+            $wrapped_type_header, // ExecutionPayloadHeaderMerge
             execution_payload,
-            $fork_variant,
+            $fork_variant, // Merge
             Full,
             {
                 let c: for<'a> fn(&'a $wrapper_type_full<T>) -> Option<&'a Transactions<T>> =
diff --git a/lcli/src/new_testnet.rs b/lcli/src/new_testnet.rs
index 693560457..58a7c49b3 100644
--- a/lcli/src/new_testnet.rs
+++ b/lcli/src/new_testnet.rs
@@ -87,9 +87,9 @@ pub fn run<T: EthSpec>(testnet_dir_path: PathBuf, matches: &ArgMatches) -> Resul
         execution_payload_header.as_ref()
     {
         let eth1_block_hash =
-            parse_optional(matches, "eth1-block-hash")?.unwrap_or(payload.block_hash());
+            parse_optional(matches, "eth1-block-hash")?.unwrap_or_else(|| payload.block_hash());
         let genesis_time =
-            parse_optional(matches, "genesis-time")?.unwrap_or(payload.timestamp());
+            parse_optional(matches, "genesis-time")?.unwrap_or_else(|| payload.timestamp());
         (eth1_block_hash, genesis_time)
     } else {
         let eth1_block_hash = parse_required(matches, "eth1-block-hash").map_err(|_| {
diff --git a/testing/ef_tests/src/cases/merkle_proof_validity.rs b/testing/ef_tests/src/cases/merkle_proof_validity.rs
index a57abc2e0..c180774bb 100644
--- a/testing/ef_tests/src/cases/merkle_proof_validity.rs
+++ b/testing/ef_tests/src/cases/merkle_proof_validity.rs
@@ -29,7 +29,7 @@ pub struct MerkleProofValidity<E: EthSpec> {
 impl<E: EthSpec> LoadCase for MerkleProofValidity<E> {
     fn load_from_dir(path: &Path, fork_name: ForkName) -> Result<Self, Error> {
         let spec = &testing_spec::<E>(fork_name);
-        let state = ssz_decode_state(&path.join("state.ssz_snappy"), spec)?;
+        let state = ssz_decode_state(&path.join("object.ssz_snappy"), spec)?;
         let merkle_proof = yaml_decode_file(&path.join("proof.yaml"))?;
         // Metadata does not exist in these tests but it is left like this just in case.
         let meta_path = path.join("meta.yaml");
diff --git a/testing/ef_tests/src/cases/operations.rs b/testing/ef_tests/src/cases/operations.rs
index 431fd829f..f5487a694 100644
--- a/testing/ef_tests/src/cases/operations.rs
+++ b/testing/ef_tests/src/cases/operations.rs
@@ -21,8 +21,6 @@ use state_processing::{
     ConsensusContext,
 };
 use std::fmt::Debug;
-#[cfg(not(all(feature = "withdrawals", feature = "withdrawals-processing")))]
-use std::marker::PhantomData;
 use std::path::Path;
 #[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
 use types::SignedBlsToExecutionChange;
@@ -44,12 +42,10 @@ struct ExecutionMetadata {
 }

 /// Newtype for testing withdrawals.
+#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
 #[derive(Debug, Clone, Deserialize)]
 pub struct WithdrawalsPayload<E: EthSpec> {
-    #[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
     payload: FullPayload<E>,
-    #[cfg(not(all(feature = "withdrawals", feature = "withdrawals-processing")))]
-    _phantom_data: PhantomData<E>,
 }

 #[derive(Debug, Clone)]
diff --git a/testing/ef_tests/src/lib.rs b/testing/ef_tests/src/lib.rs
index d45b1e15c..fd3bf2bd1 100644
--- a/testing/ef_tests/src/lib.rs
+++ b/testing/ef_tests/src/lib.rs
@@ -1,9 +1,11 @@
 pub use case_result::CaseResult;
+#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
+pub use cases::WithdrawalsPayload;
 pub use cases::{
     Case, EffectiveBalanceUpdates, Eth1DataReset, HistoricalRootsUpdate, InactivityUpdates,
     JustificationAndFinalization, ParticipationFlagUpdates, ParticipationRecordUpdates,
     RandaoMixesReset, RegistryUpdates, RewardsAndPenalties, Slashings, SlashingsReset,
-    SyncCommitteeUpdates, WithdrawalsPayload,
+    SyncCommitteeUpdates,
 };
 pub use decode::log_file_access;
 pub use error::Error;
diff --git a/testing/ef_tests/tests/tests.rs b/testing/ef_tests/tests/tests.rs
index f84be64da..0227b92ec 100644
--- a/testing/ef_tests/tests/tests.rs
+++ b/testing/ef_tests/tests/tests.rs
@@ -82,12 +82,14 @@ fn operations_execution_payload_blinded() {
     OperationsHandler::<MainnetEthSpec, BlindedPayload<MainnetEthSpec>>::default().run();
 }

+#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
 #[test]
 fn operations_withdrawals() {
     OperationsHandler::<MinimalEthSpec, WithdrawalsPayload<MinimalEthSpec>>::default().run();
     OperationsHandler::<MainnetEthSpec, WithdrawalsPayload<MainnetEthSpec>>::default().run();
 }

+#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
 #[test]
 fn operations_bls_to_execution_change() {
     OperationsHandler::<MinimalEthSpec, SignedBlsToExecutionChange>::default().run();
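Note on the error-handling changes above: several hunks (json_structures.rs, beacon_chain.rs, lcli/new_testnet.rs) swap eager combinators for their lazy counterparts to satisfy clippy. The sketch below is not Lighthouse code; it is a minimal, self-contained illustration using a made-up `lookup` example. The point is that `ok_or_else`/`unwrap_or_else` only build the error or fallback value on the failure path, and that logging with `map_err` expresses the old `or_else(|e| { ...; Err(e) })` pattern without re-wrapping the error.

    use std::collections::HashMap;

    fn lookup(map: &HashMap<u32, String>, key: u32) -> Result<String, String> {
        map.get(&key)
            .cloned()
            // Lazy: the `format!` allocation only happens when the key is missing.
            .ok_or_else(|| format!("missing key {key}"))
    }

    fn lookup_logged(map: &HashMap<u32, String>, key: u32) -> Result<String, String> {
        // `map_err` logs and passes the existing error through; no need to
        // rebuild the `Result` with `or_else(|e| { ...; Err(e) })`.
        lookup(map, key).map_err(|e| {
            eprintln!("lookup failed: {e}");
            e
        })
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert(1, "one".to_string());
        assert_eq!(lookup(&map, 1), Ok("one".to_string()));
        assert!(lookup_logged(&map, 2).is_err());
    }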
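Note on the mock engine API change in handle_rpc.rs: the canned handler now accepts both engine_newPayloadV1 and engine_newPayloadV2 and infers the fork from the request instead of hard-coding ForkName::Merge. The sketch below is a simplified, self-contained model of that decision using stand-in types (not the Lighthouse ones): a V1 payload is always treated as pre-Capella, and a V2 payload only counts as Capella when its optional withdrawals list is actually present.

    #[derive(Debug, PartialEq)]
    enum Fork {
        Merge,
        Capella,
    }

    struct PayloadV2 {
        // Stand-in for the real withdrawal list type.
        withdrawals: Option<Vec<u64>>,
    }

    enum Payload {
        V1,
        V2(PayloadV2),
    }

    fn infer_fork(payload: &Payload) -> Fork {
        match payload {
            // V1 payloads never carry withdrawals, so they are pre-Capella.
            Payload::V1 => Fork::Merge,
            // A V2 payload without withdrawals is still a Merge-style payload.
            Payload::V2(p) if p.withdrawals.is_none() => Fork::Merge,
            Payload::V2(_) => Fork::Capella,
        }
    }

    fn main() {
        assert_eq!(infer_fork(&Payload::V1), Fork::Merge);
        let capella = Payload::V2(PayloadV2 { withdrawals: Some(vec![]) });
        assert_eq!(infer_fork(&capella), Fork::Capella);
    }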