merge with upstream

realbigsean 2022-11-22 18:38:30 -05:00
commit 48b2efce9f
70 changed files with 1521 additions and 761 deletions

View File

@ -20,6 +20,9 @@ CROSS_FEATURES ?= gnosis,slasher-lmdb,slasher-mdbx
# Cargo profile for Cross builds. Default is for local builds, CI uses an override.
CROSS_PROFILE ?= release
# List of features to use when running EF tests.
EF_TEST_FEATURES ?= beacon_chain/withdrawals,beacon_chain/withdrawals-processing
# Cargo profile for regular builds.
PROFILE ?= release
@ -108,9 +111,9 @@ check-consensus:
# Runs only the ef-test vectors.
run-ef-tests:
rm -rf $(EF_TESTS)/.accessed_file_log.txt
cargo test --release -p ef_tests --features "ef_tests"
cargo test --release -p ef_tests --features "ef_tests,fake_crypto"
cargo test --release -p ef_tests --features "ef_tests,milagro"
cargo test --release -p ef_tests --features "ef_tests,$(EF_TEST_FEATURES)"
cargo test --release -p ef_tests --features "ef_tests,$(EF_TEST_FEATURES),fake_crypto"
cargo test --release -p ef_tests --features "ef_tests,$(EF_TEST_FEATURES),milagro"
./$(EF_TESTS)/check_all_files_accessed.py $(EF_TESTS)/.accessed_file_log.txt $(EF_TESTS)/consensus-spec-tests
# Run the tests in the `beacon_chain` crate for all known forks.

View File

@ -77,6 +77,8 @@ use slasher::Slasher;
use slog::{crit, debug, error, info, trace, warn, Logger};
use slot_clock::SlotClock;
use ssz::Encode;
#[cfg(feature = "withdrawals")]
use state_processing::per_block_processing::get_expected_withdrawals;
use state_processing::{
common::{get_attesting_indices_from_state, get_indexed_attestation},
per_block_processing,
@ -261,6 +263,8 @@ struct PartialBeaconBlock<E: EthSpec, Payload: AbstractExecPayload<E>> {
voluntary_exits: Vec<SignedVoluntaryExit>,
sync_aggregate: Option<SyncAggregate<E>>,
prepare_payload_handle: Option<PreparePayloadHandle<E, Payload>>,
#[cfg(feature = "withdrawals")]
bls_to_execution_changes: Vec<SignedBlsToExecutionChange>,
}
pub type BeaconForkChoice<T> = ForkChoice<
@ -3407,12 +3411,14 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
// Wait for the execution layer to return an execution payload (if one is required).
let prepare_payload_handle = partial_beacon_block.prepare_payload_handle.take();
let block_contents = if let Some(prepare_payload_handle) = prepare_payload_handle {
prepare_payload_handle
.await
.map_err(BlockProductionError::TokioJoin)?
.ok_or(BlockProductionError::ShuttingDown)??
Some(
prepare_payload_handle
.await
.map_err(BlockProductionError::TokioJoin)?
.ok_or(BlockProductionError::ShuttingDown)??,
)
} else {
return Err(BlockProductionError::MissingExecutionPayload);
None
};
// Part 3/3 (blocking)
@ -3507,6 +3513,9 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
let eth1_data = eth1_chain.eth1_data_for_block_production(&state, &self.spec)?;
let deposits = eth1_chain.deposits_for_block_inclusion(&state, &eth1_data, &self.spec)?;
let bls_to_execution_changes = self
.op_pool
.get_bls_to_execution_changes(&state, &self.spec);
// Iterate through the naive aggregation pool and ensure all the attestations from there
// are included in the operation pool.
@ -3664,13 +3673,15 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
voluntary_exits,
sync_aggregate,
prepare_payload_handle,
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
})
}
fn complete_partial_beacon_block<Payload: AbstractExecPayload<T::EthSpec>>(
&self,
partial_beacon_block: PartialBeaconBlock<T::EthSpec, Payload>,
block_contents: BlockProposalContents<T::EthSpec, Payload>,
block_contents: Option<BlockProposalContents<T::EthSpec, Payload>>,
verification: ProduceBlockVerification,
) -> Result<BeaconBlockAndState<T::EthSpec, Payload>, BlockProductionError> {
let PartialBeaconBlock {
@ -3691,6 +3702,8 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
// this function. We can assume that the handle has already been consumed in order to
// produce said `execution_payload`.
prepare_payload_handle: _,
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
} = partial_beacon_block;
let (payload, kzg_commitments_opt, blobs) = block_contents.deconstruct();
@ -3749,6 +3762,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
sync_aggregate: sync_aggregate
.ok_or(BlockProductionError::MissingSyncAggregate)?,
execution_payload: payload
.ok_or(BlockProductionError::MissingExecutionPayload)?
.try_into()
.map_err(|_| BlockProductionError::InvalidPayloadFork)?,
},
@ -3770,8 +3784,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
sync_aggregate: sync_aggregate
.ok_or(BlockProductionError::MissingSyncAggregate)?,
execution_payload: payload
.ok_or(BlockProductionError::MissingExecutionPayload)?
.try_into()
.map_err(|_| BlockProductionError::InvalidPayloadFork)?,
#[cfg(feature = "withdrawals")]
bls_to_execution_changes: bls_to_execution_changes.into(),
},
}),
BeaconState::Eip4844(_) => {
@ -3794,8 +3811,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
sync_aggregate: sync_aggregate
.ok_or(BlockProductionError::MissingSyncAggregate)?,
execution_payload: payload
.ok_or(BlockProductionError::MissingExecutionPayload)?
.try_into()
.map_err(|_| BlockProductionError::InvalidPayloadFork)?,
#[cfg(feature = "withdrawals")]
bls_to_execution_changes: bls_to_execution_changes.into(),
blob_kzg_commitments: VariableList::from(kzg_commitments),
},
})
@ -4134,35 +4154,52 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
return Ok(());
}
let payload_attributes = match self.spec.fork_name_at_epoch(prepare_epoch) {
#[cfg(feature = "withdrawals")]
let head_state = &self.canonical_head.cached_head().snapshot.beacon_state;
#[cfg(feature = "withdrawals")]
let withdrawals = match self.spec.fork_name_at_epoch(prepare_epoch) {
ForkName::Base | ForkName::Altair | ForkName::Merge => {
PayloadAttributes::V1(PayloadAttributesV1 {
timestamp: self
.slot_clock
.start_of(prepare_slot)
.ok_or(Error::InvalidSlot(prepare_slot))?
.as_secs(),
prev_randao: head_random,
suggested_fee_recipient: execution_layer
.get_suggested_fee_recipient(proposer as u64)
.await,
})
}
ForkName::Capella | ForkName::Eip4844 => PayloadAttributes::V2(PayloadAttributesV2 {
timestamp: self
.slot_clock
.start_of(prepare_slot)
.ok_or(Error::InvalidSlot(prepare_slot))?
.as_secs(),
prev_randao: head_random,
suggested_fee_recipient: execution_layer
.get_suggested_fee_recipient(proposer as u64)
.await,
//FIXME(sean)
#[cfg(feature = "withdrawals")]
withdrawals: vec![],
}),
};
None
},
ForkName::Capella | ForkName::Eip4844 => match &head_state {
&BeaconState::Capella(_) | &BeaconState::Eip4844(_) => {
// The head_state is already BeaconState::Capella or later
// FIXME(mark)
// Might implement caching here in the future..
Some(get_expected_withdrawals(head_state, &self.spec))
}
&BeaconState::Base(_) | &BeaconState::Altair(_) | &BeaconState::Merge(_) => {
// We are the Capella transition block proposer, so we need a state advanced to the proposal slot
let mut prepare_state = self
.state_at_slot(prepare_slot, StateSkipConfig::WithoutStateRoots)
.or_else(|e| {
error!(self.log, "Capella Transition Proposer"; "Error Advancing State: " => ?e);
Err(e)
})?;
// FIXME(mark)
// Might implement caching here in the future..
Some(get_expected_withdrawals(&prepare_state, &self.spec))
}
},
}.transpose().or_else(|e| {
error!(self.log, "Error preparing beacon proposer"; "while calculating expected withdrawals" => ?e);
Err(e)
}).map(|withdrawals_opt| withdrawals_opt.map(|w| w.into()))
.map_err(Error::PrepareProposerFailed)?;
let payload_attributes = PayloadAttributes::V2(PayloadAttributesV2 {
timestamp: self
.slot_clock
.start_of(prepare_slot)
.ok_or(Error::InvalidSlot(prepare_slot))?
.as_secs(),
prev_randao: head_random,
suggested_fee_recipient: execution_layer
.get_suggested_fee_recipient(proposer as u64)
.await,
#[cfg(feature = "withdrawals")]
withdrawals,
});
debug!(
self.log,
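The fork match above produces an Option<Result<_, _>> (withdrawals are only computed from Capella onward), which is why the chain ends in .transpose() before the single ? at the end. A minimal, self-contained sketch of that step, using illustrative types in place of the real withdrawal and error types:

// `transpose` flips Option<Result<T, E>> into Result<Option<T>, E>, so the
// pre-Capella "no withdrawals" case stays a plain None while any error from
// the Capella/Eip4844 arms can still be propagated with `?`.
fn withdrawals_or_none(capella: bool) -> Result<Option<Vec<u64>>, String> {
    let computed: Option<Result<Vec<u64>, String>> = if capella {
        Some(Ok(vec![1, 2, 3]))
    } else {
        None
    };
    let withdrawals = computed.transpose()?;
    Ok(withdrawals)
}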

View File

@ -153,7 +153,7 @@ pub enum BeaconChainError {
},
AddPayloadLogicError,
ExecutionForkChoiceUpdateFailed(execution_layer::Error),
PrepareProposerBlockingFailed(execution_layer::Error),
PrepareProposerFailed(BlockProcessingError),
ExecutionForkChoiceUpdateInvalid {
status: PayloadStatus,
},

View File

@ -17,6 +17,8 @@ use fork_choice::{InvalidationOperation, PayloadVerificationStatus};
use proto_array::{Block as ProtoBlock, ExecutionStatus};
use slog::debug;
use slot_clock::SlotClock;
#[cfg(feature = "withdrawals")]
use state_processing::per_block_processing::get_expected_withdrawals;
use state_processing::per_block_processing::{
compute_timestamp_at_slot, is_execution_enabled, is_merge_transition_complete,
partially_verify_execution_payload,
@ -362,6 +364,15 @@ pub fn get_execution_payload<
let random = *state.get_randao_mix(current_epoch)?;
let latest_execution_payload_header_block_hash =
state.latest_execution_payload_header()?.block_hash();
#[cfg(feature = "withdrawals")]
let withdrawals = match state {
&BeaconState::Capella(_) | &BeaconState::Eip4844(_) => {
Some(get_expected_withdrawals(state, spec)?.into())
}
&BeaconState::Merge(_) => None,
// These shouldn't happen but they're here to make the pattern irrefutable
&BeaconState::Base(_) | &BeaconState::Altair(_) => None,
};
// Spawn a task to obtain the execution payload from the EL via a series of async calls. The
// `join_handle` can be used to await the result of the function.
@ -378,6 +389,8 @@ pub fn get_execution_payload<
proposer_index,
latest_execution_payload_header_block_hash,
builder_params,
#[cfg(feature = "withdrawals")]
withdrawals,
)
.await
},
@ -411,6 +424,7 @@ pub async fn prepare_execution_payload<T, Payload>(
proposer_index: u64,
latest_execution_payload_header_block_hash: ExecutionBlockHash,
builder_params: BuilderParams,
#[cfg(feature = "withdrawals")] withdrawals: Option<Vec<Withdrawal>>,
) -> Result<BlockProposalContents<T::EthSpec, Payload>, BlockProductionError>
where
T: BeaconChainTypes,
@ -480,6 +494,9 @@ where
proposer_index,
forkchoice_update_params,
builder_params,
fork,
#[cfg(feature = "withdrawals")]
withdrawals,
&chain.spec,
)
.await
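Throughout this commit the optional withdrawals list is threaded through the call chain behind the withdrawals feature, with #[cfg(...)] on both the parameter declaration and the call-site argument. A minimal sketch of that pattern, assuming a withdrawals feature is declared in the consuming crate's Cargo.toml (the function names here are illustrative, not Lighthouse APIs):

// Both the parameter and the argument are compiled out when the feature is
// disabled, so non-withdrawals builds never have to pass a stub value.
fn prepare_payload(
    timestamp: u64,
    #[cfg(feature = "withdrawals")] withdrawals: Option<Vec<u64>>,
) -> u64 {
    #[cfg(feature = "withdrawals")]
    if let Some(w) = withdrawals {
        return timestamp + w.len() as u64;
    }
    timestamp
}

fn caller() -> u64 {
    prepare_payload(
        12,
        #[cfg(feature = "withdrawals")]
        Some(vec![0, 1]),
    )
}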

View File

@ -12,9 +12,9 @@ use beacon_chain::{
INVALID_JUSTIFIED_PAYLOAD_SHUTDOWN_REASON,
};
use execution_layer::{
json_structures::{JsonForkChoiceStateV1, JsonPayloadAttributesV1},
json_structures::{JsonForkchoiceStateV1, JsonPayloadAttributesV1},
test_utils::ExecutionBlockGenerator,
ExecutionLayer, ForkChoiceState, PayloadAttributes,
ExecutionLayer, ForkchoiceState, PayloadAttributes,
};
use fork_choice::{
CountUnrealized, Error as ForkChoiceError, InvalidationOperation, PayloadVerificationStatus,
@ -117,7 +117,7 @@ impl InvalidPayloadRig {
&self.harness.chain.canonical_head
}
fn previous_forkchoice_update_params(&self) -> (ForkChoiceState, PayloadAttributes) {
fn previous_forkchoice_update_params(&self) -> (ForkchoiceState, PayloadAttributes) {
let mock_execution_layer = self.harness.mock_execution_layer.as_ref().unwrap();
let json = mock_execution_layer
.server
@ -126,7 +126,7 @@ impl InvalidPayloadRig {
let params = json.get("params").expect("no params");
let fork_choice_state_json = params.get(0).expect("no payload param");
let fork_choice_state: JsonForkChoiceStateV1 =
let fork_choice_state: JsonForkchoiceStateV1 =
serde_json::from_value(fork_choice_state_json.clone()).unwrap();
let payload_param_json = params.get(1).expect("no payload param");

View File

@ -5,8 +5,8 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
withdrawals = ["state_processing/withdrawals", "types/withdrawals"]
withdrawals-processing = ["state_processing/withdrawals-processing"]
withdrawals = ["state_processing/withdrawals", "types/withdrawals", "eth2/withdrawals"]
withdrawals-processing = ["state_processing/withdrawals-processing", "eth2/withdrawals-processing"]
[dependencies]
types = { path = "../../consensus/types"}

View File

@ -1,4 +1,4 @@
use crate::engines::ForkChoiceState;
use crate::engines::ForkchoiceState;
pub use ethers_core::types::Transaction;
use ethers_core::utils::rlp::{Decodable, Rlp};
use http::deposit_methods::RpcError;
@ -7,10 +7,11 @@ use reqwest::StatusCode;
use serde::{Deserialize, Serialize};
use strum::IntoStaticStr;
use superstruct::superstruct;
#[cfg(feature = "withdrawals")]
use types::Withdrawal;
pub use types::{
Address, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadHeader, FixedVector,
Hash256, Uint256, VariableList,
ForkName, Hash256, Uint256, VariableList,
};
pub mod auth;
@ -44,6 +45,9 @@ pub enum Error {
DeserializeWithdrawals(ssz_types::Error),
BuilderApi(builder_client::Error),
IncorrectStateVariant,
RequiredMethodUnsupported(&'static str),
UnsupportedForkVariant(String),
BadConversion(String),
}
impl From<reqwest::Error> for Error {
@ -255,7 +259,29 @@ pub struct PayloadAttributes {
pub suggested_fee_recipient: Address,
#[cfg(feature = "withdrawals")]
#[superstruct(only(V2))]
pub withdrawals: Vec<Withdrawal>,
pub withdrawals: Option<Vec<Withdrawal>>,
}
impl PayloadAttributes {
pub fn downgrade_to_v1(self) -> Result<Self, Error> {
match self {
PayloadAttributes::V1(_) => Ok(self),
PayloadAttributes::V2(v2) => {
#[cfg(feature = "withdrawals")]
if v2.withdrawals.is_some() {
return Err(Error::BadConversion(
"Downgrading from PayloadAttributesV2 with non-null withdrawaals"
.to_string(),
));
}
Ok(PayloadAttributes::V1(PayloadAttributesV1 {
timestamp: v2.timestamp,
prev_randao: v2.prev_randao,
suggested_fee_recipient: v2.suggested_fee_recipient,
}))
}
}
}
}
#[derive(Clone, Debug, PartialEq)]
@ -277,3 +303,17 @@ pub struct ProposeBlindedBlockResponse {
pub latest_valid_hash: Option<Hash256>,
pub validation_error: Option<String>,
}
// This name is a work in progress; it could change when this method
// is actually proposed, but it is written here as it has been described.
#[derive(Clone, Copy)]
pub struct SupportedApis {
pub new_payload_v1: bool,
pub new_payload_v2: bool,
pub forkchoice_updated_v1: bool,
pub forkchoice_updated_v2: bool,
pub get_payload_v1: bool,
pub get_payload_v2: bool,
pub exchange_transition_configuration_v1: bool,
}
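The downgrade_to_v1 helper above exists so the auto-selecting forkchoice_updated wrapper added later in this commit can fall back to engine_forkchoiceUpdatedV1 against engines that do not serve V2 yet; the conversion is only valid when the V2 attributes carry no withdrawals. A rough sketch of that invariant with simplified stand-in types (the real variants are generated by superstruct):

// Simplified stand-ins for the PayloadAttributes variants. V2 -> V1 only
// succeeds when `withdrawals` is None; otherwise data would be silently lost.
enum Attributes {
    V1 { timestamp: u64 },
    V2 { timestamp: u64, withdrawals: Option<Vec<u64>> },
}

impl Attributes {
    fn downgrade_to_v1(self) -> Result<Self, String> {
        match self {
            Attributes::V1 { .. } => Ok(self),
            Attributes::V2 { timestamp, withdrawals } => {
                if withdrawals.is_some() {
                    return Err("cannot downgrade V2 attributes carrying withdrawals".to_string());
                }
                Ok(Attributes::V1 { timestamp })
            }
        }
    }
}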

View File

@ -7,6 +7,7 @@ use reqwest::header::CONTENT_TYPE;
use sensitive_url::SensitiveUrl;
use serde::de::DeserializeOwned;
use serde_json::json;
use tokio::sync::RwLock;
use std::time::Duration;
use types::EthSpec;
@ -29,15 +30,18 @@ pub const ETH_SYNCING: &str = "eth_syncing";
pub const ETH_SYNCING_TIMEOUT: Duration = Duration::from_secs(1);
pub const ENGINE_NEW_PAYLOAD_V1: &str = "engine_newPayloadV1";
pub const ENGINE_NEW_PAYLOAD_V2: &str = "engine_newPayloadV2";
pub const ENGINE_NEW_PAYLOAD_TIMEOUT: Duration = Duration::from_secs(8);
pub const ENGINE_GET_PAYLOAD_V1: &str = "engine_getPayloadV1";
pub const ENGINE_GET_PAYLOAD_V2: &str = "engine_getPayloadV2";
pub const ENGINE_GET_PAYLOAD_TIMEOUT: Duration = Duration::from_secs(2);
pub const ENGINE_GET_BLOBS_BUNDLE_V1: &str = "engine_getBlobsBundleV1";
pub const ENGINE_GET_BLOBS_BUNDLE_TIMEOUT: Duration = Duration::from_secs(2);
pub const ENGINE_FORKCHOICE_UPDATED_V1: &str = "engine_forkchoiceUpdatedV1";
pub const ENGINE_FORKCHOICE_UPDATED_V2: &str = "engine_forkchoiceUpdatedV2";
pub const ENGINE_FORKCHOICE_UPDATED_TIMEOUT: Duration = Duration::from_secs(8);
pub const ENGINE_EXCHANGE_TRANSITION_CONFIGURATION_V1: &str =
@ -526,6 +530,7 @@ pub struct HttpJsonRpc {
pub client: Client,
pub url: SensitiveUrl,
pub execution_timeout_multiplier: u32,
pub cached_supported_apis: RwLock<Option<SupportedApis>>,
auth: Option<Auth>,
}
@ -538,6 +543,7 @@ impl HttpJsonRpc {
client: Client::builder().build()?,
url,
execution_timeout_multiplier: execution_timeout_multiplier.unwrap_or(1),
cached_supported_apis: Default::default(),
auth: None,
})
}
@ -551,6 +557,7 @@ impl HttpJsonRpc {
client: Client::builder().build()?,
url,
execution_timeout_multiplier: execution_timeout_multiplier.unwrap_or(1),
cached_supported_apis: Default::default(),
auth: Some(auth),
})
}
@ -671,7 +678,7 @@ impl HttpJsonRpc {
&self,
execution_payload: ExecutionPayload<T>,
) -> Result<PayloadStatusV1, Error> {
let params = json!([JsonExecutionPayload::from(execution_payload)]);
let params = json!([JsonExecutionPayloadV1::try_from(execution_payload)?]);
let response: JsonPayloadStatusV1 = self
.rpc_request(
@ -684,13 +691,31 @@ impl HttpJsonRpc {
Ok(response.into())
}
pub async fn new_payload_v2<T: EthSpec>(
&self,
execution_payload: ExecutionPayload<T>,
) -> Result<PayloadStatusV1, Error> {
let params = json!([JsonExecutionPayloadV2::try_from(execution_payload)?]);
let response: JsonPayloadStatusV1 = self
.rpc_request(
ENGINE_NEW_PAYLOAD_V2,
params,
ENGINE_NEW_PAYLOAD_TIMEOUT * self.execution_timeout_multiplier,
)
.await?;
Ok(response.into())
}
pub async fn get_payload_v1<T: EthSpec>(
&self,
fork_name: ForkName,
payload_id: PayloadId,
) -> Result<ExecutionPayload<T>, Error> {
let params = json!([JsonPayloadIdRequest::from(payload_id)]);
let response: JsonExecutionPayload<T> = self
let payload_v1: JsonExecutionPayloadV1<T> = self
.rpc_request(
ENGINE_GET_PAYLOAD_V1,
params,
@ -698,7 +723,25 @@ impl HttpJsonRpc {
)
.await?;
Ok(response.into())
JsonExecutionPayload::V1(payload_v1).try_into_execution_payload(fork_name)
}
pub async fn get_payload_v2<T: EthSpec>(
&self,
fork_name: ForkName,
payload_id: PayloadId,
) -> Result<ExecutionPayload<T>, Error> {
let params = json!([JsonPayloadIdRequest::from(payload_id)]);
let payload_v2: JsonExecutionPayloadV2<T> = self
.rpc_request(
ENGINE_GET_PAYLOAD_V2,
params,
ENGINE_GET_PAYLOAD_TIMEOUT * self.execution_timeout_multiplier,
)
.await?;
JsonExecutionPayload::V2(payload_v2).try_into_execution_payload(fork_name)
}
pub async fn get_blobs_bundle_v1<T: EthSpec>(
@ -720,11 +763,11 @@ impl HttpJsonRpc {
pub async fn forkchoice_updated_v1(
&self,
forkchoice_state: ForkChoiceState,
forkchoice_state: ForkchoiceState,
payload_attributes: Option<PayloadAttributes>,
) -> Result<ForkchoiceUpdatedResponse, Error> {
let params = json!([
JsonForkChoiceStateV1::from(forkchoice_state),
JsonForkchoiceStateV1::from(forkchoice_state),
payload_attributes.map(JsonPayloadAttributes::from)
]);
@ -739,6 +782,27 @@ impl HttpJsonRpc {
Ok(response.into())
}
pub async fn forkchoice_updated_v2(
&self,
forkchoice_state: ForkchoiceState,
payload_attributes: Option<PayloadAttributes>,
) -> Result<ForkchoiceUpdatedResponse, Error> {
let params = json!([
JsonForkchoiceStateV1::from(forkchoice_state),
payload_attributes.map(JsonPayloadAttributes::from)
]);
let response: JsonForkchoiceUpdatedV1Response = self
.rpc_request(
ENGINE_FORKCHOICE_UPDATED_V2,
params,
ENGINE_FORKCHOICE_UPDATED_TIMEOUT * self.execution_timeout_multiplier,
)
.await?;
Ok(response.into())
}
pub async fn exchange_transition_configuration_v1(
&self,
transition_configuration: TransitionConfigurationV1,
@ -756,6 +820,94 @@ impl HttpJsonRpc {
Ok(response)
}
// This is a stub, as this method hasn't been defined yet
pub async fn supported_apis_v1(&self) -> Result<SupportedApis, Error> {
Ok(SupportedApis {
new_payload_v1: true,
new_payload_v2: cfg!(all(feature = "withdrawals", not(test))),
forkchoice_updated_v1: true,
forkchoice_updated_v2: cfg!(all(feature = "withdrawals", not(test))),
get_payload_v1: true,
get_payload_v2: cfg!(all(feature = "withdrawals", not(test))),
exchange_transition_configuration_v1: true,
})
}
pub async fn set_cached_supported_apis(&self, supported_apis: SupportedApis) {
*self.cached_supported_apis.write().await = Some(supported_apis);
}
pub async fn get_cached_supported_apis(&self) -> Result<SupportedApis, Error> {
let cached_opt = *self.cached_supported_apis.read().await;
if let Some(supported_apis) = cached_opt {
Ok(supported_apis)
} else {
let supported_apis = self.supported_apis_v1().await?;
self.set_cached_supported_apis(supported_apis).await;
Ok(supported_apis)
}
}
// automatically selects the latest version of
// new_payload that the execution engine supports
pub async fn new_payload<T: EthSpec>(
&self,
execution_payload: ExecutionPayload<T>,
) -> Result<PayloadStatusV1, Error> {
let supported_apis = self.get_cached_supported_apis().await?;
if supported_apis.new_payload_v2 {
// FIXME: I haven't thought at all about how to handle 4844..
self.new_payload_v2(execution_payload).await
} else if supported_apis.new_payload_v1 {
self.new_payload_v1(execution_payload).await
} else {
Err(Error::RequiredMethodUnsupported("engine_newPayload"))
}
}
// automatically selects the latest version of
// get_payload that the execution engine supports
pub async fn get_payload<T: EthSpec>(
&self,
fork_name: ForkName,
payload_id: PayloadId,
) -> Result<ExecutionPayload<T>, Error> {
let supported_apis = self.get_cached_supported_apis().await?;
if supported_apis.get_payload_v2 {
// FIXME: I haven't thought at all about how to handle 4844..
self.get_payload_v2(fork_name, payload_id).await
} else if supported_apis.get_payload_v1 {
self.get_payload_v1(fork_name, payload_id).await
} else {
Err(Error::RequiredMethodUnsupported("engine_getPayload"))
}
}
// automatically selects the latest version of
// forkchoice_updated that the execution engine supports
pub async fn forkchoice_updated(
&self,
forkchoice_state: ForkchoiceState,
payload_attributes: Option<PayloadAttributes>,
) -> Result<ForkchoiceUpdatedResponse, Error> {
let supported_apis = self.get_cached_supported_apis().await?;
if supported_apis.forkchoice_updated_v2 {
// FIXME: I haven't thought at all about how to handle 4844..
self.forkchoice_updated_v2(forkchoice_state, payload_attributes)
.await
} else if supported_apis.forkchoice_updated_v1 {
self.forkchoice_updated_v1(
forkchoice_state,
payload_attributes
.map(|pa| pa.downgrade_to_v1())
.transpose()?,
)
.await
} else {
Err(Error::RequiredMethodUnsupported("engine_forkchoiceUpdated"))
}
}
}
#[cfg(test)]
@ -767,8 +919,8 @@ mod test {
use std::str::FromStr;
use std::sync::Arc;
use types::{
AbstractExecPayload, ExecutionPayloadMerge, ForkName, FullPayload, MainnetEthSpec,
Transactions, Unsigned, VariableList,
ExecutionPayloadMerge, ForkName, FullPayload, MainnetEthSpec, Transactions, Unsigned,
VariableList,
};
struct Tester {
@ -1052,7 +1204,7 @@ mod test {
|client| async move {
let _ = client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::repeat_byte(1),
safe_block_hash: ExecutionBlockHash::repeat_byte(1),
finalized_block_hash: ExecutionBlockHash::zero(),
@ -1087,7 +1239,7 @@ mod test {
.assert_auth_failure(|client| async move {
client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::repeat_byte(1),
safe_block_hash: ExecutionBlockHash::repeat_byte(1),
finalized_block_hash: ExecutionBlockHash::zero(),
@ -1108,7 +1260,9 @@ mod test {
Tester::new(true)
.assert_request_equals(
|client| async move {
let _ = client.get_payload_v1::<MainnetEthSpec>([42; 8]).await;
let _ = client
.get_payload_v1::<MainnetEthSpec>(ForkName::Merge, [42; 8])
.await;
},
json!({
"id": STATIC_ID,
@ -1121,7 +1275,9 @@ mod test {
Tester::new(false)
.assert_auth_failure(|client| async move {
client.get_payload_v1::<MainnetEthSpec>([42; 8]).await
client
.get_payload_v1::<MainnetEthSpec>(ForkName::Merge, [42; 8])
.await
})
.await;
}
@ -1209,7 +1365,7 @@ mod test {
|client| async move {
let _ = client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::repeat_byte(0),
safe_block_hash: ExecutionBlockHash::repeat_byte(0),
finalized_block_hash: ExecutionBlockHash::repeat_byte(1),
@ -1235,7 +1391,7 @@ mod test {
.assert_auth_failure(|client| async move {
client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::repeat_byte(0),
safe_block_hash: ExecutionBlockHash::repeat_byte(0),
finalized_block_hash: ExecutionBlockHash::repeat_byte(1),
@ -1274,7 +1430,7 @@ mod test {
|client| async move {
let _ = client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::from_str("0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a").unwrap(),
safe_block_hash: ExecutionBlockHash::from_str("0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a").unwrap(),
finalized_block_hash: ExecutionBlockHash::zero(),
@ -1321,7 +1477,7 @@ mod test {
|client| async move {
let response = client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::from_str("0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a").unwrap(),
safe_block_hash: ExecutionBlockHash::from_str("0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a").unwrap(),
finalized_block_hash: ExecutionBlockHash::zero(),
@ -1350,7 +1506,7 @@ mod test {
// engine_getPayloadV1 REQUEST validation
|client| async move {
let _ = client
.get_payload_v1::<MainnetEthSpec>(str_to_payload_id("0xa247243752eb10b4"))
.get_payload_v1::<MainnetEthSpec>(ForkName::Merge, str_to_payload_id("0xa247243752eb10b4"))
.await;
},
json!({
@ -1385,7 +1541,7 @@ mod test {
})],
|client| async move {
let payload = client
.get_payload_v1::<MainnetEthSpec>(str_to_payload_id("0xa247243752eb10b4"))
.get_payload_v1::<MainnetEthSpec>(ForkName::Merge, str_to_payload_id("0xa247243752eb10b4"))
.await
.unwrap();
@ -1468,7 +1624,7 @@ mod test {
})],
|client| async move {
let response = client
.new_payload_v1::<MainnetEthSpec>(FullPayload::default_at_fork(ForkName::Merge).into())
.new_payload_v1::<MainnetEthSpec>(ExecutionPayload::Merge(ExecutionPayloadMerge::default()))
.await
.unwrap();
@ -1487,7 +1643,7 @@ mod test {
|client| async move {
let _ = client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::from_str("0x3559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858").unwrap(),
safe_block_hash: ExecutionBlockHash::from_str("0x3559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858").unwrap(),
finalized_block_hash: ExecutionBlockHash::from_str("0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a").unwrap(),
@ -1526,7 +1682,7 @@ mod test {
|client| async move {
let response = client
.forkchoice_updated_v1(
ForkChoiceState {
ForkchoiceState {
head_block_hash: ExecutionBlockHash::from_str("0x3559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858").unwrap(),
safe_block_hash: ExecutionBlockHash::from_str("0x3559e851470f6e7bbed1db474980683e8c315bfce99b2a6ef47c057c04de7858").unwrap(),
finalized_block_hash: ExecutionBlockHash::from_str("0x3b8fb240d288781d4aac94d3fd16809ee413bc99294a085798a589dae51ddd4a").unwrap(),
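The wrapper methods added above (new_payload, get_payload, forkchoice_updated) pick the newest Engine API version the execution engine is believed to support, and get_cached_supported_apis memoises that capability probe behind a tokio::sync::RwLock<Option<_>>. A minimal sketch of the read-then-populate pattern, with a stand-in capability type and a hypothetical probe_engine in place of supported_apis_v1:

use tokio::sync::RwLock;

#[derive(Clone, Copy)]
struct Caps {
    forkchoice_updated_v2: bool,
}

struct Client {
    cached_caps: RwLock<Option<Caps>>,
}

impl Client {
    // Stand-in for `supported_apis_v1`: the real client would ask the engine
    // (or, as stubbed above, infer support from compile-time features).
    async fn probe_engine(&self) -> Caps {
        Caps {
            forkchoice_updated_v2: true,
        }
    }

    // Mirrors `get_cached_supported_apis`: serve the cached value if present,
    // otherwise probe once and populate the cache for later calls.
    async fn caps(&self) -> Caps {
        let cached = *self.cached_caps.read().await; // read guard dropped here
        if let Some(caps) = cached {
            caps
        } else {
            let caps = self.probe_engine().await;
            *self.cached_caps.write().await = Some(caps);
            caps
        }
    }

    // Mirrors the version auto-selection: prefer V2, fall back to V1.
    async fn forkchoice_updated_method(&self) -> &'static str {
        if self.caps().await.forkchoice_updated_v2 {
            "engine_forkchoiceUpdatedV2"
        } else {
            "engine_forkchoiceUpdatedV1"
        }
    }
}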

View File

@ -3,11 +3,12 @@ use serde::{Deserialize, Serialize};
use strum::EnumString;
use superstruct::superstruct;
use types::{
Blob, EthSpec, ExecutionBlockHash, ExecutionPayloadEip4844, ExecutionPayloadHeaderEip4844,
FixedVector, KzgCommitment, Transaction, Unsigned, VariableList,
Blob, EthSpec, ExecutionBlockHash, FixedVector, KzgCommitment, Transaction, Unsigned,
VariableList, Withdrawal,
};
use types::{
ExecutionPayload, ExecutionPayloadCapella, ExecutionPayloadEip4844, ExecutionPayloadMerge,
};
use types::{ExecutionPayload, ExecutionPayloadCapella, ExecutionPayloadMerge};
use types::{ExecutionPayloadHeader, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderMerge};
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@ -62,169 +63,6 @@ pub struct JsonPayloadIdResponse {
pub payload_id: PayloadId,
}
// (V1,V2,V3) -> (Merge,Capella,EIP4844)
#[superstruct(
variants(V1, V2, V3),
variant_attributes(
derive(Debug, PartialEq, Default, Serialize, Deserialize,),
serde(bound = "T: EthSpec", rename_all = "camelCase"),
),
cast_error(ty = "Error", expr = "Error::IncorrectStateVariant"),
partial_getter_error(ty = "Error", expr = "Error::IncorrectStateVariant")
)]
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(bound = "T: EthSpec", rename_all = "camelCase", untagged)]
pub struct JsonExecutionPayloadHeader<T: EthSpec> {
pub parent_hash: ExecutionBlockHash,
pub fee_recipient: Address,
pub state_root: Hash256,
pub receipts_root: Hash256,
#[serde(with = "serde_logs_bloom")]
pub logs_bloom: FixedVector<u8, T::BytesPerLogsBloom>,
pub prev_randao: Hash256,
#[serde(with = "eth2_serde_utils::u64_hex_be")]
pub block_number: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")]
pub gas_limit: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")]
pub gas_used: u64,
#[serde(with = "eth2_serde_utils::u64_hex_be")]
pub timestamp: u64,
#[serde(with = "ssz_types::serde_utils::hex_var_list")]
pub extra_data: VariableList<u8, T::MaxExtraDataBytes>,
#[serde(with = "eth2_serde_utils::u256_hex_be")]
pub base_fee_per_gas: Uint256,
#[serde(with = "eth2_serde_utils::u64_hex_be")]
#[superstruct(only(V3))]
pub excess_blobs: u64,
pub block_hash: ExecutionBlockHash,
pub transactions_root: Hash256,
#[cfg(feature = "withdrawals")]
#[superstruct(only(V2, V3))]
pub withdrawals_root: Hash256,
}
impl<T: EthSpec> From<JsonExecutionPayloadHeader<T>> for ExecutionPayloadHeader<T> {
fn from(json_header: JsonExecutionPayloadHeader<T>) -> Self {
match json_header {
JsonExecutionPayloadHeader::V1(v1) => Self::Merge(ExecutionPayloadHeaderMerge {
parent_hash: v1.parent_hash,
fee_recipient: v1.fee_recipient,
state_root: v1.state_root,
receipts_root: v1.receipts_root,
logs_bloom: v1.logs_bloom,
prev_randao: v1.prev_randao,
block_number: v1.block_number,
gas_limit: v1.gas_limit,
gas_used: v1.gas_used,
timestamp: v1.timestamp,
extra_data: v1.extra_data,
base_fee_per_gas: v1.base_fee_per_gas,
block_hash: v1.block_hash,
transactions_root: v1.transactions_root,
}),
JsonExecutionPayloadHeader::V2(v2) => Self::Capella(ExecutionPayloadHeaderCapella {
parent_hash: v2.parent_hash,
fee_recipient: v2.fee_recipient,
state_root: v2.state_root,
receipts_root: v2.receipts_root,
logs_bloom: v2.logs_bloom,
prev_randao: v2.prev_randao,
block_number: v2.block_number,
gas_limit: v2.gas_limit,
gas_used: v2.gas_used,
timestamp: v2.timestamp,
extra_data: v2.extra_data,
base_fee_per_gas: v2.base_fee_per_gas,
block_hash: v2.block_hash,
transactions_root: v2.transactions_root,
#[cfg(feature = "withdrawals")]
withdrawals_root: v2.withdrawals_root,
}),
JsonExecutionPayloadHeader::V3(v3) => Self::Eip4844(ExecutionPayloadHeaderEip4844 {
parent_hash: v3.parent_hash,
fee_recipient: v3.fee_recipient,
state_root: v3.state_root,
receipts_root: v3.receipts_root,
logs_bloom: v3.logs_bloom,
prev_randao: v3.prev_randao,
block_number: v3.block_number,
gas_limit: v3.gas_limit,
gas_used: v3.gas_used,
timestamp: v3.timestamp,
extra_data: v3.extra_data,
base_fee_per_gas: v3.base_fee_per_gas,
excess_blobs: v3.excess_blobs,
block_hash: v3.block_hash,
transactions_root: v3.transactions_root,
#[cfg(feature = "withdrawals")]
withdrawals_root: v3.withdrawals_root,
}),
}
}
}
impl<T: EthSpec> From<ExecutionPayloadHeader<T>> for JsonExecutionPayloadHeader<T> {
fn from(header: ExecutionPayloadHeader<T>) -> Self {
match header {
ExecutionPayloadHeader::Merge(merge) => Self::V1(JsonExecutionPayloadHeaderV1 {
parent_hash: merge.parent_hash,
fee_recipient: merge.fee_recipient,
state_root: merge.state_root,
receipts_root: merge.receipts_root,
logs_bloom: merge.logs_bloom,
prev_randao: merge.prev_randao,
block_number: merge.block_number,
gas_limit: merge.gas_limit,
gas_used: merge.gas_used,
timestamp: merge.timestamp,
extra_data: merge.extra_data,
base_fee_per_gas: merge.base_fee_per_gas,
block_hash: merge.block_hash,
transactions_root: merge.transactions_root,
}),
ExecutionPayloadHeader::Capella(capella) => Self::V2(JsonExecutionPayloadHeaderV2 {
parent_hash: capella.parent_hash,
fee_recipient: capella.fee_recipient,
state_root: capella.state_root,
receipts_root: capella.receipts_root,
logs_bloom: capella.logs_bloom,
prev_randao: capella.prev_randao,
block_number: capella.block_number,
gas_limit: capella.gas_limit,
gas_used: capella.gas_used,
timestamp: capella.timestamp,
extra_data: capella.extra_data,
base_fee_per_gas: capella.base_fee_per_gas,
block_hash: capella.block_hash,
transactions_root: capella.transactions_root,
#[cfg(feature = "withdrawals")]
withdrawals_root: capella.withdrawals_root,
}),
ExecutionPayloadHeader::Eip4844(eip4844) => Self::V3(JsonExecutionPayloadHeaderV3 {
parent_hash: eip4844.parent_hash,
fee_recipient: eip4844.fee_recipient,
state_root: eip4844.state_root,
receipts_root: eip4844.receipts_root,
logs_bloom: eip4844.logs_bloom,
prev_randao: eip4844.prev_randao,
block_number: eip4844.block_number,
gas_limit: eip4844.gas_limit,
gas_used: eip4844.gas_used,
timestamp: eip4844.timestamp,
extra_data: eip4844.extra_data,
base_fee_per_gas: eip4844.base_fee_per_gas,
excess_blobs: eip4844.excess_blobs,
block_hash: eip4844.block_hash,
transactions_root: eip4844.transactions_root,
#[cfg(feature = "withdrawals")]
withdrawals_root: eip4844.withdrawals_root,
}),
}
}
}
// (V1,V2, V2) -> (Merge,Capella,EIP4844)
#[superstruct(
variants(V1, V2, V3),
variant_attributes(
@ -257,81 +95,173 @@ pub struct JsonExecutionPayload<T: EthSpec> {
#[serde(with = "eth2_serde_utils::u256_hex_be")]
pub base_fee_per_gas: Uint256,
#[superstruct(only(V3))]
// FIXME: can't easily make this an option because of custom deserialization..
#[serde(with = "eth2_serde_utils::u64_hex_be")]
pub excess_blobs: u64,
pub block_hash: ExecutionBlockHash,
#[serde(with = "ssz_types::serde_utils::list_of_hex_var_list")]
pub transactions:
VariableList<Transaction<T::MaxBytesPerTransaction>, T::MaxTransactionsPerPayload>,
#[cfg(feature = "withdrawals")]
#[superstruct(only(V2, V3))]
pub withdrawals: VariableList<Withdrawal, T::MaxWithdrawalsPerPayload>,
pub withdrawals: Option<VariableList<JsonWithdrawal, T::MaxWithdrawalsPerPayload>>,
}
impl<T: EthSpec> From<JsonExecutionPayload<T>> for ExecutionPayload<T> {
fn from(json_payload: JsonExecutionPayload<T>) -> Self {
match json_payload {
JsonExecutionPayload::V1(v1) => Self::Merge(ExecutionPayloadMerge {
parent_hash: v1.parent_hash,
fee_recipient: v1.fee_recipient,
state_root: v1.state_root,
receipts_root: v1.receipts_root,
logs_bloom: v1.logs_bloom,
prev_randao: v1.prev_randao,
block_number: v1.block_number,
gas_limit: v1.gas_limit,
gas_used: v1.gas_used,
timestamp: v1.timestamp,
extra_data: v1.extra_data,
base_fee_per_gas: v1.base_fee_per_gas,
block_hash: v1.block_hash,
transactions: v1.transactions,
}),
JsonExecutionPayload::V2(v2) => Self::Capella(ExecutionPayloadCapella {
parent_hash: v2.parent_hash,
fee_recipient: v2.fee_recipient,
state_root: v2.state_root,
receipts_root: v2.receipts_root,
logs_bloom: v2.logs_bloom,
prev_randao: v2.prev_randao,
block_number: v2.block_number,
gas_limit: v2.gas_limit,
gas_used: v2.gas_used,
timestamp: v2.timestamp,
extra_data: v2.extra_data,
base_fee_per_gas: v2.base_fee_per_gas,
block_hash: v2.block_hash,
transactions: v2.transactions,
#[cfg(feature = "withdrawals")]
withdrawals: v2.withdrawals,
}),
JsonExecutionPayload::V3(v3) => Self::Eip4844(ExecutionPayloadEip4844 {
parent_hash: v3.parent_hash,
fee_recipient: v3.fee_recipient,
state_root: v3.state_root,
receipts_root: v3.receipts_root,
logs_bloom: v3.logs_bloom,
prev_randao: v3.prev_randao,
block_number: v3.block_number,
gas_limit: v3.gas_limit,
gas_used: v3.gas_used,
timestamp: v3.timestamp,
extra_data: v3.extra_data,
base_fee_per_gas: v3.base_fee_per_gas,
excess_blobs: v3.excess_blobs,
block_hash: v3.block_hash,
transactions: v3.transactions,
#[cfg(feature = "withdrawals")]
withdrawals: v3.withdrawals,
}),
impl<T: EthSpec> JsonExecutionPayload<T> {
pub fn try_into_execution_payload(
self,
fork_name: ForkName,
) -> Result<ExecutionPayload<T>, Error> {
match self {
JsonExecutionPayload::V1(v1) => match fork_name {
ForkName::Merge => Ok(ExecutionPayload::Merge(ExecutionPayloadMerge {
parent_hash: v1.parent_hash,
fee_recipient: v1.fee_recipient,
state_root: v1.state_root,
receipts_root: v1.receipts_root,
logs_bloom: v1.logs_bloom,
prev_randao: v1.prev_randao,
block_number: v1.block_number,
gas_limit: v1.gas_limit,
gas_used: v1.gas_used,
timestamp: v1.timestamp,
extra_data: v1.extra_data,
base_fee_per_gas: v1.base_fee_per_gas,
block_hash: v1.block_hash,
transactions: v1.transactions,
})),
_ => Err(Error::UnsupportedForkVariant(format!("Unsupported conversion from JsonExecutionPayloadV1 for {}", fork_name))),
}
JsonExecutionPayload::V2(v2) => match fork_name {
ForkName::Merge => Ok(ExecutionPayload::Merge(ExecutionPayloadMerge {
parent_hash: v2.parent_hash,
fee_recipient: v2.fee_recipient,
state_root: v2.state_root,
receipts_root: v2.receipts_root,
logs_bloom: v2.logs_bloom,
prev_randao: v2.prev_randao,
block_number: v2.block_number,
gas_limit: v2.gas_limit,
gas_used: v2.gas_used,
timestamp: v2.timestamp,
extra_data: v2.extra_data,
base_fee_per_gas: v2.base_fee_per_gas,
block_hash: v2.block_hash,
transactions: v2.transactions,
})),
ForkName::Capella => Ok(ExecutionPayload::Capella(ExecutionPayloadCapella {
parent_hash: v2.parent_hash,
fee_recipient: v2.fee_recipient,
state_root: v2.state_root,
receipts_root: v2.receipts_root,
logs_bloom: v2.logs_bloom,
prev_randao: v2.prev_randao,
block_number: v2.block_number,
gas_limit: v2.gas_limit,
gas_used: v2.gas_used,
timestamp: v2.timestamp,
extra_data: v2.extra_data,
base_fee_per_gas: v2.base_fee_per_gas,
block_hash: v2.block_hash,
transactions: v2.transactions,
#[cfg(feature = "withdrawals")]
withdrawals: v2
.withdrawals
.map(|v| {
Into::<Vec<_>>::into(v)
.into_iter()
.map(Into::into)
.collect::<Vec<_>>()
.into()
})
.ok_or(Error::BadConversion("Null withdrawal field converting JsonExecutionPayloadV2 -> ExecutionPayloadCapella".to_string()))?
})),
ForkName::Eip4844 => Err(Error::UnsupportedForkVariant("JsonExecutionPayloadV2 -> ExecutionPayloadEip4844 not implemented yet as it might never be".to_string())),
_ => Err(Error::UnsupportedForkVariant(format!("Unsupported conversion from JsonExecutionPayloadV2 for {}", fork_name))),
}
JsonExecutionPayload::V3(v3) => match fork_name {
ForkName::Merge => Ok(ExecutionPayload::Merge(ExecutionPayloadMerge {
parent_hash: v3.parent_hash,
fee_recipient: v3.fee_recipient,
state_root: v3.state_root,
receipts_root: v3.receipts_root,
logs_bloom: v3.logs_bloom,
prev_randao: v3.prev_randao,
block_number: v3.block_number,
gas_limit: v3.gas_limit,
gas_used: v3.gas_used,
timestamp: v3.timestamp,
extra_data: v3.extra_data,
base_fee_per_gas: v3.base_fee_per_gas,
block_hash: v3.block_hash,
transactions: v3.transactions,
})),
ForkName::Capella => Ok(ExecutionPayload::Capella(ExecutionPayloadCapella {
parent_hash: v3.parent_hash,
fee_recipient: v3.fee_recipient,
state_root: v3.state_root,
receipts_root: v3.receipts_root,
logs_bloom: v3.logs_bloom,
prev_randao: v3.prev_randao,
block_number: v3.block_number,
gas_limit: v3.gas_limit,
gas_used: v3.gas_used,
timestamp: v3.timestamp,
extra_data: v3.extra_data,
base_fee_per_gas: v3.base_fee_per_gas,
block_hash: v3.block_hash,
transactions: v3.transactions,
#[cfg(feature = "withdrawals")]
withdrawals: v3
.withdrawals
.map(|v| {
Into::<Vec<_>>::into(v)
.into_iter()
.map(Into::into)
.collect::<Vec<_>>()
.into()
})
.ok_or(Error::BadConversion("Null withdrawal field converting JsonExecutionPayloadV3 -> ExecutionPayloadCapella".to_string()))?
})),
ForkName::Eip4844 => Ok(ExecutionPayload::Eip4844(ExecutionPayloadEip4844 {
parent_hash: v3.parent_hash,
fee_recipient: v3.fee_recipient,
state_root: v3.state_root,
receipts_root: v3.receipts_root,
logs_bloom: v3.logs_bloom,
prev_randao: v3.prev_randao,
block_number: v3.block_number,
gas_limit: v3.gas_limit,
gas_used: v3.gas_used,
timestamp: v3.timestamp,
extra_data: v3.extra_data,
base_fee_per_gas: v3.base_fee_per_gas,
// FIXME: excess_blobs probably will be an option whenever the engine API is finalized
excess_blobs: v3.excess_blobs,
block_hash: v3.block_hash,
transactions: v3.transactions,
#[cfg(feature = "withdrawals")]
withdrawals: v3
.withdrawals
.map(|v| {
Vec::from(v)
.into_iter()
.map(Into::into)
.collect::<Vec<_>>()
.into()
})
.ok_or(Error::BadConversion("Null withdrawal field converting JsonExecutionPayloadV3 -> ExecutionPayloadEip4844".to_string()))?,
})),
_ => Err(Error::UnsupportedForkVariant(format!("Unsupported conversion from JsonExecutionPayloadV3 for {}", fork_name))),
}
}
}
}
impl<T: EthSpec> From<ExecutionPayload<T>> for JsonExecutionPayload<T> {
fn from(payload: ExecutionPayload<T>) -> Self {
impl<T: EthSpec> TryFrom<ExecutionPayload<T>> for JsonExecutionPayloadV1<T> {
type Error = Error;
fn try_from(payload: ExecutionPayload<T>) -> Result<Self, Error> {
match payload {
ExecutionPayload::Merge(merge) => Self::V1(JsonExecutionPayloadV1 {
ExecutionPayload::Merge(merge) => Ok(JsonExecutionPayloadV1 {
parent_hash: merge.parent_hash,
fee_recipient: merge.fee_recipient,
state_root: merge.state_root,
@ -347,7 +277,40 @@ impl<T: EthSpec> From<ExecutionPayload<T>> for JsonExecutionPayload<T> {
block_hash: merge.block_hash,
transactions: merge.transactions,
}),
ExecutionPayload::Capella(capella) => Self::V2(JsonExecutionPayloadV2 {
ExecutionPayload::Capella(_) => Err(Error::UnsupportedForkVariant(format!(
"Unsupported conversion to JsonExecutionPayloadV1 for {}",
ForkName::Capella
))),
ExecutionPayload::Eip4844(_) => Err(Error::UnsupportedForkVariant(format!(
"Unsupported conversion to JsonExecutionPayloadV1 for {}",
ForkName::Eip4844
))),
}
}
}
impl<T: EthSpec> TryFrom<ExecutionPayload<T>> for JsonExecutionPayloadV2<T> {
type Error = Error;
fn try_from(payload: ExecutionPayload<T>) -> Result<Self, Error> {
match payload {
ExecutionPayload::Merge(merge) => Ok(JsonExecutionPayloadV2 {
parent_hash: merge.parent_hash,
fee_recipient: merge.fee_recipient,
state_root: merge.state_root,
receipts_root: merge.receipts_root,
logs_bloom: merge.logs_bloom,
prev_randao: merge.prev_randao,
block_number: merge.block_number,
gas_limit: merge.gas_limit,
gas_used: merge.gas_used,
timestamp: merge.timestamp,
extra_data: merge.extra_data,
base_fee_per_gas: merge.base_fee_per_gas,
block_hash: merge.block_hash,
transactions: merge.transactions,
withdrawals: None,
}),
ExecutionPayload::Capella(capella) => Ok(JsonExecutionPayloadV2 {
parent_hash: capella.parent_hash,
fee_recipient: capella.fee_recipient,
state_root: capella.state_root,
@ -363,27 +326,20 @@ impl<T: EthSpec> From<ExecutionPayload<T>> for JsonExecutionPayload<T> {
block_hash: capella.block_hash,
transactions: capella.transactions,
#[cfg(feature = "withdrawals")]
withdrawals: capella.withdrawals,
}),
ExecutionPayload::Eip4844(eip4844) => Self::V3(JsonExecutionPayloadV3 {
parent_hash: eip4844.parent_hash,
fee_recipient: eip4844.fee_recipient,
state_root: eip4844.state_root,
receipts_root: eip4844.receipts_root,
logs_bloom: eip4844.logs_bloom,
prev_randao: eip4844.prev_randao,
block_number: eip4844.block_number,
gas_limit: eip4844.gas_limit,
gas_used: eip4844.gas_used,
timestamp: eip4844.timestamp,
extra_data: eip4844.extra_data,
base_fee_per_gas: eip4844.base_fee_per_gas,
excess_blobs: eip4844.excess_blobs,
block_hash: eip4844.block_hash,
transactions: eip4844.transactions,
#[cfg(feature = "withdrawals")]
withdrawals: eip4844.withdrawals,
withdrawals: Some(
Vec::from(capella.withdrawals)
.into_iter()
.map(Into::into)
.collect::<Vec<_>>()
.into(),
),
#[cfg(not(feature = "withdrawals"))]
withdrawals: None,
}),
ExecutionPayload::Eip4844(_) => Err(Error::UnsupportedForkVariant(format!(
"Unsupported conversion to JsonExecutionPayloadV1 for {}",
ForkName::Eip4844
))),
}
}
}
@ -424,12 +380,15 @@ impl From<JsonWithdrawal> for Withdrawal {
#[superstruct(
variants(V1, V2),
variant_attributes(derive(Clone, Debug, PartialEq, Serialize, Deserialize),),
variant_attributes(
derive(Clone, Debug, PartialEq, Serialize, Deserialize),
serde(rename_all = "camelCase")
),
cast_error(ty = "Error", expr = "Error::IncorrectStateVariant"),
partial_getter_error(ty = "Error", expr = "Error::IncorrectStateVariant")
)]
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", untagged)]
#[serde(untagged)]
pub struct JsonPayloadAttributes {
#[serde(with = "eth2_serde_utils::u64_hex_be")]
pub timestamp: u64,
@ -437,7 +396,7 @@ pub struct JsonPayloadAttributes {
pub suggested_fee_recipient: Address,
#[cfg(feature = "withdrawals")]
#[superstruct(only(V2))]
pub withdrawals: Vec<JsonWithdrawal>,
pub withdrawals: Option<Vec<JsonWithdrawal>>,
}
impl From<PayloadAttributes> for JsonPayloadAttributes {
@ -453,7 +412,9 @@ impl From<PayloadAttributes> for JsonPayloadAttributes {
prev_randao: pa.prev_randao,
suggested_fee_recipient: pa.suggested_fee_recipient,
#[cfg(feature = "withdrawals")]
withdrawals: pa.withdrawals.into_iter().map(Into::into).collect(),
withdrawals: pa
.withdrawals
.map(|w| w.into_iter().map(Into::into).collect()),
}),
}
}
@ -472,7 +433,9 @@ impl From<JsonPayloadAttributes> for PayloadAttributes {
prev_randao: jpa.prev_randao,
suggested_fee_recipient: jpa.suggested_fee_recipient,
#[cfg(feature = "withdrawals")]
withdrawals: jpa.withdrawals.into_iter().map(Into::into).collect(),
withdrawals: jpa
.withdrawals
.map(|jw| jw.into_iter().map(Into::into).collect()),
}),
}
}
@ -488,16 +451,16 @@ pub struct JsonBlobBundles<T: EthSpec> {
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct JsonForkChoiceStateV1 {
pub struct JsonForkchoiceStateV1 {
pub head_block_hash: ExecutionBlockHash,
pub safe_block_hash: ExecutionBlockHash,
pub finalized_block_hash: ExecutionBlockHash,
}
impl From<ForkChoiceState> for JsonForkChoiceStateV1 {
fn from(f: ForkChoiceState) -> Self {
impl From<ForkchoiceState> for JsonForkchoiceStateV1 {
fn from(f: ForkchoiceState) -> Self {
// Use this verbose deconstruction pattern to ensure no field is left unused.
let ForkChoiceState {
let ForkchoiceState {
head_block_hash,
safe_block_hash,
finalized_block_hash,
@ -511,10 +474,10 @@ impl From<ForkChoiceState> for JsonForkChoiceStateV1 {
}
}
impl From<JsonForkChoiceStateV1> for ForkChoiceState {
fn from(j: JsonForkChoiceStateV1) -> Self {
impl From<JsonForkchoiceStateV1> for ForkchoiceState {
fn from(j: JsonForkchoiceStateV1) -> Self {
// Use this verbose deconstruction pattern to ensure no field is left unused.
let JsonForkChoiceStateV1 {
let JsonForkchoiceStateV1 {
head_block_hash,
safe_block_hash,
finalized_block_hash,
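Replacing the blanket From<JsonExecutionPayload> conversions with try_into_execution_payload(fork_name) reflects that the JSON shape alone no longer pins down the consensus variant: over engine_getPayloadV2, a pre-Capella and a Capella payload differ only in the optional withdrawals field. A rough sketch of that fork-driven conversion with simplified stand-in types (type and field names are illustrative):

// The same V2 wire object maps to different consensus variants depending on
// the caller's fork context; the Capella conversion additionally requires the
// optional withdrawals field to be present.
struct WirePayloadV2 {
    block_number: u64,
    withdrawals: Option<Vec<u64>>,
}

enum Fork {
    Merge,
    Capella,
}

enum ConsensusPayload {
    Merge { block_number: u64 },
    Capella { block_number: u64, withdrawals: Vec<u64> },
}

fn convert(wire: WirePayloadV2, fork: Fork) -> Result<ConsensusPayload, String> {
    match fork {
        Fork::Merge => Ok(ConsensusPayload::Merge {
            block_number: wire.block_number,
        }),
        Fork::Capella => Ok(ConsensusPayload::Capella {
            block_number: wire.block_number,
            withdrawals: wire
                .withdrawals
                .ok_or("null withdrawals for a Capella payload")?,
        }),
    }
}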

View File

@ -88,7 +88,7 @@ impl State {
}
#[derive(Copy, Clone, PartialEq, Debug)]
pub struct ForkChoiceState {
pub struct ForkchoiceState {
pub head_block_hash: ExecutionBlockHash,
pub safe_block_hash: ExecutionBlockHash,
pub finalized_block_hash: ExecutionBlockHash,
@ -115,7 +115,7 @@ pub struct Engine {
pub api: HttpJsonRpc,
payload_id_cache: Mutex<LruCache<PayloadIdCacheKey, PayloadId>>,
state: RwLock<State>,
latest_forkchoice_state: RwLock<Option<ForkChoiceState>>,
latest_forkchoice_state: RwLock<Option<ForkchoiceState>>,
executor: TaskExecutor,
log: Logger,
}
@ -161,13 +161,13 @@ impl Engine {
pub async fn notify_forkchoice_updated(
&self,
forkchoice_state: ForkChoiceState,
forkchoice_state: ForkchoiceState,
payload_attributes: Option<PayloadAttributes>,
log: &Logger,
) -> Result<ForkchoiceUpdatedResponse, EngineApiError> {
let response = self
.api
.forkchoice_updated_v1(forkchoice_state, payload_attributes.clone())
.forkchoice_updated(forkchoice_state, payload_attributes.clone())
.await?;
if let Some(payload_id) = response.payload_id {
@ -187,11 +187,11 @@ impl Engine {
Ok(response)
}
async fn get_latest_forkchoice_state(&self) -> Option<ForkChoiceState> {
async fn get_latest_forkchoice_state(&self) -> Option<ForkchoiceState> {
*self.latest_forkchoice_state.read().await
}
pub async fn set_latest_forkchoice_state(&self, state: ForkChoiceState) {
pub async fn set_latest_forkchoice_state(&self, state: ForkchoiceState) {
*self.latest_forkchoice_state.write().await = Some(state);
}
@ -216,7 +216,7 @@ impl Engine {
// For simplicity, payload attributes are never included in this call. It may be
// reasonable to include them in the future.
if let Err(e) = self.api.forkchoice_updated_v1(forkchoice_state, None).await {
if let Err(e) = self.api.forkchoice_updated(forkchoice_state, None).await {
debug!(
self.log,
"Failed to issue latest head to engine";
@ -349,7 +349,7 @@ impl Engine {
// TODO: revisit this - do we need to key on withdrawals as well here?
impl PayloadIdCacheKey {
fn new(state: &ForkChoiceState, attributes: &PayloadAttributes) -> Self {
fn new(state: &ForkchoiceState, attributes: &PayloadAttributes) -> Self {
Self {
head_block_hash: state.head_block_hash,
timestamp: attributes.timestamp(),

View File

@ -11,7 +11,7 @@ use engine_api::Error as ApiError;
pub use engine_api::*;
pub use engine_api::{http, http::deposit_methods, http::HttpJsonRpc};
use engines::{Engine, EngineError};
pub use engines::{EngineState, ForkChoiceState};
pub use engines::{EngineState, ForkchoiceState};
use fork_choice::ForkchoiceUpdateParameters;
use lru::LruCache;
use payload_status::process_payload_status;
@ -32,6 +32,8 @@ use tokio::{
time::sleep,
};
use tokio_stream::wrappers::WatchStream;
#[cfg(feature = "withdrawals")]
use types::Withdrawal;
use types::{AbstractExecPayload, Blob, ExecPayload, ExecutionPayloadEip4844, KzgCommitment};
use types::{
BlindedPayload, BlockType, ChainSpec, Epoch, ExecutionBlockHash, ForkName,
@ -623,6 +625,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
proposer_index: u64,
forkchoice_update_params: ForkchoiceUpdateParameters,
builder_params: BuilderParams,
current_fork: ForkName,
#[cfg(feature = "withdrawals")] withdrawals: Option<Vec<Withdrawal>>,
spec: &ChainSpec,
) -> Result<BlockProposalContents<T, Payload>, Error> {
let suggested_fee_recipient = self.get_suggested_fee_recipient(proposer_index).await;
@ -640,6 +644,9 @@ impl<T: EthSpec> ExecutionLayer<T> {
suggested_fee_recipient,
forkchoice_update_params,
builder_params,
current_fork,
#[cfg(feature = "withdrawals")]
withdrawals,
spec,
)
.await
@ -655,6 +662,9 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao,
suggested_fee_recipient,
forkchoice_update_params,
current_fork,
#[cfg(feature = "withdrawals")]
withdrawals,
)
.await
}
@ -670,6 +680,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
suggested_fee_recipient: Address,
forkchoice_update_params: ForkchoiceUpdateParameters,
builder_params: BuilderParams,
current_fork: ForkName,
#[cfg(feature = "withdrawals")] withdrawals: Option<Vec<Withdrawal>>,
spec: &ChainSpec,
) -> Result<BlockProposalContents<T, Payload>, Error> {
if let Some(builder) = self.builder() {
@ -693,6 +705,9 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao,
suggested_fee_recipient,
forkchoice_update_params,
current_fork,
#[cfg(feature = "withdrawals")]
withdrawals,
)
);
@ -822,6 +837,9 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao,
suggested_fee_recipient,
forkchoice_update_params,
current_fork,
#[cfg(feature = "withdrawals")]
withdrawals,
)
.await
}
@ -834,6 +852,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao: Hash256,
suggested_fee_recipient: Address,
forkchoice_update_params: ForkchoiceUpdateParameters,
current_fork: ForkName,
#[cfg(feature = "withdrawals")] withdrawals: Option<Vec<Withdrawal>>,
) -> Result<BlockProposalContents<T, Payload>, Error> {
self.get_full_payload_with(
parent_hash,
@ -841,6 +861,9 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao,
suggested_fee_recipient,
forkchoice_update_params,
current_fork,
#[cfg(feature = "withdrawals")]
withdrawals,
noop,
)
.await
@ -854,6 +877,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao: Hash256,
suggested_fee_recipient: Address,
forkchoice_update_params: ForkchoiceUpdateParameters,
current_fork: ForkName,
#[cfg(feature = "withdrawals")] withdrawals: Option<Vec<Withdrawal>>,
) -> Result<BlockProposalContents<T, Payload>, Error> {
self.get_full_payload_with(
parent_hash,
@ -861,6 +886,9 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao,
suggested_fee_recipient,
forkchoice_update_params,
current_fork,
#[cfg(feature = "withdrawals")]
withdrawals,
Self::cache_payload,
)
.await
@ -873,10 +901,14 @@ impl<T: EthSpec> ExecutionLayer<T> {
prev_randao: Hash256,
suggested_fee_recipient: Address,
forkchoice_update_params: ForkchoiceUpdateParameters,
current_fork: ForkName,
#[cfg(feature = "withdrawals")] withdrawals: Option<Vec<Withdrawal>>,
f: fn(&ExecutionLayer<T>, &ExecutionPayload<T>) -> Option<ExecutionPayload<T>>,
) -> Result<BlockProposalContents<T, Payload>, Error> {
#[cfg(feature = "withdrawals")]
let withdrawals_ref = &withdrawals;
self.engine()
.request(|engine| async move {
.request(move |engine| async move {
let payload_id = if let Some(id) = engine
.get_payload_id(parent_hash, timestamp, prev_randao, suggested_fee_recipient)
.await
@ -894,7 +926,7 @@ impl<T: EthSpec> ExecutionLayer<T> {
&metrics::EXECUTION_LAYER_PRE_PREPARED_PAYLOAD_ID,
&[metrics::MISS],
);
let fork_choice_state = ForkChoiceState {
let fork_choice_state = ForkchoiceState {
head_block_hash: parent_hash,
safe_block_hash: forkchoice_update_params
.justified_hash
@ -903,12 +935,14 @@ impl<T: EthSpec> ExecutionLayer<T> {
.finalized_hash
.unwrap_or_else(ExecutionBlockHash::zero),
};
// FIXME: This will have to properly handle forks. To do that,
// withdrawals will need to be passed into this function
let payload_attributes = PayloadAttributes::V1(PayloadAttributesV1 {
// This must always be the latest PayloadAttributes
// FIXME: How do non-Capella EIP4844 testnets handle this?
let payload_attributes = PayloadAttributes::V2(PayloadAttributesV2 {
timestamp,
prev_randao,
suggested_fee_recipient,
#[cfg(feature = "withdrawals")]
withdrawals: withdrawals_ref.clone(),
});
let response = engine
@ -935,16 +969,22 @@ impl<T: EthSpec> ExecutionLayer<T> {
};
let blob_fut = async {
//FIXME(sean) do a fork check here and return None otherwise
debug!(
self.log(),
"Issuing engine_getBlobsBundle";
"suggested_fee_recipient" => ?suggested_fee_recipient,
"prev_randao" => ?prev_randao,
"timestamp" => timestamp,
"parent_hash" => ?parent_hash,
);
Some(engine.api.get_blobs_bundle_v1::<T>(payload_id).await)
match current_fork {
ForkName::Base | ForkName::Altair | ForkName::Merge | ForkName::Capella => {
None
}
ForkName::Eip4844 => {
debug!(
self.log(),
"Issuing engine_getBlobsBundle";
"suggested_fee_recipient" => ?suggested_fee_recipient,
"prev_randao" => ?prev_randao,
"timestamp" => timestamp,
"parent_hash" => ?parent_hash,
);
Some(engine.api.get_blobs_bundle_v1::<T>(payload_id).await)
}
}
};
let payload_fut = async {
debug!(
@ -955,9 +995,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
"timestamp" => timestamp,
"parent_hash" => ?parent_hash,
);
engine.api.get_payload_v1::<T>(payload_id).await
engine.api.get_payload::<T>(current_fork, payload_id).await
};
let (blob, payload) = tokio::join!(blob_fut, payload_fut);
let payload = payload.map(|full_payload| {
if full_payload.fee_recipient() != suggested_fee_recipient {
@ -1030,7 +1069,7 @@ impl<T: EthSpec> ExecutionLayer<T> {
let result = self
.engine()
.request(|engine| engine.api.new_payload_v1(execution_payload.clone()))
.request(|engine| engine.api.new_payload(execution_payload.clone()))
.await;
if let Ok(status) = &result {
@ -1160,7 +1199,7 @@ impl<T: EthSpec> ExecutionLayer<T> {
}
}
let forkchoice_state = ForkChoiceState {
let forkchoice_state = ForkchoiceState {
head_block_hash,
safe_block_hash: justified_block_hash,
finalized_block_hash,

View File

@ -1,4 +1,4 @@
use crate::engines::ForkChoiceState;
use crate::engines::ForkchoiceState;
use crate::{
engine_api::{
json_structures::{
@ -13,8 +13,7 @@ use std::collections::HashMap;
use tree_hash::TreeHash;
use tree_hash_derive::TreeHash;
use types::{
EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadCapella, ExecutionPayloadMerge,
Hash256, Uint256,
EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadMerge, Hash256, Uint256,
};
const GAS_LIMIT: u64 = 16384;
@ -315,7 +314,7 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
pub fn forkchoice_updated_v1(
&mut self,
forkchoice_state: ForkChoiceState,
forkchoice_state: ForkchoiceState,
payload_attributes: Option<PayloadAttributes>,
) -> Result<JsonForkchoiceUpdatedV1Response, String> {
if let Some(payload) = self
@ -369,7 +368,6 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
let id = payload_id_from_u64(self.next_payload_id);
self.next_payload_id += 1;
// FIXME: think about how to test different forks
let mut execution_payload = match &attributes {
PayloadAttributes::V1(pa) => ExecutionPayload::Merge(ExecutionPayloadMerge {
parent_hash: forkchoice_state.head_block_hash,
@ -388,7 +386,8 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
transactions: vec![].into(),
}),
PayloadAttributes::V2(pa) => {
ExecutionPayload::Capella(ExecutionPayloadCapella {
// FIXME: think about how to test different forks
ExecutionPayload::Merge(ExecutionPayloadMerge {
parent_hash: forkchoice_state.head_block_hash,
fee_recipient: pa.suggested_fee_recipient,
receipts_root: Hash256::repeat_byte(42),
@ -403,14 +402,6 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
base_fee_per_gas: Uint256::one(),
block_hash: ExecutionBlockHash::zero(),
transactions: vec![].into(),
#[cfg(feature = "withdrawals")]
withdrawals: pa
.withdrawals
.iter()
.cloned()
.map(Into::into)
.collect::<Vec<_>>()
.into(),
})
}
};

View File

@ -4,7 +4,7 @@ use crate::json_structures::*;
use serde::de::DeserializeOwned;
use serde_json::Value as JsonValue;
use std::sync::Arc;
use types::EthSpec;
use types::{EthSpec, ForkName};
pub async fn handle_rpc<T: EthSpec>(
body: JsonValue,
@ -97,7 +97,8 @@ pub async fn handle_rpc<T: EthSpec>(
Some(
ctx.execution_block_generator
.write()
.new_payload(request.into()),
// FIXME: should this worry about other forks?
.new_payload(request.try_into_execution_payload(ForkName::Merge).unwrap()),
)
} else {
None
@ -117,10 +118,10 @@ pub async fn handle_rpc<T: EthSpec>(
.get_payload(&id)
.ok_or_else(|| format!("no payload for id {:?}", id))?;
Ok(serde_json::to_value(JsonExecutionPayload::from(response)).unwrap())
Ok(serde_json::to_value(JsonExecutionPayloadV1::try_from(response).unwrap()).unwrap())
}
ENGINE_FORKCHOICE_UPDATED_V1 => {
let forkchoice_state: JsonForkChoiceStateV1 = get_param(params, 0)?;
let forkchoice_state: JsonForkchoiceStateV1 = get_param(params, 0)?;
let payload_attributes: Option<JsonPayloadAttributes> = get_param(params, 1)?;
let head_block_hash = forkchoice_state.head_block_hash;

View File

@ -26,7 +26,8 @@ use task_executor::TaskExecutor;
use tempfile::NamedTempFile;
use tree_hash::TreeHash;
use types::{
Address, BeaconState, BlindedPayload, ChainSpec, EthSpec, ExecPayload, Hash256, Slot, Uint256,
Address, BeaconState, BlindedPayload, ChainSpec, EthSpec, ExecPayload, ForkName, Hash256, Slot,
Uint256,
};
#[derive(Clone)]
@ -313,6 +314,10 @@ impl<E: EthSpec> mev_build_rs::BlindedBlockProvider for MockBuilder<E> {
*prev_randao,
fee_recipient,
forkchoice_update_params,
// TODO: do we need to write a test for this if this is the Capella fork?
ForkName::Merge,
#[cfg(feature = "withdrawals")]
None,
)
.await
.map_err(convert_err)?

View File

@ -114,7 +114,7 @@ impl<T: EthSpec> MockExecutionLayer<T> {
suggested_fee_recipient: Address::repeat_byte(42),
// FIXME: think about adding withdrawals here..
#[cfg(feature = "withdrawals")]
withdrawals: vec![],
withdrawals: Some(vec![]),
})
}
},
@ -159,6 +159,10 @@ impl<T: EthSpec> MockExecutionLayer<T> {
validator_index,
forkchoice_update_params,
builder_params,
// FIXME: do we need to consider other forks somehow? What about withdrawals?
ForkName::Merge,
#[cfg(feature = "withdrawals")]
Some(vec![]),
&self.spec,
)
.await
@ -191,6 +195,10 @@ impl<T: EthSpec> MockExecutionLayer<T> {
validator_index,
forkchoice_update_params,
builder_params,
// FIXME: do we need to consider other forks somehow? What about withdrawals?
ForkName::Merge,
#[cfg(feature = "withdrawals")]
Some(vec![]),
&self.spec,
)
.await

View File

@ -672,12 +672,12 @@ fn handle_v2_response<T: EthSpec>(
decoded_buffer,
)?),
)))),
ForkName::Capella => Ok(Some(RPCResponse::BlocksByRange(Arc::new(
ForkName::Capella => Ok(Some(RPCResponse::BlocksByRoot(Arc::new(
SignedBeaconBlock::Capella(SignedBeaconBlockCapella::from_ssz_bytes(
decoded_buffer,
)?),
)))),
ForkName::Eip4844 => Ok(Some(RPCResponse::BlocksByRange(Arc::new(
ForkName::Eip4844 => Ok(Some(RPCResponse::BlocksByRoot(Arc::new(
SignedBeaconBlock::Eip4844(SignedBeaconBlockEip4844::from_ssz_bytes(
decoded_buffer,
)?),
@ -723,8 +723,8 @@ mod tests {
};
use std::sync::Arc;
use types::{
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockMerge, Epoch, ForkContext,
FullPayload, Hash256, Signature, SignedBeaconBlock, Slot,
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockMerge, EmptyBlock, Epoch,
ForkContext, FullPayload, Hash256, Signature, SignedBeaconBlock, Slot,
};
use snap::write::FrameEncoder;

View File

@ -22,8 +22,8 @@ use tokio_util::{
};
use types::BlobsSidecar;
use types::{
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockMerge, Blob, EthSpec, ForkContext,
ForkName, Hash256, MainnetEthSpec, Signature, SignedBeaconBlock,
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockMerge, Blob, EmptyBlock, EthSpec,
ForkContext, ForkName, Hash256, MainnetEthSpec, Signature, SignedBeaconBlock,
};
lazy_static! {

View File

@ -11,7 +11,7 @@ use crate::Subnet;
pub const TOPIC_PREFIX: &str = "eth2";
pub const SSZ_SNAPPY_ENCODING_POSTFIX: &str = "ssz_snappy";
pub const BEACON_BLOCK_TOPIC: &str = "beacon_block";
pub const BEACON_BLOCK_AND_BLOBS_SIDECAR_TOPIC: &str = "beacon_blocks_and_blobs_sidecar";
pub const BEACON_BLOCK_AND_BLOBS_SIDECAR_TOPIC: &str = "beacon_block_and_blobs_sidecar";
pub const BEACON_AGGREGATE_AND_PROOF_TOPIC: &str = "beacon_aggregate_and_proof";
pub const BEACON_ATTESTATION_PREFIX: &str = "beacon_attestation_";
pub const VOLUNTARY_EXIT_TOPIC: &str = "voluntary_exit";

View File

@ -32,7 +32,8 @@ use std::ptr;
use types::{
sync_aggregate::Error as SyncAggregateError, typenum::Unsigned, Attestation, AttestationData,
AttesterSlashing, BeaconState, BeaconStateError, ChainSpec, Epoch, EthSpec, ProposerSlashing,
SignedVoluntaryExit, Slot, SyncAggregate, SyncCommitteeContribution, Validator,
SignedBlsToExecutionChange, SignedVoluntaryExit, Slot, SyncAggregate,
SyncCommitteeContribution, Validator,
};
type SyncContributions<T> = RwLock<HashMap<SyncAggregateId, Vec<SyncCommitteeContribution<T>>>>;
@ -49,6 +50,8 @@ pub struct OperationPool<T: EthSpec + Default> {
proposer_slashings: RwLock<HashMap<u64, SigVerifiedOp<ProposerSlashing, T>>>,
/// Map from exiting validator to their exit data.
voluntary_exits: RwLock<HashMap<u64, SigVerifiedOp<SignedVoluntaryExit, T>>>,
/// Map from credential changing validator to their execution change data.
bls_to_execution_changes: RwLock<HashMap<u64, SigVerifiedOp<SignedBlsToExecutionChange, T>>>,
/// Reward cache for accelerating attestation packing.
reward_cache: RwLock<RewardCache>,
_phantom: PhantomData<T>,
@ -509,6 +512,16 @@ impl<T: EthSpec> OperationPool<T> {
);
}
/// Get a list of execution changes for inclusion in a block.
pub fn get_bls_to_execution_changes(
&self,
state: &BeaconState<T>,
spec: &ChainSpec,
) -> Vec<SignedBlsToExecutionChange> {
// FIXME: actually implement this
return vec![];
}
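The stub above intentionally returns an empty list. A likely shape for the eventual implementation, sketched here against simplified stand-in types (the real pool stores `SigVerifiedOp` wrappers and should also re-check each change against the head state and fork, which this sketch omits), is to drain the pending map up to the Capella per-block limit:

use std::collections::HashMap;

// Hypothetical, simplified stand-ins for the real pool types; the real pool
// stores `SigVerifiedOp` wrappers keyed by validator index.
#[derive(Clone)]
struct SignedBlsToExecutionChange {
    validator_index: u64,
}

// Capella caps the number of credential changes per block at 16.
const MAX_BLS_TO_EXECUTION_CHANGES: usize = 16;

struct OperationPool {
    bls_to_execution_changes: HashMap<u64, SignedBlsToExecutionChange>,
}

impl OperationPool {
    /// Return up to the per-block maximum of pending credential changes.
    fn get_bls_to_execution_changes(&self) -> Vec<SignedBlsToExecutionChange> {
        self.bls_to_execution_changes
            .values()
            .take(MAX_BLS_TO_EXECUTION_CHANGES)
            .cloned()
            .collect()
    }
}

fn main() {
    let mut pool = OperationPool { bls_to_execution_changes: HashMap::new() };
    pool.bls_to_execution_changes
        .insert(7, SignedBlsToExecutionChange { validator_index: 7 });
    assert_eq!(pool.get_bls_to_execution_changes()[0].validator_index, 7);
}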
/// Prune all types of transactions given the latest head state and head fork.
pub fn prune_all(&self, head_state: &BeaconState<T>, current_epoch: Epoch) {
self.prune_attestations(current_epoch);

View File

@ -142,6 +142,8 @@ impl<T: EthSpec> PersistedOperationPool<T> {
attester_slashings,
proposer_slashings,
voluntary_exits,
// FIXME: IMPLEMENT THIS
bls_to_execution_changes: Default::default(),
reward_cache: Default::default(),
_phantom: Default::default(),
};

View File

@ -492,17 +492,15 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
pub fn get_blobs(&self, block_root: &Hash256) -> Result<Option<BlobsSidecar<E>>, Error> {
if let Some(blobs) = self.blob_cache.lock().get(block_root) {
Ok(Some(blobs.clone()))
} else if let Some(bytes) = self
.hot_db
.get_bytes(DBColumn::BeaconBlob.into(), block_root.as_bytes())?
{
let ret = BlobsSidecar::from_ssz_bytes(&bytes)?;
self.blob_cache.lock().put(*block_root, ret.clone());
Ok(Some(ret))
} else {
if let Some(bytes) = self
.hot_db
.get_bytes(DBColumn::BeaconBlob.into(), block_root.as_bytes())?
{
let ret = BlobsSidecar::from_ssz_bytes(&bytes)?;
self.blob_cache.lock().put(*block_root, ret.clone());
Ok(Some(ret))
} else {
Ok(None)
}
Ok(None)
}
}

View File

@ -107,13 +107,10 @@ where
// Withdrawals
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
pub withdrawal_queue: VariableList<Withdrawal, T::WithdrawalQueueLimit>,
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
pub next_withdrawal_index: u64,
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
pub next_partial_withdrawal_validator_index: u64,
pub next_withdrawal_validator_index: u64,
}
/// Implement the conversion function from BeaconState -> PartialBeaconState.
@ -215,9 +212,8 @@ impl<T: EthSpec> PartialBeaconState<T> {
next_sync_committee,
inactivity_scores,
latest_execution_payload_header,
withdrawal_queue,
next_withdrawal_index,
next_partial_withdrawal_validator_index
next_withdrawal_validator_index
]
),
#[cfg(not(feature = "withdrawals"))]
@ -248,9 +244,8 @@ impl<T: EthSpec> PartialBeaconState<T> {
next_sync_committee,
inactivity_scores,
latest_execution_payload_header,
withdrawal_queue,
next_withdrawal_index,
next_partial_withdrawal_validator_index
next_withdrawal_validator_index
]
),
#[cfg(not(feature = "withdrawals"))]
@ -467,9 +462,8 @@ impl<E: EthSpec> TryInto<BeaconState<E>> for PartialBeaconState<E> {
next_sync_committee,
inactivity_scores,
latest_execution_payload_header,
withdrawal_queue,
next_withdrawal_index,
next_partial_withdrawal_validator_index
next_withdrawal_validator_index
]
),
#[cfg(not(feature = "withdrawals"))]
@ -498,9 +492,8 @@ impl<E: EthSpec> TryInto<BeaconState<E>> for PartialBeaconState<E> {
next_sync_committee,
inactivity_scores,
latest_execution_payload_header,
withdrawal_queue,
next_withdrawal_index,
next_partial_withdrawal_validator_index
next_withdrawal_validator_index
]
),
#[cfg(not(feature = "withdrawals"))]

View File

@ -35,3 +35,5 @@ procinfo = { version = "0.4.2", optional = true }
[features]
default = ["lighthouse"]
lighthouse = ["proto_array", "psutil", "procinfo", "store", "slashing_protection"]
withdrawals = ["store/withdrawals"]
withdrawals-processing = ["store/withdrawals-processing"]

View File

@ -4,7 +4,6 @@ mod get_attesting_indices;
mod get_indexed_attestation;
mod initiate_validator_exit;
mod slash_validator;
mod withdraw_balance;
pub mod altair;
pub mod base;
@ -15,8 +14,6 @@ pub use get_attesting_indices::{get_attesting_indices, get_attesting_indices_fro
pub use get_indexed_attestation::get_indexed_attestation;
pub use initiate_validator_exit::initiate_validator_exit;
pub use slash_validator::slash_validator;
#[cfg(feature = "withdrawals")]
pub use withdraw_balance::withdraw_balance;
use safe_arith::SafeArith;
use types::{BeaconState, BeaconStateError, EthSpec};

View File

@ -1,29 +0,0 @@
use crate::common::decrease_balance;
use safe_arith::SafeArith;
use types::{BeaconStateError as Error, *};
#[cfg(feature = "withdrawals")]
pub fn withdraw_balance<T: EthSpec>(
state: &mut BeaconState<T>,
validator_index: usize,
amount: u64,
) -> Result<(), Error> {
decrease_balance(state, validator_index as usize, amount)?;
let withdrawal_address = Address::from_slice(
&state
.get_validator(validator_index)?
.withdrawal_credentials
.as_bytes()[12..],
);
let withdrawal = Withdrawal {
index: *state.next_withdrawal_index()?,
validator_index: validator_index as u64,
address: withdrawal_address,
amount,
};
state.next_withdrawal_index_mut()?.safe_add_assign(1)?;
state.withdrawal_queue_mut()?.push(withdrawal)?;
Ok(())
}

View File

@ -19,6 +19,8 @@ pub use process_operations::process_operations;
pub use verify_attestation::{
verify_attestation_for_block_inclusion, verify_attestation_for_state,
};
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub use verify_bls_to_execution_change::verify_bls_to_execution_change;
pub use verify_deposit::{
get_existing_validator_index, verify_deposit_merkle_proof, verify_deposit_signature,
};
@ -34,10 +36,15 @@ pub mod signature_sets;
pub mod tests;
mod verify_attestation;
mod verify_attester_slashing;
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
mod verify_bls_to_execution_change;
mod verify_deposit;
mod verify_exit;
mod verify_proposer_slashing;
#[cfg(feature = "withdrawals-processing")]
use crate::common::decrease_balance;
#[cfg(feature = "arbitrary-fuzz")]
use arbitrary::Arbitrary;
@ -161,6 +168,8 @@ pub fn per_block_processing<T: EthSpec, Payload: AbstractExecPayload<T>>(
// previous block.
if is_execution_enabled(state, block.body()) {
let payload = block.body().execution_payload()?;
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
process_withdrawals::<T, Payload>(state, payload, spec)?;
process_execution_payload::<T, Payload>(state, payload, spec)?;
}
@ -454,3 +463,100 @@ pub fn compute_timestamp_at_slot<T: EthSpec>(
.safe_mul(spec.seconds_per_slot)
.and_then(|since_genesis| state.genesis_time().safe_add(since_genesis))
}
/// FIXME: add link to this function once the spec is stable
#[cfg(feature = "withdrawals")]
pub fn get_expected_withdrawals<T: EthSpec>(
state: &BeaconState<T>,
spec: &ChainSpec,
) -> Result<Withdrawals<T>, BlockProcessingError> {
let epoch = state.current_epoch();
let mut withdrawal_index = state.next_withdrawal_index()?;
let mut validator_index = state.next_withdrawal_validator_index()?;
let mut withdrawals = vec![];
if cfg!(not(feature = "withdrawals-processing")) {
return Ok(withdrawals.into());
}
for _ in 0..state.validators().len() {
let validator = state.get_validator(validator_index as usize)?;
let balance = *state.balances().get(validator_index as usize).ok_or(
BeaconStateError::BalancesOutOfBounds(validator_index as usize),
)?;
if validator.is_fully_withdrawable_at(balance, epoch, spec) {
withdrawals.push(Withdrawal {
index: withdrawal_index,
validator_index,
address: validator
.get_eth1_withdrawal_address(spec)
.ok_or(BlockProcessingError::WithdrawalCredentialsInvalid)?,
amount: balance,
});
withdrawal_index.safe_add_assign(1)?;
} else if validator.is_partially_withdrawable_validator(balance, spec) {
withdrawals.push(Withdrawal {
index: withdrawal_index,
validator_index,
address: validator
.get_eth1_withdrawal_address(spec)
.ok_or(BlockProcessingError::WithdrawalCredentialsInvalid)?,
amount: balance.safe_sub(spec.max_effective_balance)?,
});
withdrawal_index.safe_add_assign(1)?;
}
if withdrawals.len() == T::max_withdrawals_per_payload() {
break;
}
validator_index = validator_index
.safe_add(1)?
.safe_rem(state.validators().len() as u64)?;
}
Ok(withdrawals.into())
}
/// FIXME: add link to this function once the spec is stable
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub fn process_withdrawals<'payload, T: EthSpec, Payload: AbstractExecPayload<T>>(
state: &mut BeaconState<T>,
payload: Payload::Ref<'payload>,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
match state {
BeaconState::Merge(_) => Ok(()),
BeaconState::Capella(_) | BeaconState::Eip4844(_) => {
let expected_withdrawals = get_expected_withdrawals(state, spec)?;
let expected_root = expected_withdrawals.tree_hash_root();
let withdrawals_root = payload.withdrawals_root()?;
if expected_root != withdrawals_root {
return Err(BlockProcessingError::WithdrawalsRootMismatch {
expected: expected_root,
found: withdrawals_root,
});
}
for withdrawal in expected_withdrawals.iter() {
decrease_balance(
state,
withdrawal.validator_index as usize,
withdrawal.amount,
)?;
}
if let Some(latest_withdrawal) = expected_withdrawals.last() {
*state.next_withdrawal_index_mut()? = latest_withdrawal.index.safe_add(1)?;
let next_validator_index = latest_withdrawal
.validator_index
.safe_add(1)?
.safe_rem(state.validators().len() as u64)?;
*state.next_withdrawal_validator_index_mut()? = next_validator_index;
}
Ok(())
}
// These variants should never be encountered here, but are included for completeness.
BeaconState::Base(_) | BeaconState::Altair(_) => Ok(()),
}
}
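As a rough illustration of the sweep in `get_expected_withdrawals` above: it walks the validator set starting at the next withdrawal validator index, emits a full withdrawal for fully withdrawable validators and an excess-over-32-ETH withdrawal for partially withdrawable ones, and stops once the payload is full. The standalone sketch below mirrors that logic; the `Validator` fields, gwei constants, and plain arithmetic are simplified assumptions, whereas the real code uses `safe_arith` and the spec predicates on the actual `Validator` type.

// Gwei constants and per-payload limit, hard-coded only for this sketch.
const MAX_EFFECTIVE_BALANCE: u64 = 32_000_000_000;
const MAX_WITHDRAWALS_PER_PAYLOAD: usize = 16;

// Simplified validator record; the real code evaluates the spec predicates
// `is_fully_withdrawable_at` and `is_partially_withdrawable_validator`.
struct Validator {
    fully_withdrawable: bool,
    partially_withdrawable: bool,
}

struct Withdrawal {
    index: u64,
    validator_index: u64,
    amount: u64,
}

fn expected_withdrawals(
    validators: &[Validator],
    balances: &[u64],
    mut withdrawal_index: u64,
    mut validator_index: u64,
) -> Vec<Withdrawal> {
    let mut withdrawals = vec![];
    // Sweep at most once over the whole registry, starting where the last
    // payload's sweep left off.
    for _ in 0..validators.len() {
        let validator = &validators[validator_index as usize];
        let balance = balances[validator_index as usize];
        if validator.fully_withdrawable {
            // Withdraw the entire balance.
            withdrawals.push(Withdrawal { index: withdrawal_index, validator_index, amount: balance });
            withdrawal_index += 1;
        } else if validator.partially_withdrawable && balance > MAX_EFFECTIVE_BALANCE {
            // Withdraw only the excess over the max effective balance.
            withdrawals.push(Withdrawal {
                index: withdrawal_index,
                validator_index,
                amount: balance - MAX_EFFECTIVE_BALANCE,
            });
            withdrawal_index += 1;
        }
        // Stop once the payload is full.
        if withdrawals.len() == MAX_WITHDRAWALS_PER_PAYLOAD {
            break;
        }
        validator_index = (validator_index + 1) % validators.len() as u64;
    }
    withdrawals
}

fn main() {
    let validators = vec![
        Validator { fully_withdrawable: false, partially_withdrawable: true },
        Validator { fully_withdrawable: true, partially_withdrawable: false },
    ];
    let balances = vec![33_000_000_000u64, 31_000_000_000];
    let withdrawals = expected_withdrawals(&validators, &balances, 0, 0);
    assert_eq!(withdrawals.len(), 2);
    assert_eq!(withdrawals[0].amount, 1_000_000_000); // excess over 32 ETH
    assert_eq!(withdrawals[1].amount, 31_000_000_000); // full exit balance
    assert_eq!(withdrawals[1].index, 1);
}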

View File

@ -1 +1,2 @@
#[allow(clippy::module_inception)]
pub mod eip4844;

View File

@ -6,8 +6,8 @@ use ssz::Decode;
use ssz_types::VariableList;
use types::consts::eip4844::{BLOB_TX_TYPE, VERSIONED_HASH_VERSION_KZG};
use types::{
AbstractExecPayload, BeaconBlockBodyRef, EthSpec, ExecPayload, FullPayload, FullPayloadRef,
KzgCommitment, Transaction, Transactions, VersionedHash,
AbstractExecPayload, BeaconBlockBodyRef, EthSpec, ExecPayload, KzgCommitment, Transaction,
Transactions, VersionedHash,
};
pub fn process_blob_kzg_commitments<T: EthSpec, Payload: AbstractExecPayload<T>>(
@ -35,7 +35,7 @@ pub fn verify_kzg_commitments_against_transactions<T: EthSpec>(
let nested_iter = transactions
.into_iter()
.filter(|tx| {
tx.get(0)
tx.first()
.map(|tx_type| *tx_type == BLOB_TX_TYPE)
.unwrap_or(false)
})

View File

@ -49,6 +49,10 @@ pub enum BlockProcessingError {
index: usize,
reason: ExitInvalid,
},
BlsExecutionChangeInvalid {
index: usize,
reason: BlsExecutionChangeInvalid,
},
SyncAggregateInvalid {
reason: SyncAggregateInvalid,
},
@ -74,6 +78,10 @@ pub enum BlockProcessingError {
},
ExecutionInvalid,
ConsensusContext(ContextError),
WithdrawalsRootMismatch {
expected: Hash256,
found: Hash256,
},
BlobVersionHashMismatch,
/// The number of commitments in blob transactions in the payload does not match the number
/// of commitments in the block.
@ -86,6 +94,7 @@ pub enum BlockProcessingError {
index: usize,
length: usize,
},
WithdrawalCredentialsInvalid,
}
impl From<BeaconStateError> for BlockProcessingError {
@ -180,7 +189,8 @@ impl_into_block_processing_error_with_index!(
IndexedAttestationInvalid,
AttestationInvalid,
DepositInvalid,
ExitInvalid
ExitInvalid,
BlsExecutionChangeInvalid
);
pub type HeaderValidationError = BlockOperationError<HeaderInvalid>;
@ -190,6 +200,7 @@ pub type AttestationValidationError = BlockOperationError<AttestationInvalid>;
pub type SyncCommitteeMessageValidationError = BlockOperationError<SyncAggregateInvalid>;
pub type DepositValidationError = BlockOperationError<DepositInvalid>;
pub type ExitValidationError = BlockOperationError<ExitInvalid>;
pub type BlsExecutionChangeValidationError = BlockOperationError<BlsExecutionChangeInvalid>;
#[derive(Debug, PartialEq, Clone)]
pub enum BlockOperationError<T> {
@ -405,6 +416,18 @@ pub enum ExitInvalid {
SignatureSetError(SignatureSetError),
}
#[derive(Debug, PartialEq, Clone)]
pub enum BlsExecutionChangeInvalid {
/// The specified validator is not in the state's validator registry.
ValidatorUnknown(u64),
/// The validator does not have BLS withdrawal credentials prior to this change.
NonBlsWithdrawalCredentials,
/// The provided BLS pubkey does not match the validator's withdrawal credentials.
WithdrawalCredentialsMismatch,
/// The signature is invalid.
BadSignature,
}
#[derive(Debug, PartialEq, Clone)]
pub enum SyncAggregateInvalid {
/// One or more of the aggregate public keys is invalid.

View File

@ -33,6 +33,12 @@ pub fn process_operations<'a, T: EthSpec, Payload: AbstractExecPayload<T>>(
process_attestations(state, block_body, verify_signatures, ctxt, spec)?;
process_deposits(state, block_body.deposits(), spec)?;
process_exits(state, block_body.voluntary_exits(), verify_signatures, spec)?;
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
if let Ok(bls_to_execution_changes) = block_body.bls_to_execution_changes() {
process_bls_to_execution_changes(state, bls_to_execution_changes, verify_signatures, spec)?;
}
Ok(())
}
@ -279,6 +285,32 @@ pub fn process_exits<T: EthSpec>(
Ok(())
}
/// Validates each `bls_to_execution_change` and updates the state.
///
/// Returns `Ok(())` if the validation and state updates completed successfully. Otherwise returns
/// an `Err` describing the invalid object or cause of failure.
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub fn process_bls_to_execution_changes<T: EthSpec>(
state: &mut BeaconState<T>,
bls_to_execution_changes: &[SignedBlsToExecutionChange],
verify_signatures: VerifySignatures,
spec: &ChainSpec,
) -> Result<(), BlockProcessingError> {
for (i, signed_address_change) in bls_to_execution_changes.iter().enumerate() {
verify_bls_to_execution_change(state, signed_address_change, verify_signatures, spec)
.map_err(|e| e.into_with_index(i))?;
state
.get_validator_mut(signed_address_change.message.validator_index as usize)?
.change_withdrawal_credentials(
&signed_address_change.message.to_execution_address,
spec,
);
}
Ok(())
}
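For context on the state update above: `change_withdrawal_credentials` (not shown in this diff) is expected to rewrite the validator's credentials into the ETH1-address form, i.e. a `0x01` prefix byte, eleven zero bytes, and the 20-byte execution address. A minimal standalone sketch of that layout, with the prefix constant and a dummy address as assumptions:

// Capella's execution-address withdrawal prefix.
const ETH1_ADDRESS_WITHDRAWAL_PREFIX_BYTE: u8 = 0x01;

/// Build execution-style withdrawal credentials:
/// byte 0 is the 0x01 prefix, bytes 1..12 are zero, bytes 12..32 are the address.
fn execution_withdrawal_credentials(address: &[u8; 20]) -> [u8; 32] {
    let mut credentials = [0u8; 32];
    credentials[0] = ETH1_ADDRESS_WITHDRAWAL_PREFIX_BYTE;
    credentials[12..].copy_from_slice(address);
    credentials
}

fn main() {
    // Dummy execution address standing in for `to_execution_address`.
    let address = [0xab; 20];
    let credentials = execution_withdrawal_credentials(&address);
    assert_eq!(credentials[0], 0x01);
    assert_eq!(&credentials[1..12], &[0u8; 11]);
    assert_eq!(&credentials[12..], &address);
}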
/// Validates each `Deposit` and updates the state, short-circuiting on an invalid object.
///
/// Returns `Ok(())` if the validation and state updates completed successfully, otherwise returns

View File

@ -11,8 +11,8 @@ use types::{
BeaconStateError, ChainSpec, DepositData, Domain, Epoch, EthSpec, Fork, Hash256,
InconsistentFork, IndexedAttestation, ProposerSlashing, PublicKey, PublicKeyBytes, Signature,
SignedAggregateAndProof, SignedBeaconBlock, SignedBeaconBlockHeader,
SignedContributionAndProof, SignedRoot, SignedVoluntaryExit, SigningData, Slot, SyncAggregate,
SyncAggregatorSelectionData, Unsigned,
SignedBlsToExecutionChange, SignedContributionAndProof, SignedRoot, SignedVoluntaryExit,
SigningData, Slot, SyncAggregate, SyncAggregatorSelectionData, Unsigned,
};
pub type Result<T> = std::result::Result<T, Error>;
@ -156,6 +156,33 @@ where
))
}
pub fn bls_execution_change_signature_set<'a, T: EthSpec>(
state: &'a BeaconState<T>,
signed_address_change: &'a SignedBlsToExecutionChange,
spec: &'a ChainSpec,
) -> Result<SignatureSet<'a>> {
let domain = spec.get_domain(
state.current_epoch(),
Domain::BlsToExecutionChange,
&state.fork(),
state.genesis_validators_root(),
);
let message = signed_address_change.message.signing_root(domain);
let signing_key = Cow::Owned(
signed_address_change
.message
.from_bls_pubkey
.decompress()
.map_err(|_| Error::PublicKeyDecompressionFailed)?,
);
Ok(SignatureSet::single_pubkey(
&signed_address_change.signature,
signing_key,
message,
))
}
/// A signature set that is valid if the block proposers randao reveal signature is correct.
pub fn randao_signature_set<'a, T, F, Payload: AbstractExecPayload<T>>(
state: &'a BeaconState<T>,

View File

@ -0,0 +1,57 @@
use super::errors::{BlockOperationError, BlsExecutionChangeInvalid as Invalid};
use crate::per_block_processing::signature_sets::bls_execution_change_signature_set;
use crate::VerifySignatures;
use eth2_hashing::hash;
use types::*;
type Result<T> = std::result::Result<T, BlockOperationError<Invalid>>;
fn error(reason: Invalid) -> BlockOperationError<Invalid> {
BlockOperationError::invalid(reason)
}
/// Indicates if a `BlsToExecutionChange` is valid to be included in a block in the current epoch of the given
/// state.
///
/// Returns `Ok(())` if the `SignedBlsToExecutionChange` is valid, otherwise indicates the reason for invalidity.
pub fn verify_bls_to_execution_change<T: EthSpec>(
state: &BeaconState<T>,
signed_address_change: &SignedBlsToExecutionChange,
verify_signatures: VerifySignatures,
spec: &ChainSpec,
) -> Result<()> {
let address_change = &signed_address_change.message;
let validator = state
.validators()
.get(address_change.validator_index as usize)
.ok_or_else(|| error(Invalid::ValidatorUnknown(address_change.validator_index)))?;
verify!(
validator
.withdrawal_credentials
.as_bytes()
.first()
.map(|byte| *byte == spec.bls_withdrawal_prefix_byte)
.unwrap_or(false),
Invalid::NonBlsWithdrawalCredentials
);
let pubkey_hash = hash(address_change.from_bls_pubkey.as_serialized());
// FIXME: Should this check be put inside the verify_signatures.is_true() condition?
// I believe that's used for fuzzing so this is a Mehdi question..
verify!(
validator.withdrawal_credentials.as_bytes().get(1..) == pubkey_hash.get(1..),
Invalid::WithdrawalCredentialsMismatch
);
if verify_signatures.is_true() {
verify!(
bls_execution_change_signature_set(state, signed_address_change, spec)?.verify(),
Invalid::BadSignature
);
}
Ok(())
}
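The two `verify!` checks above encode the Capella rule that BLS withdrawal credentials commit to the key as `BLS_WITHDRAWAL_PREFIX || hash(from_bls_pubkey)[1..32]`. A minimal standalone sketch of that comparison, taking a precomputed 32-byte pubkey hash rather than calling `eth2_hashing::hash` on a real BLS key:

// The BLS withdrawal prefix byte from the spec.
const BLS_WITHDRAWAL_PREFIX_BYTE: u8 = 0x00;

/// Check that `withdrawal_credentials` commit to the given pubkey hash:
/// byte 0 must be the BLS prefix and bytes 1..32 must equal hash(pubkey)[1..32].
fn credentials_match_pubkey_hash(
    withdrawal_credentials: &[u8; 32],
    pubkey_hash: &[u8; 32],
) -> bool {
    withdrawal_credentials[0] == BLS_WITHDRAWAL_PREFIX_BYTE
        && withdrawal_credentials[1..] == pubkey_hash[1..]
}

fn main() {
    // Dummy value standing in for hash(from_bls_pubkey).
    let mut pubkey_hash = [0u8; 32];
    pubkey_hash[1..].copy_from_slice(&[7u8; 31]);

    // Credentials registered at deposit time: prefix byte plus the hash tail.
    let mut credentials = [0u8; 32];
    credentials[0] = BLS_WITHDRAWAL_PREFIX_BYTE;
    credentials[1..].copy_from_slice(&pubkey_hash[1..]);

    assert!(credentials_match_pubkey_hash(&credentials, &pubkey_hash));
}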

View File

@ -11,7 +11,6 @@ pub use weigh_justification_and_finalization::weigh_justification_and_finalizati
pub mod altair;
pub mod base;
pub mod capella;
pub mod effective_balance_updates;
pub mod epoch_processing_summary;
pub mod errors;
@ -38,8 +37,10 @@ pub fn process_epoch<T: EthSpec>(
match state {
BeaconState::Base(_) => base::process_epoch(state, spec),
BeaconState::Altair(_) | BeaconState::Merge(_) => altair::process_epoch(state, spec),
BeaconState::Capella(_) | BeaconState::Eip4844(_) => capella::process_epoch(state, spec),
BeaconState::Altair(_)
| BeaconState::Merge(_)
| BeaconState::Capella(_)
| BeaconState::Eip4844(_) => altair::process_epoch(state, spec),
}
}

View File

@ -1,87 +0,0 @@
use super::{process_registry_updates, process_slashings, EpochProcessingSummary, Error};
use crate::per_epoch_processing::{
altair,
effective_balance_updates::process_effective_balance_updates,
historical_roots_update::process_historical_roots_update,
resets::{process_eth1_data_reset, process_randao_mixes_reset, process_slashings_reset},
};
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub use full_withdrawals::process_full_withdrawals;
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub use partial_withdrawals::process_partial_withdrawals;
use types::{BeaconState, ChainSpec, EthSpec, RelativeEpoch};
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub mod full_withdrawals;
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub mod partial_withdrawals;
pub fn process_epoch<T: EthSpec>(
state: &mut BeaconState<T>,
spec: &ChainSpec,
) -> Result<EpochProcessingSummary<T>, Error> {
// Ensure the committee caches are built.
state.build_committee_cache(RelativeEpoch::Previous, spec)?;
state.build_committee_cache(RelativeEpoch::Current, spec)?;
state.build_committee_cache(RelativeEpoch::Next, spec)?;
// Pre-compute participating indices and total balances.
let participation_cache = altair::ParticipationCache::new(state, spec)?;
let sync_committee = state.current_sync_committee()?.clone();
// Justification and finalization.
let justification_and_finalization_state =
altair::process_justification_and_finalization(state, &participation_cache)?;
justification_and_finalization_state.apply_changes_to_state(state);
altair::process_inactivity_updates(state, &participation_cache, spec)?;
// Rewards and Penalties.
altair::process_rewards_and_penalties(state, &participation_cache, spec)?;
// Registry Updates.
process_registry_updates(state, spec)?;
// Slashings.
process_slashings(
state,
participation_cache.current_epoch_total_active_balance(),
spec,
)?;
// Reset eth1 data votes.
process_eth1_data_reset(state)?;
// Update effective balances with hysteresis (lag).
process_effective_balance_updates(state, spec)?;
// Reset slashings
process_slashings_reset(state)?;
// Set randao mix
process_randao_mixes_reset(state)?;
// Set historical root accumulator
process_historical_roots_update(state)?;
// Rotate current/previous epoch participation
altair::process_participation_flag_updates(state)?;
altair::process_sync_committee_updates(state, spec)?;
// Withdrawals
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
process_full_withdrawals(state, spec)?;
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
process_partial_withdrawals(state, spec)?;
// Rotate the epoch caches to suit the epoch transition.
state.advance_caches(spec)?;
// FIXME: do we need a Capella variant for this?
Ok(EpochProcessingSummary::Altair {
participation_cache,
sync_committee,
})
}

View File

@ -1,25 +0,0 @@
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
use crate::common::withdraw_balance;
use crate::EpochProcessingError;
use types::{beacon_state::BeaconState, eth_spec::EthSpec, ChainSpec};
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub fn process_full_withdrawals<T: EthSpec>(
state: &mut BeaconState<T>,
spec: &ChainSpec,
) -> Result<(), EpochProcessingError> {
let current_epoch = state.current_epoch();
// FIXME: is this the most efficient way to do this?
for validator_index in 0..state.validators().len() {
// TODO: is this the correct way to handle validators not existing?
if let (Some(validator), Some(balance)) = (
state.validators().get(validator_index),
state.balances().get(validator_index),
) {
if validator.is_fully_withdrawable_at(*balance, current_epoch, spec) {
withdraw_balance(state, validator_index, *balance)?;
}
}
}
Ok(())
}

View File

@ -1,41 +0,0 @@
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
use crate::common::withdraw_balance;
use crate::EpochProcessingError;
use safe_arith::SafeArith;
use types::{beacon_state::BeaconState, eth_spec::EthSpec, ChainSpec};
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
pub fn process_partial_withdrawals<T: EthSpec>(
state: &mut BeaconState<T>,
spec: &ChainSpec,
) -> Result<(), EpochProcessingError> {
let mut partial_withdrawals_count = 0;
let mut validator_index = *state.next_partial_withdrawal_validator_index()? as usize;
let n_validators = state.validators().len();
// FIXME: is this the most efficient way to do this?
for _ in 0..n_validators {
// TODO: is this the correct way to handle validators not existing?
if let (Some(validator), Some(balance)) = (
state.validators().get(validator_index),
state.balances().get(validator_index),
) {
if validator.is_partially_withdrawable_validator(*balance, spec) {
withdraw_balance(
state,
validator_index,
*balance - spec.max_effective_balance,
)?;
partial_withdrawals_count.safe_add_assign(1)?;
validator_index = validator_index.safe_add(1)? % n_validators;
if partial_withdrawals_count == T::max_partial_withdrawals_per_epoch() {
break;
}
}
}
}
*state.next_partial_withdrawal_validator_index_mut()? = validator_index as u64;
Ok(())
}

View File

@ -1,4 +1,6 @@
use crate::upgrade::{upgrade_to_altair, upgrade_to_bellatrix};
use crate::upgrade::{
upgrade_to_altair, upgrade_to_bellatrix, upgrade_to_capella, upgrade_to_eip4844,
};
use crate::{per_epoch_processing::EpochProcessingSummary, *};
use safe_arith::{ArithError, SafeArith};
use types::*;
@ -55,6 +57,14 @@ pub fn per_slot_processing<T: EthSpec>(
if spec.bellatrix_fork_epoch == Some(state.current_epoch()) {
upgrade_to_bellatrix(state, spec)?;
}
// Capella.
if spec.capella_fork_epoch == Some(state.current_epoch()) {
upgrade_to_capella(state, spec)?;
}
// Eip4844
if spec.eip4844_fork_epoch == Some(state.current_epoch()) {
upgrade_to_eip4844(state, spec)?;
}
}
Ok(summary)

View File

@ -1,4 +1,3 @@
use ssz_types::VariableList;
use std::mem;
use types::{BeaconState, BeaconStateCapella, BeaconStateError as Error, ChainSpec, EthSpec, Fork};
@ -58,11 +57,9 @@ pub fn upgrade_to_capella<E: EthSpec>(
latest_execution_payload_header: pre.latest_execution_payload_header.upgrade_to_capella(),
// Withdrawals
#[cfg(feature = "withdrawals")]
withdrawal_queue: VariableList::empty(),
#[cfg(feature = "withdrawals")]
next_withdrawal_index: 0,
#[cfg(feature = "withdrawals")]
next_partial_withdrawal_validator_index: 0,
next_withdrawal_validator_index: 0,
// Caches
total_active_balance: pre.total_active_balance,
committee_caches: mem::take(&mut pre.committee_caches),

View File

@ -65,11 +65,9 @@ pub fn upgrade_to_eip4844<E: EthSpec>(
latest_execution_payload_header: pre.latest_execution_payload_header.upgrade_to_eip4844(),
// Withdrawals
#[cfg(feature = "withdrawals")]
withdrawal_queue: mem::take(&mut pre.withdrawal_queue),
#[cfg(feature = "withdrawals")]
next_withdrawal_index: pre.next_withdrawal_index,
#[cfg(feature = "withdrawals")]
next_partial_withdrawal_validator_index: pre.next_partial_withdrawal_validator_index,
next_withdrawal_validator_index: 0,
// Caches
total_active_balance: pre.total_active_balance,
committee_caches: mem::take(&mut pre.committee_caches),

View File

@ -0,0 +1,12 @@
# Mainnet preset - Capella
# Misc
# Max operations per block
# ---------------------------------------------------------------
# 2**4 (= 16)
MAX_BLS_TO_EXECUTION_CHANGES: 16
# Execution
# ---------------------------------------------------------------
# 2**4 (= 16) withdrawals
MAX_WITHDRAWALS_PER_PAYLOAD: 16

View File

@ -0,0 +1,12 @@
# Minimal preset - Capella
# Max operations per block
# ---------------------------------------------------------------
# 2**4 (= 16)
MAX_BLS_TO_EXECUTION_CHANGES: 16
# Execution
# ---------------------------------------------------------------
# [customized] 2**2 (= 4)
MAX_WITHDRAWALS_PER_PAYLOAD: 4

View File

@ -78,17 +78,20 @@ impl<'a, T: EthSpec, Payload: AbstractExecPayload<T>> SignedRoot
{
}
/// Empty block trait for each block variant to implement.
pub trait EmptyBlock {
/// Returns an empty block to be used during genesis.
fn empty(spec: &ChainSpec) -> Self;
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlock<T, Payload> {
// FIXME: deal with capella / eip4844 forks here as well
/// Returns an empty block to be used during genesis.
pub fn empty(spec: &ChainSpec) -> Self {
if spec.bellatrix_fork_epoch == Some(T::genesis_epoch()) {
Self::Merge(BeaconBlockMerge::empty(spec))
} else if spec.altair_fork_epoch == Some(T::genesis_epoch()) {
Self::Altair(BeaconBlockAltair::empty(spec))
} else {
Self::Base(BeaconBlockBase::empty(spec))
}
map_fork_name!(
spec.fork_name_at_epoch(T::genesis_epoch()),
Self,
EmptyBlock::empty(spec)
)
}
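For readers unfamiliar with `map_fork_name!`, the call above expands to a match on the fork active at the genesis epoch, building that fork's block variant from its `EmptyBlock` impl. A simplified standalone sketch of the same dispatch, with stub types standing in for the real block variants and without the `ChainSpec` argument:

// Hypothetical stand-ins for the fork enum and the per-fork block variants.
#[derive(Debug, Clone, Copy)]
enum ForkName { Base, Altair, Merge, Capella, Eip4844 }

trait EmptyBlock { fn empty() -> Self; }

#[derive(Debug)] struct BlockBase;
#[derive(Debug)] struct BlockAltair;
#[derive(Debug)] struct BlockMerge;
#[derive(Debug)] struct BlockCapella;
#[derive(Debug)] struct BlockEip4844;

impl EmptyBlock for BlockBase { fn empty() -> Self { BlockBase } }
impl EmptyBlock for BlockAltair { fn empty() -> Self { BlockAltair } }
impl EmptyBlock for BlockMerge { fn empty() -> Self { BlockMerge } }
impl EmptyBlock for BlockCapella { fn empty() -> Self { BlockCapella } }
impl EmptyBlock for BlockEip4844 { fn empty() -> Self { BlockEip4844 } }

#[derive(Debug)]
enum Block {
    Base(BlockBase),
    Altair(BlockAltair),
    Merge(BlockMerge),
    Capella(BlockCapella),
    Eip4844(BlockEip4844),
}

impl Block {
    /// Build the empty block for whichever fork is active at the genesis epoch;
    /// this is roughly what `map_fork_name!` expands to.
    fn empty_at(fork: ForkName) -> Self {
        match fork {
            ForkName::Base => Block::Base(EmptyBlock::empty()),
            ForkName::Altair => Block::Altair(EmptyBlock::empty()),
            ForkName::Merge => Block::Merge(EmptyBlock::empty()),
            ForkName::Capella => Block::Capella(EmptyBlock::empty()),
            ForkName::Eip4844 => Block::Eip4844(EmptyBlock::empty()),
        }
    }
}

fn main() {
    for fork in [ForkName::Base, ForkName::Altair, ForkName::Merge, ForkName::Capella, ForkName::Eip4844] {
        println!("{:?}", Block::empty_at(fork));
    }
}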
/// Custom SSZ decoder that takes a `ChainSpec` as context.
@ -117,13 +120,12 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlock<T, Payload> {
/// Usually it's better to prefer `from_ssz_bytes` which will decode the correct variant based
/// on the fork slot.
pub fn any_from_ssz_bytes(bytes: &[u8]) -> Result<Self, ssz::DecodeError> {
BeaconBlockMerge::from_ssz_bytes(bytes)
.map(BeaconBlock::Merge)
.or_else(|_| {
BeaconBlockAltair::from_ssz_bytes(bytes)
.map(BeaconBlock::Altair)
.or_else(|_| BeaconBlockBase::from_ssz_bytes(bytes).map(BeaconBlock::Base))
})
BeaconBlockEip4844::from_ssz_bytes(bytes)
.map(BeaconBlock::Eip4844)
.or_else(|_| BeaconBlockCapella::from_ssz_bytes(bytes).map(BeaconBlock::Capella))
.or_else(|_| BeaconBlockMerge::from_ssz_bytes(bytes).map(BeaconBlock::Merge))
.or_else(|_| BeaconBlockAltair::from_ssz_bytes(bytes).map(BeaconBlock::Altair))
.or_else(|_| BeaconBlockBase::from_ssz_bytes(bytes).map(BeaconBlock::Base))
}
/// Convenience accessor for the `body` as a `BeaconBlockBodyRef`.
@ -266,9 +268,8 @@ impl<'a, T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockRefMut<'a, T, P
}
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockBase<T, Payload> {
/// Returns an empty block to be used during genesis.
pub fn empty(spec: &ChainSpec) -> Self {
impl<T: EthSpec, Payload: AbstractExecPayload<T>> EmptyBlock for BeaconBlockBase<T, Payload> {
fn empty(spec: &ChainSpec) -> Self {
BeaconBlockBase {
slot: spec.genesis_slot,
proposer_index: 0,
@ -291,7 +292,9 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockBase<T, Payload> {
},
}
}
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockBase<T, Payload> {
/// Return a block where the block has maximum size.
pub fn full(spec: &ChainSpec) -> Self {
let header = BeaconBlockHeader {
@ -387,9 +390,9 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockBase<T, Payload> {
}
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockAltair<T, Payload> {
impl<T: EthSpec, Payload: AbstractExecPayload<T>> EmptyBlock for BeaconBlockAltair<T, Payload> {
/// Returns an empty Altair block to be used during genesis.
pub fn empty(spec: &ChainSpec) -> Self {
fn empty(spec: &ChainSpec) -> Self {
BeaconBlockAltair {
slot: spec.genesis_slot,
proposer_index: 0,
@ -413,7 +416,9 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockAltair<T, Payload>
},
}
}
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockAltair<T, Payload> {
/// Return an Altair block where the block has maximum size.
pub fn full(spec: &ChainSpec) -> Self {
let base_block: BeaconBlockBase<_, Payload> = BeaconBlockBase::full(spec);
@ -446,9 +451,9 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockAltair<T, Payload>
}
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockMerge<T, Payload> {
impl<T: EthSpec, Payload: AbstractExecPayload<T>> EmptyBlock for BeaconBlockMerge<T, Payload> {
/// Returns an empty Merge block to be used during genesis.
pub fn empty(spec: &ChainSpec) -> Self {
fn empty(spec: &ChainSpec) -> Self {
BeaconBlockMerge {
slot: spec.genesis_slot,
proposer_index: 0,
@ -474,6 +479,67 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockMerge<T, Payload> {
}
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> EmptyBlock for BeaconBlockCapella<T, Payload> {
/// Returns an empty Capella block to be used during genesis.
fn empty(spec: &ChainSpec) -> Self {
BeaconBlockCapella {
slot: spec.genesis_slot,
proposer_index: 0,
parent_root: Hash256::zero(),
state_root: Hash256::zero(),
body: BeaconBlockBodyCapella {
randao_reveal: Signature::empty(),
eth1_data: Eth1Data {
deposit_root: Hash256::zero(),
block_hash: Hash256::zero(),
deposit_count: 0,
},
graffiti: Graffiti::default(),
proposer_slashings: VariableList::empty(),
attester_slashings: VariableList::empty(),
attestations: VariableList::empty(),
deposits: VariableList::empty(),
voluntary_exits: VariableList::empty(),
sync_aggregate: SyncAggregate::empty(),
execution_payload: Payload::Capella::default(),
#[cfg(feature = "withdrawals")]
bls_to_execution_changes: VariableList::empty(),
},
}
}
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> EmptyBlock for BeaconBlockEip4844<T, Payload> {
/// Returns an empty Eip4844 block to be used during genesis.
fn empty(spec: &ChainSpec) -> Self {
BeaconBlockEip4844 {
slot: spec.genesis_slot,
proposer_index: 0,
parent_root: Hash256::zero(),
state_root: Hash256::zero(),
body: BeaconBlockBodyEip4844 {
randao_reveal: Signature::empty(),
eth1_data: Eth1Data {
deposit_root: Hash256::zero(),
block_hash: Hash256::zero(),
deposit_count: 0,
},
graffiti: Graffiti::default(),
proposer_slashings: VariableList::empty(),
attester_slashings: VariableList::empty(),
attestations: VariableList::empty(),
deposits: VariableList::empty(),
voluntary_exits: VariableList::empty(),
sync_aggregate: SyncAggregate::empty(),
execution_payload: Payload::Eip4844::default(),
#[cfg(feature = "withdrawals")]
bls_to_execution_changes: VariableList::empty(),
blob_kzg_commitments: VariableList::empty(),
},
}
}
}
// We can convert pre-Bellatrix blocks without payloads into blocks "with" payloads.
impl<E: EthSpec> From<BeaconBlockBase<E, BlindedPayload<E>>>
for BeaconBlockBase<E, FullPayload<E>>

View File

@ -62,6 +62,10 @@ pub struct BeaconBlockBody<T: EthSpec, Payload: AbstractExecPayload<T> = FullPay
#[superstruct(only(Eip4844), partial_getter(rename = "execution_payload_eip4844"))]
#[serde(flatten)]
pub execution_payload: Payload::Eip4844,
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
pub bls_to_execution_changes:
VariableList<SignedBlsToExecutionChange, T::MaxBlsToExecutionChanges>,
#[superstruct(only(Eip4844))]
pub blob_kzg_commitments: VariableList<KzgCommitment, T::MaxBlobsPerBlock>,
#[superstruct(only(Base, Altair))]
@ -72,7 +76,7 @@ pub struct BeaconBlockBody<T: EthSpec, Payload: AbstractExecPayload<T> = FullPay
}
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BeaconBlockBody<T, Payload> {
pub fn execution_payload<'a>(&'a self) -> Result<Payload::Ref<'a>, Error> {
pub fn execution_payload(&self) -> Result<Payload::Ref<'_>, Error> {
self.to_ref().execution_payload()
}
}
@ -297,6 +301,8 @@ impl<E: EthSpec> From<BeaconBlockBodyCapella<E, FullPayload<E>>>
voluntary_exits,
sync_aggregate,
execution_payload: FullPayloadCapella { execution_payload },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
} = body;
(
@ -313,6 +319,8 @@ impl<E: EthSpec> From<BeaconBlockBodyCapella<E, FullPayload<E>>>
execution_payload: BlindedPayloadCapella {
execution_payload_header: From::from(execution_payload.clone()),
},
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
},
Some(execution_payload),
)
@ -337,6 +345,8 @@ impl<E: EthSpec> From<BeaconBlockBodyEip4844<E, FullPayload<E>>>
voluntary_exits,
sync_aggregate,
execution_payload: FullPayloadEip4844 { execution_payload },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
blob_kzg_commitments,
} = body;
@ -354,6 +364,8 @@ impl<E: EthSpec> From<BeaconBlockBodyEip4844<E, FullPayload<E>>>
execution_payload: BlindedPayloadEip4844 {
execution_payload_header: From::from(execution_payload.clone()),
},
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
blob_kzg_commitments,
},
Some(execution_payload),
@ -421,6 +433,8 @@ impl<E: EthSpec> BeaconBlockBodyCapella<E, FullPayload<E>> {
voluntary_exits,
sync_aggregate,
execution_payload: FullPayloadCapella { execution_payload },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
} = self;
BeaconBlockBodyCapella {
@ -436,6 +450,8 @@ impl<E: EthSpec> BeaconBlockBodyCapella<E, FullPayload<E>> {
execution_payload: BlindedPayloadCapella {
execution_payload_header: From::from(execution_payload.clone()),
},
#[cfg(feature = "withdrawals")]
bls_to_execution_changes: bls_to_execution_changes.clone(),
}
}
}
@ -453,6 +469,8 @@ impl<E: EthSpec> BeaconBlockBodyEip4844<E, FullPayload<E>> {
voluntary_exits,
sync_aggregate,
execution_payload: FullPayloadEip4844 { execution_payload },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
blob_kzg_commitments,
} = self;
@ -469,6 +487,8 @@ impl<E: EthSpec> BeaconBlockBodyEip4844<E, FullPayload<E>> {
execution_payload: BlindedPayloadEip4844 {
execution_payload_header: From::from(execution_payload.clone()),
},
#[cfg(feature = "withdrawals")]
bls_to_execution_changes: bls_to_execution_changes.clone(),
blob_kzg_commitments: blob_kzg_commitments.clone(),
}
}

View File

@ -296,14 +296,11 @@ where
// Withdrawals
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
pub withdrawal_queue: VariableList<Withdrawal, T::WithdrawalQueueLimit>,
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
#[superstruct(only(Capella, Eip4844), partial_getter(copy))]
pub next_withdrawal_index: u64,
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
pub next_partial_withdrawal_validator_index: u64,
#[superstruct(only(Capella, Eip4844), partial_getter(copy))]
pub next_withdrawal_validator_index: u64,
// Caching (not in the spec)
#[serde(skip_serializing, skip_deserializing)]
@ -1787,6 +1784,8 @@ impl<T: EthSpec> CompareFields for BeaconState<T> {
(BeaconState::Base(x), BeaconState::Base(y)) => x.compare_fields(y),
(BeaconState::Altair(x), BeaconState::Altair(y)) => x.compare_fields(y),
(BeaconState::Merge(x), BeaconState::Merge(y)) => x.compare_fields(y),
(BeaconState::Capella(x), BeaconState::Capella(y)) => x.compare_fields(y),
(BeaconState::Eip4844(x), BeaconState::Eip4844(y)) => x.compare_fields(y),
_ => panic!("compare_fields: mismatched state variants",),
}
}

View File

@ -363,6 +363,16 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
hasher.write(payload_header.tree_hash_root().as_bytes())?;
}
// Withdrawal indices (Capella and later).
#[cfg(feature = "withdrawals")]
if let Ok(next_withdrawal_index) = state.next_withdrawal_index() {
hasher.write(next_withdrawal_index.tree_hash_root().as_bytes())?;
}
#[cfg(feature = "withdrawals")]
if let Ok(next_withdrawal_validator_index) = state.next_withdrawal_validator_index() {
hasher.write(next_withdrawal_validator_index.tree_hash_root().as_bytes())?;
}
let root = hasher.finish()?;
self.previous_state = Some((root, state.slot()));

View File

@ -4,7 +4,6 @@ use serde_derive::{Deserialize, Serialize};
use ssz::Encode;
use ssz_derive::{Decode, Encode};
use ssz_types::VariableList;
use tree_hash::TreeHash;
use tree_hash_derive::TreeHash;
#[cfg_attr(feature = "arbitrary-fuzz", derive(arbitrary::Arbitrary))]
@ -23,6 +22,7 @@ impl<T: EthSpec> BlobsSidecar<T> {
pub fn empty() -> Self {
Self::default()
}
#[allow(clippy::integer_arithmetic)]
pub fn max_size() -> usize {
// Fixed part
Self::empty().as_ssz_bytes().len()

View File

@ -0,0 +1,30 @@
use crate::test_utils::TestRandom;
use crate::*;
use bls::PublicKeyBytes;
use serde_derive::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
use test_random_derive::TestRandom;
use tree_hash_derive::TreeHash;
/// A validator's request to change its withdrawal credentials from a BLS public key to an execution address.
#[cfg_attr(feature = "arbitrary-fuzz", derive(arbitrary::Arbitrary))]
#[derive(
Debug, PartialEq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom,
)]
pub struct BlsToExecutionChange {
#[serde(with = "eth2_serde_utils::quoted_u64")]
pub validator_index: u64,
pub from_bls_pubkey: PublicKeyBytes,
pub to_execution_address: Address,
}
impl SignedRoot for BlsToExecutionChange {}
#[cfg(test)]
mod tests {
use super::*;
ssz_and_tree_hash_tests!(BlsToExecutionChange);
}

View File

@ -98,8 +98,6 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq +
/*
* New in Capella
*/
type MaxPartialWithdrawalsPerEpoch: Unsigned + Clone + Sync + Send + Debug + PartialEq;
type WithdrawalQueueLimit: Unsigned + Clone + Sync + Send + Debug + PartialEq;
type MaxBlsToExecutionChanges: Unsigned + Clone + Sync + Send + Debug + PartialEq;
type MaxWithdrawalsPerPayload: Unsigned + Clone + Sync + Send + Debug + PartialEq;
/*
@ -235,16 +233,6 @@ pub trait EthSpec: 'static + Default + Sync + Send + Clone + Debug + PartialEq +
Self::BytesPerLogsBloom::to_usize()
}
/// Returns the `MAX_PARTIAL_WITHDRAWALS_PER_EPOCH` constant for this specification.
fn max_partial_withdrawals_per_epoch() -> usize {
Self::MaxPartialWithdrawalsPerEpoch::to_usize()
}
/// Returns the `WITHDRAWAL_QUEUE_LIMIT` constant for this specification.
fn withdrawal_queue_limit() -> usize {
Self::WithdrawalQueueLimit::to_usize()
}
/// Returns the `MAX_BLS_TO_EXECUTION_CHANGES` constant for this specification.
fn max_bls_to_execution_changes() -> usize {
Self::MaxBlsToExecutionChanges::to_usize()
@ -309,8 +297,6 @@ impl EthSpec for MainnetEthSpec {
type SyncSubcommitteeSize = U128; // 512 committee size / 4 sync committee subnet count
type MaxPendingAttestations = U4096; // 128 max attestations * 32 slots per epoch
type SlotsPerEth1VotingPeriod = U2048; // 64 epochs * 32 slots per epoch
type MaxPartialWithdrawalsPerEpoch = U256;
type WithdrawalQueueLimit = U1099511627776;
type MaxBlsToExecutionChanges = U16;
type MaxWithdrawalsPerPayload = U16;
@ -338,6 +324,7 @@ impl EthSpec for MinimalEthSpec {
type SyncSubcommitteeSize = U8; // 32 committee size / 4 sync committee subnet count
type MaxPendingAttestations = U1024; // 128 max attestations * 8 slots per epoch
type SlotsPerEth1VotingPeriod = U32; // 4 epochs * 8 slots per epoch
type MaxWithdrawalsPerPayload = U4;
params_from_eth_spec!(MainnetEthSpec {
JustificationBitsLength,
@ -358,10 +345,7 @@ impl EthSpec for MinimalEthSpec {
GasLimitDenominator,
MinGasLimit,
MaxExtraDataBytes,
MaxPartialWithdrawalsPerEpoch,
WithdrawalQueueLimit,
MaxBlsToExecutionChanges,
MaxWithdrawalsPerPayload,
MaxBlobsPerBlock,
FieldElementsPerBlob
});
@ -408,8 +392,6 @@ impl EthSpec for GnosisEthSpec {
type SyncSubcommitteeSize = U128; // 512 committee size / 4 sync committee subnet count
type MaxPendingAttestations = U2048; // 128 max attestations * 16 slots per epoch
type SlotsPerEth1VotingPeriod = U1024; // 64 epochs * 16 slots per epoch
type MaxPartialWithdrawalsPerEpoch = U256;
type WithdrawalQueueLimit = U1099511627776;
type MaxBlsToExecutionChanges = U16;
type MaxWithdrawalsPerPayload = U16;
type MaxBlobsPerBlock = U16; // 2**4 = 16

View File

@ -1,9 +1,8 @@
use crate::{test_utils::TestRandom, *};
use derivative::Derivative;
use serde_derive::{Deserialize, Serialize};
use ssz::Encode;
use ssz::{Decode, Encode};
use ssz_derive::{Decode, Encode};
use std::slice::Iter;
use test_random_derive::TestRandom;
use tree_hash_derive::TreeHash;
@ -13,6 +12,8 @@ pub type Transactions<T> = VariableList<
<T as EthSpec>::MaxTransactionsPerPayload,
>;
pub type Withdrawals<T> = VariableList<Withdrawal, <T as EthSpec>::MaxWithdrawalsPerPayload>;
#[superstruct(
variants(Merge, Capella, Eip4844),
variant_attributes(
@ -82,10 +83,21 @@ pub struct ExecutionPayload<T: EthSpec> {
pub transactions: Transactions<T>,
#[cfg(feature = "withdrawals")]
#[superstruct(only(Capella, Eip4844))]
pub withdrawals: VariableList<Withdrawal, T::MaxWithdrawalsPerPayload>,
pub withdrawals: Withdrawals<T>,
}
impl<T: EthSpec> ExecutionPayload<T> {
pub fn from_ssz_bytes(bytes: &[u8], fork_name: ForkName) -> Result<Self, ssz::DecodeError> {
match fork_name {
ForkName::Base | ForkName::Altair => Err(ssz::DecodeError::BytesInvalid(format!(
"unsupported fork for ExecutionPayload: {fork_name}",
))),
ForkName::Merge => ExecutionPayloadMerge::from_ssz_bytes(bytes).map(Self::Merge),
ForkName::Capella => ExecutionPayloadCapella::from_ssz_bytes(bytes).map(Self::Capella),
ForkName::Eip4844 => ExecutionPayloadEip4844::from_ssz_bytes(bytes).map(Self::Eip4844),
}
}
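The decoder above makes SSZ decoding fork-aware, since the per-fork payload variants cannot be told apart from the raw bytes alone. A simplified standalone sketch of the same dispatch pattern, with byte-tagging stand-ins in place of the real per-fork SSZ decoders:

// Hypothetical, simplified fork-gated decoder mirroring the pattern above.
#[derive(Debug, Clone, Copy)]
enum ForkName { Base, Altair, Merge, Capella, Eip4844 }

#[derive(Debug)]
enum Payload { Merge(Vec<u8>), Capella(Vec<u8>), Eip4844(Vec<u8>) }

fn payload_from_ssz_bytes(bytes: &[u8], fork: ForkName) -> Result<Payload, String> {
    // In the real code each arm calls that variant's SSZ decoder; here we just
    // tag the raw bytes to show the dispatch.
    match fork {
        ForkName::Base | ForkName::Altair => {
            Err(format!("unsupported fork for ExecutionPayload: {fork:?}"))
        }
        ForkName::Merge => Ok(Payload::Merge(bytes.to_vec())),
        ForkName::Capella => Ok(Payload::Capella(bytes.to_vec())),
        ForkName::Eip4844 => Ok(Payload::Eip4844(bytes.to_vec())),
    }
}

fn main() {
    assert!(payload_from_ssz_bytes(&[1, 2, 3], ForkName::Base).is_err());
    assert!(matches!(
        payload_from_ssz_bytes(&[1, 2, 3], ForkName::Capella),
        Ok(Payload::Capella(_))
    ));
}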
#[allow(clippy::integer_arithmetic)]
/// Returns the maximum size of an execution payload.
pub fn max_execution_payload_merge_size() -> usize {

View File

@ -1,6 +1,7 @@
use crate::{test_utils::TestRandom, *};
use derivative::Derivative;
use serde_derive::{Deserialize, Serialize};
use ssz::Decode;
use ssz_derive::{Decode, Encode};
use test_random_derive::TestRandom;
use tree_hash::TreeHash;
@ -84,31 +85,34 @@ impl<T: EthSpec> ExecutionPayloadHeader<T> {
pub fn transactions(&self) -> Option<&Transactions<T>> {
None
}
}
impl<'a, T: EthSpec> ExecutionPayloadHeaderRef<'a, T> {
// FIXME: maybe this could be a derived trait..
pub fn is_default(self) -> bool {
match self {
ExecutionPayloadHeaderRef::Merge(header) => {
*header == ExecutionPayloadHeaderMerge::default()
pub fn from_ssz_bytes(bytes: &[u8], fork_name: ForkName) -> Result<Self, ssz::DecodeError> {
match fork_name {
ForkName::Base | ForkName::Altair => Err(ssz::DecodeError::BytesInvalid(format!(
"unsupported fork for ExecutionPayloadHeader: {fork_name}",
))),
ForkName::Merge => ExecutionPayloadHeaderMerge::from_ssz_bytes(bytes).map(Self::Merge),
ForkName::Capella => {
ExecutionPayloadHeaderCapella::from_ssz_bytes(bytes).map(Self::Capella)
}
ExecutionPayloadHeaderRef::Capella(header) => {
*header == ExecutionPayloadHeaderCapella::default()
}
ExecutionPayloadHeaderRef::Eip4844(header) => {
*header == ExecutionPayloadHeaderEip4844::default()
ForkName::Eip4844 => {
ExecutionPayloadHeaderEip4844::from_ssz_bytes(bytes).map(Self::Eip4844)
}
}
}
}
impl<'a, T: EthSpec> ExecutionPayloadHeaderRef<'a, T> {
pub fn is_default(self) -> bool {
map_execution_payload_header_ref!(&'a _, self, |inner, cons| {
let _ = cons(inner);
*inner == Default::default()
})
}
}
impl<T: EthSpec> ExecutionPayloadHeaderMerge<T> {
pub fn upgrade_to_capella(&self) -> ExecutionPayloadHeaderCapella<T> {
#[cfg(feature = "withdrawals")]
// TODO: if this is correct we should calculate and hardcode this..
let empty_withdrawals_root =
VariableList::<Withdrawal, T::MaxWithdrawalsPerPayload>::empty().tree_hash_root();
ExecutionPayloadHeaderCapella {
parent_hash: self.parent_hash,
fee_recipient: self.fee_recipient,
@ -125,8 +129,7 @@ impl<T: EthSpec> ExecutionPayloadHeaderMerge<T> {
block_hash: self.block_hash,
transactions_root: self.transactions_root,
#[cfg(feature = "withdrawals")]
// FIXME: the spec doesn't seem to define what to do here..
withdrawals_root: empty_withdrawals_root,
withdrawals_root: Hash256::zero(),
}
}
}

View File

@ -14,7 +14,7 @@ pub struct KzgCommitment(#[serde(with = "BigArray")] pub [u8; 48]);
impl Display for KzgCommitment {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{}", eth2_serde_utils::hex::encode(&self.0))
write!(f, "{}", eth2_serde_utils::hex::encode(self.0))
}
}

View File

@ -1,7 +1,6 @@
use crate::test_utils::{RngCore, TestRandom};
use serde::{Deserialize, Serialize};
use serde_big_array::BigArray;
use ssz::{Decode, DecodeError, Encode};
use ssz_derive::{Decode, Encode};
use std::fmt;
use tree_hash::{PackedEncoding, TreeHash};
@ -15,7 +14,7 @@ pub struct KzgProof(#[serde(with = "BigArray")] pub [u8; KZG_PROOF_BYTES_LEN]);
impl fmt::Display for KzgProof {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", eth2_serde_utils::hex::encode(&self.0))
write!(f, "{}", eth2_serde_utils::hex::encode(self.0))
}
}

View File

@ -27,6 +27,7 @@ pub mod beacon_block_body;
pub mod beacon_block_header;
pub mod beacon_committee;
pub mod beacon_state;
pub mod bls_to_execution_change;
pub mod builder_bid;
pub mod chain_spec;
pub mod checkpoint;
@ -61,6 +62,7 @@ pub mod shuffling_id;
pub mod signed_aggregate_and_proof;
pub mod signed_beacon_block;
pub mod signed_beacon_block_header;
pub mod signed_bls_to_execution_change;
pub mod signed_contribution_and_proof;
pub mod signed_voluntary_exit;
pub mod signing_data;
@ -108,7 +110,7 @@ pub use crate::attestation_duty::AttestationDuty;
pub use crate::attester_slashing::AttesterSlashing;
pub use crate::beacon_block::{
BeaconBlock, BeaconBlockAltair, BeaconBlockBase, BeaconBlockCapella, BeaconBlockEip4844,
BeaconBlockMerge, BeaconBlockRef, BeaconBlockRefMut, BlindedBeaconBlock,
BeaconBlockMerge, BeaconBlockRef, BeaconBlockRefMut, BlindedBeaconBlock, EmptyBlock,
};
pub use crate::beacon_block_body::{
BeaconBlockBody, BeaconBlockBodyAltair, BeaconBlockBodyBase, BeaconBlockBodyCapella,
@ -118,6 +120,7 @@ pub use crate::beacon_block_header::BeaconBlockHeader;
pub use crate::beacon_committee::{BeaconCommittee, OwnedBeaconCommittee};
pub use crate::beacon_state::{BeaconTreeHashCache, Error as BeaconStateError, *};
pub use crate::blobs_sidecar::BlobsSidecar;
pub use crate::bls_to_execution_change::BlsToExecutionChange;
pub use crate::chain_spec::{ChainSpec, Config, Domain};
pub use crate::checkpoint::Checkpoint;
pub use crate::config_and_preset::{
@ -134,7 +137,7 @@ pub use crate::eth_spec::EthSpecId;
pub use crate::execution_block_hash::ExecutionBlockHash;
pub use crate::execution_payload::{
ExecutionPayload, ExecutionPayloadCapella, ExecutionPayloadEip4844, ExecutionPayloadMerge,
ExecutionPayloadRef, Transaction, Transactions,
ExecutionPayloadRef, Transaction, Transactions, Withdrawals,
};
pub use crate::execution_payload_header::{
ExecutionPayloadHeader, ExecutionPayloadHeaderCapella, ExecutionPayloadHeaderEip4844,
@ -171,6 +174,7 @@ pub use crate::signed_beacon_block::{
SignedBlindedBeaconBlock,
};
pub use crate::signed_beacon_block_header::SignedBeaconBlockHeader;
pub use crate::signed_bls_to_execution_change::SignedBlsToExecutionChange;
pub use crate::signed_block_and_blobs::SignedBeaconBlockAndBlobsSidecar;
pub use crate::signed_contribution_and_proof::SignedContributionAndProof;
pub use crate::signed_voluntary_exit::SignedVoluntaryExit;

View File

@ -36,6 +36,9 @@ pub trait ExecPayload<T: EthSpec>: Debug + Clone + PartialEq + Hash + TreeHash +
fn fee_recipient(&self) -> Address;
fn gas_limit(&self) -> u64;
fn transactions(&self) -> Option<&Transactions<T>>;
/// Fork-specific fields.
#[cfg(feature = "withdrawals")]
fn withdrawals_root(&self) -> Result<Hash256, Error>;
/// Is this a default payload? (pre-merge)
fn is_default(&self) -> bool;
@ -218,13 +221,26 @@ impl<T: EthSpec> ExecPayload<T> for FullPayload<T> {
})
}
fn transactions<'a>(&'a self) -> Option<&Transactions<T>> {
fn transactions<'a>(&'a self) -> Option<&'a Transactions<T>> {
map_full_payload_ref!(&'a _, self.to_ref(), move |payload, cons| {
cons(payload);
Some(&payload.execution_payload.transactions)
})
}
#[cfg(feature = "withdrawals")]
fn withdrawals_root(&self) -> Result<Hash256, Error> {
match self {
FullPayload::Merge(_) => Err(Error::IncorrectStateVariant),
FullPayload::Capella(ref inner) => {
Ok(inner.execution_payload.withdrawals.tree_hash_root())
}
FullPayload::Eip4844(ref inner) => {
Ok(inner.execution_payload.withdrawals.tree_hash_root())
}
}
}
fn is_default<'a>(&'a self) -> bool {
map_full_payload_ref!(&'a _, self.to_ref(), move |payload, cons| {
cons(payload);
@ -249,7 +265,7 @@ impl<'b, T: EthSpec> ExecPayload<T> for FullPayloadRef<'b, T> {
fn to_execution_payload_header<'a>(&'a self) -> ExecutionPayloadHeader<T> {
map_full_payload_ref!(&'a _, self, move |payload, cons| {
cons(payload);
ExecutionPayloadHeader::from(payload.to_execution_payload_header())
payload.to_execution_payload_header()
})
}
@ -302,13 +318,26 @@ impl<'b, T: EthSpec> ExecPayload<T> for FullPayloadRef<'b, T> {
})
}
fn transactions<'a>(&'a self) -> Option<&Transactions<T>> {
fn transactions<'a>(&'a self) -> Option<&'a Transactions<T>> {
map_full_payload_ref!(&'a _, self, move |payload, cons| {
cons(payload);
Some(&payload.execution_payload.transactions)
})
}
#[cfg(feature = "withdrawals")]
fn withdrawals_root(&self) -> Result<Hash256, Error> {
match self {
FullPayloadRef::Merge(_) => Err(Error::IncorrectStateVariant),
FullPayloadRef::Capella(inner) => {
Ok(inner.execution_payload.withdrawals.tree_hash_root())
}
FullPayloadRef::Eip4844(inner) => {
Ok(inner.execution_payload.withdrawals.tree_hash_root())
}
}
}
// TODO: can this function be optimized?
fn is_default<'a>(&'a self) -> bool {
map_full_payload_ref!(&'a _, self, move |payload, cons| {
@ -459,10 +488,23 @@ impl<T: EthSpec> ExecPayload<T> for BlindedPayload<T> {
})
}
fn transactions<'a>(&'a self) -> Option<&Transactions<T>> {
fn transactions(&self) -> Option<&Transactions<T>> {
None
}
#[cfg(feature = "withdrawals")]
fn withdrawals_root(&self) -> Result<Hash256, Error> {
match self {
BlindedPayload::Merge(_) => Err(Error::IncorrectStateVariant),
BlindedPayload::Capella(ref inner) => {
Ok(inner.execution_payload_header.withdrawals_root)
}
BlindedPayload::Eip4844(ref inner) => {
Ok(inner.execution_payload_header.withdrawals_root)
}
}
}
fn is_default<'a>(&'a self) -> bool {
map_blinded_payload_ref!(&'a _, self.to_ref(), move |payload, cons| {
cons(payload);
@ -532,10 +574,23 @@ impl<'b, T: EthSpec> ExecPayload<T> for BlindedPayloadRef<'b, T> {
})
}
fn transactions<'a>(&'a self) -> Option<&Transactions<T>> {
fn transactions(&self) -> Option<&Transactions<T>> {
None
}
#[cfg(feature = "withdrawals")]
fn withdrawals_root(&self) -> Result<Hash256, Error> {
match self {
BlindedPayloadRef::Merge(_) => Err(Error::IncorrectStateVariant),
BlindedPayloadRef::Capella(inner) => {
Ok(inner.execution_payload_header.withdrawals_root)
}
BlindedPayloadRef::Eip4844(inner) => {
Ok(inner.execution_payload_header.withdrawals_root)
}
}
}
// TODO: can this function be optimized?
fn is_default<'a>(&'a self) -> bool {
map_blinded_payload_ref!(&'a _, self, move |payload, cons| {
@ -546,7 +601,7 @@ impl<'b, T: EthSpec> ExecPayload<T> for BlindedPayloadRef<'b, T> {
}
macro_rules! impl_exec_payload_common {
($wrapper_type:ident, $wrapped_type_full:ident, $wrapped_header_type:ident, $wrapped_field:ident, $fork_variant:ident, $block_type_variant:ident, $f:block) => {
($wrapper_type:ident, $wrapped_type_full:ident, $wrapped_header_type:ident, $wrapped_field:ident, $fork_variant:ident, $block_type_variant:ident, $f:block, $g:block) => {
impl<T: EthSpec> ExecPayload<T> for $wrapper_type<T> {
fn block_type() -> BlockType {
BlockType::$block_type_variant
@ -594,6 +649,12 @@ macro_rules! impl_exec_payload_common {
let f = $f;
f(self)
}
#[cfg(feature = "withdrawals")]
fn withdrawals_root(&self) -> Result<Hash256, Error> {
let g = $g;
g(self)
}
}
impl<T: EthSpec> From<$wrapped_type_full<T>> for $wrapper_type<T> {
@ -615,7 +676,15 @@ macro_rules! impl_exec_payload_for_fork {
execution_payload_header,
$fork_variant,
Blinded,
{ |_| { None } }
{ |_| { None } },
{
let c: for<'a> fn(&'a $wrapper_type_header<T>) -> Result<Hash256, Error> =
|payload: &$wrapper_type_header<T>| {
let wrapper_ref_type = BlindedPayloadRef::$fork_variant(&payload);
wrapper_ref_type.withdrawals_root()
};
c
}
);
impl<T: EthSpec> TryInto<$wrapper_type_header<T>> for BlindedPayload<T> {
@ -680,6 +749,14 @@ macro_rules! impl_exec_payload_for_fork {
let c: for<'a> fn(&'a $wrapper_type_full<T>) -> Option<&'a Transactions<T>> =
|payload: &$wrapper_type_full<T>| Some(&payload.execution_payload.transactions);
c
},
{
let c: for<'a> fn(&'a $wrapper_type_full<T>) -> Result<Hash256, Error> =
|payload: &$wrapper_type_full<T>| {
let wrapper_ref_type = FullPayloadRef::$fork_variant(&payload);
wrapper_ref_type.withdrawals_root()
};
c
}
);
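Both invocations above feed a second block into `impl_exec_payload_common!`: the block evaluates to a plain `fn` pointer and the generated `withdrawals_root` method simply calls it. A minimal standalone sketch of that macro shape, with illustrative names (`impl_accessor!`, `Payload`) rather than the crate's macros:

struct Payload {
    withdrawals_root: u64,
}

macro_rules! impl_accessor {
    ($ty:ident, $g:block) => {
        impl $ty {
            fn withdrawals_root(&self) -> Result<u64, ()> {
                // The block supplied by the caller evaluates to a fn pointer.
                let g = $g;
                g(self)
            }
        }
    };
}

impl_accessor!(Payload, {
    let c: fn(&Payload) -> Result<u64, ()> = |p: &Payload| Ok(p.withdrawals_root);
    c
});

fn main() {
    assert_eq!(Payload { withdrawals_root: 7 }.withdrawals_root(), Ok(7));
}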

View File

@ -341,6 +341,8 @@ impl<E: EthSpec> SignedBeaconBlockCapella<E, BlindedPayload<E>> {
voluntary_exits,
sync_aggregate,
execution_payload: BlindedPayloadCapella { .. },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
},
},
signature,
@ -362,6 +364,8 @@ impl<E: EthSpec> SignedBeaconBlockCapella<E, BlindedPayload<E>> {
voluntary_exits,
sync_aggregate,
execution_payload: FullPayloadCapella { execution_payload },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
},
},
signature,
@ -393,6 +397,8 @@ impl<E: EthSpec> SignedBeaconBlockEip4844<E, BlindedPayload<E>> {
voluntary_exits,
sync_aggregate,
execution_payload: BlindedPayloadEip4844 { .. },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
blob_kzg_commitments,
},
},
@ -415,6 +421,8 @@ impl<E: EthSpec> SignedBeaconBlockEip4844<E, BlindedPayload<E>> {
voluntary_exits,
sync_aggregate,
execution_payload: FullPayloadEip4844 { execution_payload },
#[cfg(feature = "withdrawals")]
bls_to_execution_changes,
blob_kzg_commitments,
},
},

View File

@ -0,0 +1,26 @@
use crate::test_utils::TestRandom;
use crate::*;
use bls::Signature;
use serde_derive::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
use test_random_derive::TestRandom;
use tree_hash_derive::TreeHash;
/// A signed request to change a validator's withdrawal credentials to an eth1 execution address.
#[cfg_attr(feature = "arbitrary-fuzz", derive(arbitrary::Arbitrary))]
#[derive(
Debug, PartialEq, Hash, Clone, Serialize, Deserialize, Encode, Decode, TreeHash, TestRandom,
)]
pub struct SignedBlsToExecutionChange {
pub message: BlsToExecutionChange,
pub signature: Signature,
}
#[cfg(test)]
mod tests {
use super::*;
ssz_and_tree_hash_tests!(SignedBlsToExecutionChange);
}
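For reference, a hedged sketch of the round trip those generated tests exercise, assuming the `ssz` crate's `Encode`/`Decode` traits backing the derives above; the helper is illustrative only:

use ssz::{Decode, Encode};

// Encode to SSZ bytes and decode back, as the generated
// `ssz_and_tree_hash_tests!` cases do.
fn ssz_round_trip(change: &SignedBlsToExecutionChange) -> SignedBlsToExecutionChange {
    let bytes = change.as_ssz_bytes();
    SignedBlsToExecutionChange::from_ssz_bytes(&bytes).expect("SSZ round trip decodes")
}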

View File

@ -17,7 +17,7 @@ impl CachedTreeHash<TreeHashCache> for Validator {
/// Efficiently tree hash a `Validator`, assuming it was updated by a valid state transition.
///
/// Specifically, we assume that the `pubkey` and `withdrawal_credentials` fields are constant.
/// Specifically, we assume that the `pubkey` field is constant.
fn recalculate_tree_hash_root(
&self,
arena: &mut CacheArena,
@ -29,8 +29,8 @@ impl CachedTreeHash<TreeHashCache> for Validator {
.iter_mut(arena)?
.enumerate()
.flat_map(|(i, leaf)| {
// Fields pubkey and withdrawal_credentials are constant
if (i == 0 || i == 1) && cache.initialized {
// Pubkey field (index 0) is constant.
if i == 0 && cache.initialized {
None
} else if process_field_by_index(self, i, leaf, !cache.initialized) {
Some(i)

View File

@ -1,5 +1,6 @@
use crate::{
test_utils::TestRandom, BeaconState, ChainSpec, Epoch, EthSpec, Hash256, PublicKeyBytes,
test_utils::TestRandom, Address, BeaconState, ChainSpec, Epoch, EthSpec, Hash256,
PublicKeyBytes,
};
use serde_derive::{Deserialize, Serialize};
use ssz_derive::{Decode, Encode};
@ -75,6 +76,28 @@ impl Validator {
.unwrap_or(false)
}
/// Get the eth1 withdrawal address if this validator has one initialized.
pub fn get_eth1_withdrawal_address(&self, spec: &ChainSpec) -> Option<Address> {
self.has_eth1_withdrawal_credential(spec)
.then(|| {
self.withdrawal_credentials
.as_bytes()
.get(12..)
.map(Address::from_slice)
})
.flatten()
}
/// Changes the withdrawal credentials to the provided eth1 execution address.
///
/// WARNING: this function performs NO validation - it simply overwrites the credentials.
pub fn change_withdrawal_credentials(&mut self, execution_address: &Address, spec: &ChainSpec) {
let mut bytes = [0u8; 32];
bytes[0] = spec.eth1_address_withdrawal_prefix_byte;
bytes[12..].copy_from_slice(execution_address.as_bytes());
self.withdrawal_credentials = Hash256::from(bytes);
}
/// Returns `true` if the validator is fully withdrawable at some epoch
pub fn is_fully_withdrawable_at(&self, balance: u64, epoch: Epoch, spec: &ChainSpec) -> bool {
self.has_eth1_withdrawal_credential(spec) && self.withdrawable_epoch <= epoch && balance > 0
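The helpers above fix the byte layout of eth1-style credentials: byte 0 carries the withdrawal prefix and bytes 12..32 carry the execution address. A standalone round-trip sketch of that layout, with a hard-coded 0x01 prefix and plain byte arrays as illustrative stand-ins for `ChainSpec::eth1_address_withdrawal_prefix_byte`, `Hash256`, and `Address`:

const ETH1_ADDRESS_WITHDRAWAL_PREFIX: u8 = 0x01;

fn credentials_from_address(address: [u8; 20]) -> [u8; 32] {
    // Prefix byte first, execution address in the trailing 20 bytes.
    let mut credentials = [0u8; 32];
    credentials[0] = ETH1_ADDRESS_WITHDRAWAL_PREFIX;
    credentials[12..].copy_from_slice(&address);
    credentials
}

fn address_from_credentials(credentials: &[u8; 32]) -> Option<[u8; 20]> {
    // Only eth1-prefixed credentials contain an execution address.
    if credentials[0] != ETH1_ADDRESS_WITHDRAWAL_PREFIX {
        return None;
    }
    let mut address = [0u8; 20];
    address.copy_from_slice(&credentials[12..]);
    Some(address)
}

fn main() {
    let address = [0xab; 20];
    let credentials = credentials_from_address(address);
    assert_eq!(address_from_credentials(&credentials), Some(address));
}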

View File

@ -1,4 +1,4 @@
TESTS_TAG := v1.2.0
TESTS_TAG := f5c7cf78
TESTS = general minimal mainnet
TARBALLS = $(patsubst %,%-$(TESTS_TAG).tar.gz,$(TESTS))

View File

@ -289,8 +289,9 @@ impl<E: EthSpec, T: EpochTransition<E>> Case for EpochProcessing<E, T> {
&& T::name() != "participation_flag_updates"
}
// No phase0 tests for Altair and later.
ForkName::Altair | ForkName::Merge => T::name() != "participation_record_updates",
ForkName::Capella => false, // TODO: revisit when tests are out
ForkName::Altair | ForkName::Merge | ForkName::Capella => {
T::name() != "participation_record_updates"
}
ForkName::Eip4844 => false, // TODO: revisit when tests are out
}
}

View File

@ -3,7 +3,7 @@ use crate::case_result::compare_beacon_state_results_without_caches;
use crate::cases::common::previous_fork;
use crate::decode::{ssz_decode_state, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::upgrade::{upgrade_to_altair, upgrade_to_bellatrix};
use state_processing::upgrade::{upgrade_to_altair, upgrade_to_bellatrix, upgrade_to_capella};
use types::{BeaconState, ForkName};
#[derive(Debug, Clone, Default, Deserialize)]
@ -61,8 +61,8 @@ impl<E: EthSpec> Case for ForkTest<E> {
ForkName::Base => panic!("phase0 not supported"),
ForkName::Altair => upgrade_to_altair(&mut result_state, spec).map(|_| result_state),
ForkName::Merge => upgrade_to_bellatrix(&mut result_state, spec).map(|_| result_state),
ForkName::Capella => upgrade_to_capella(&mut result_state, spec).map(|_| result_state),
ForkName::Eip4844 => panic!("eip4844 not supported"),
ForkName::Capella => panic!("capella not supported"),
};
compare_beacon_state_results_without_caches(&mut result, &mut expected)

View File

@ -1,13 +1,10 @@
use super::*;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::decode::{ssz_decode_file, ssz_decode_state, yaml_decode_file};
use crate::decode::{ssz_decode_file, ssz_decode_file_with, ssz_decode_state, yaml_decode_file};
use serde_derive::Deserialize;
use state_processing::initialize_beacon_state_from_eth1;
use std::path::PathBuf;
use types::{
BeaconState, Deposit, EthSpec, ExecutionPayloadHeader, ExecutionPayloadHeaderMerge, ForkName,
Hash256,
};
use types::{BeaconState, Deposit, EthSpec, ExecutionPayloadHeader, ForkName, Hash256};
#[derive(Debug, Clone, Deserialize)]
struct Metadata {
@ -41,14 +38,10 @@ impl<E: EthSpec> LoadCase for GenesisInitialization<E> {
let meta: Metadata = yaml_decode_file(&path.join("meta.yaml"))?;
let execution_payload_header: Option<ExecutionPayloadHeader<E>> =
if meta.execution_payload_header.unwrap_or(false) {
//FIXME(sean) we could decode based on timestamp - we probably don't do decode a payload
// without a block this elsewhere at presetn. But when we support SSZ in the builder api we may need to.
// Although that API should include fork info. Hardcoding this for now
Some(ExecutionPayloadHeader::Merge(ssz_decode_file::<
ExecutionPayloadHeaderMerge<E>,
>(
Some(ssz_decode_file_with(
&path.join("execution_payload_header.ssz_snappy"),
)?))
|bytes| ExecutionPayloadHeader::from_ssz_bytes(bytes, fork_name),
)?)
} else {
None
};
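The same fork-aware decoding pattern (pass the fork into a closure so it can select the right SSZ variant) recurs in the operations tests below. A minimal standalone sketch of the shape, with `decode_with`, `Fork`, and `Header` as illustrative stand-ins for `ssz_decode_file_with`, `ForkName`, and `ExecutionPayloadHeader`:

enum Fork {
    Merge,
    Capella,
}

#[derive(Debug, PartialEq)]
enum Header {
    Merge(u64),
    Capella { withdrawals_root: u64 },
}

// Stand-in for `ssz_decode_file_with`: read the bytes, then let the caller pick the variant.
fn decode_with<T>(bytes: &[u8], f: impl FnOnce(&[u8]) -> Result<T, String>) -> Result<T, String> {
    f(bytes)
}

// Stand-in for `ExecutionPayloadHeader::from_ssz_bytes(bytes, fork_name)`.
fn header_from_bytes(bytes: &[u8], fork: Fork) -> Result<Header, String> {
    let value = *bytes.first().ok_or_else(|| "empty input".to_string())? as u64;
    Ok(match fork {
        Fork::Merge => Header::Merge(value),
        Fork::Capella => Header::Capella {
            withdrawals_root: value,
        },
    })
}

fn main() {
    let bytes = [42u8];
    let merge = decode_with(&bytes, |b| header_from_bytes(b, Fork::Merge)).unwrap();
    assert_eq!(merge, Header::Merge(42));
    let capella = decode_with(&bytes, |b| header_from_bytes(b, Fork::Capella)).unwrap();
    assert_eq!(capella, Header::Capella { withdrawals_root: 42 });
}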

View File

@ -3,8 +3,11 @@ use crate::bls_setting::BlsSetting;
use crate::case_result::compare_beacon_state_results_without_caches;
use crate::decode::{ssz_decode_file, ssz_decode_file_with, ssz_decode_state, yaml_decode_file};
use crate::testing_spec;
use crate::type_name::TypeName;
use serde_derive::Deserialize;
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
use state_processing::per_block_processing::process_operations::{
process_bls_to_execution_changes,
};
use state_processing::{
per_block_processing::{
errors::BlockProcessingError,
@ -21,7 +24,7 @@ use std::fmt::Debug;
use std::path::Path;
use types::{
Attestation, AttesterSlashing, BeaconBlock, BeaconState, BlindedPayload, ChainSpec, Deposit,
EthSpec, ExecutionPayload, ExecutionPayloadMerge, ForkName, FullPayload, ProposerSlashing,
EthSpec, ExecutionPayload, ForkName, FullPayload, ProposerSlashing, SignedBlsToExecutionChange,
SignedVoluntaryExit, SyncAggregate,
};
@ -36,6 +39,12 @@ struct ExecutionMetadata {
execution_valid: bool,
}
/// Newtype for testing withdrawals.
#[derive(Debug, Clone, Deserialize)]
pub struct WithdrawalsPayload<T: EthSpec> {
payload: FullPayload<T>,
}
#[derive(Debug, Clone)]
pub struct Operations<E: EthSpec, O: Operation<E>> {
metadata: Metadata,
@ -45,10 +54,8 @@ pub struct Operations<E: EthSpec, O: Operation<E>> {
pub post: Option<BeaconState<E>>,
}
pub trait Operation<E: EthSpec>: TypeName + Debug + Sync + Sized {
fn handler_name() -> String {
Self::name().to_lowercase()
}
pub trait Operation<E: EthSpec>: Debug + Sync + Sized {
fn handler_name() -> String;
fn filename() -> String {
format!("{}.ssz_snappy", Self::handler_name())
@ -58,7 +65,7 @@ pub trait Operation<E: EthSpec>: TypeName + Debug + Sync + Sized {
true
}
fn decode(path: &Path, spec: &ChainSpec) -> Result<Self, Error>;
fn decode(path: &Path, fork_name: ForkName, spec: &ChainSpec) -> Result<Self, Error>;
fn apply_to(
&self,
@ -69,7 +76,11 @@ pub trait Operation<E: EthSpec>: TypeName + Debug + Sync + Sized {
}
impl<E: EthSpec> Operation<E> for Attestation<E> {
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
fn handler_name() -> String {
"attestation".into()
}
fn decode(path: &Path, _fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file(path)
}
@ -109,7 +120,7 @@ impl<E: EthSpec> Operation<E> for AttesterSlashing<E> {
"attester_slashing".into()
}
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
fn decode(path: &Path, _fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file(path)
}
@ -131,7 +142,11 @@ impl<E: EthSpec> Operation<E> for AttesterSlashing<E> {
}
impl<E: EthSpec> Operation<E> for Deposit {
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
fn handler_name() -> String {
"deposit".into()
}
fn decode(path: &Path, _fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file(path)
}
@ -155,7 +170,7 @@ impl<E: EthSpec> Operation<E> for ProposerSlashing {
"proposer_slashing".into()
}
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
fn decode(path: &Path, _fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file(path)
}
@ -181,7 +196,7 @@ impl<E: EthSpec> Operation<E> for SignedVoluntaryExit {
"voluntary_exit".into()
}
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
fn decode(path: &Path, _fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file(path)
}
@ -204,7 +219,7 @@ impl<E: EthSpec> Operation<E> for BeaconBlock<E> {
"block.ssz_snappy".into()
}
fn decode(path: &Path, spec: &ChainSpec) -> Result<Self, Error> {
fn decode(path: &Path, _fork_name: ForkName, spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file_with(path, |bytes| BeaconBlock::from_ssz_bytes(bytes, spec))
}
@ -239,7 +254,7 @@ impl<E: EthSpec> Operation<E> for SyncAggregate<E> {
fork_name != ForkName::Base
}
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
fn decode(path: &Path, _fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file(path)
}
@ -267,13 +282,11 @@ impl<E: EthSpec> Operation<E> for FullPayload<E> {
fork_name != ForkName::Base && fork_name != ForkName::Altair
}
//FIXME(sean) we could decode based on timestamp - we probably don't do decode a payload
// without a block this elsewhere at presetn. But when we support SSZ in the builder api we may need to.
// Although that API should include fork info. Hardcoding this for now
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file::<ExecutionPayloadMerge<E>>(path)
.map(ExecutionPayload::Merge)
.map(Into::into)
fn decode(path: &Path, fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file_with(path, |bytes| {
ExecutionPayload::from_ssz_bytes(bytes, fork_name)
})
.map(Into::into)
}
fn apply_to(
@ -306,13 +319,11 @@ impl<E: EthSpec> Operation<E> for BlindedPayload<E> {
fork_name != ForkName::Base && fork_name != ForkName::Altair
}
fn decode(path: &Path, _spec: &ChainSpec) -> Result<Self, Error> {
//FIXME(sean) we could decode based on timestamp - we probably don't do decode a payload
// without a block this elsewhere at presetn. But when we support SSZ in the builder api we may need to.
// Although that API should include fork info. Hardcoding this for now
let payload: Result<ExecutionPayload<E>, Error> =
ssz_decode_file::<ExecutionPayloadMerge<E>>(path).map(Into::into);
payload.map(Into::into)
fn decode(path: &Path, fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file_with(path, |bytes| {
ExecutionPayload::from_ssz_bytes(bytes, fork_name)
})
.map(Into::into)
}
fn apply_to(
@ -333,6 +344,67 @@ impl<E: EthSpec> Operation<E> for BlindedPayload<E> {
}
}
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
impl<E: EthSpec> Operation<E> for WithdrawalsPayload<E> {
fn handler_name() -> String {
"withdrawals".into()
}
fn filename() -> String {
"execution_payload.ssz_snappy".into()
}
fn is_enabled_for_fork(fork_name: ForkName) -> bool {
fork_name != ForkName::Base && fork_name != ForkName::Altair && fork_name != ForkName::Merge
}
fn decode(path: &Path, fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file_with(path, |bytes| {
ExecutionPayload::from_ssz_bytes(bytes, fork_name)
})
.map(|payload| WithdrawalsPayload {
payload: payload.into(),
})
}
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
_: &Operations<E, Self>,
) -> Result<(), BlockProcessingError> {
process_withdrawals::<_, FullPayload<_>>(state, self.payload.to_ref(), spec)
}
}
#[cfg(all(feature = "withdrawals", feature = "withdrawals-processing"))]
impl<E: EthSpec> Operation<E> for SignedBlsToExecutionChange {
fn handler_name() -> String {
"bls_to_execution_change".into()
}
fn filename() -> String {
"address_change.ssz_snappy".into()
}
fn is_enabled_for_fork(fork_name: ForkName) -> bool {
fork_name != ForkName::Base && fork_name != ForkName::Altair && fork_name != ForkName::Merge
}
fn decode(path: &Path, _fork_name: ForkName, _spec: &ChainSpec) -> Result<Self, Error> {
ssz_decode_file(path)
}
fn apply_to(
&self,
state: &mut BeaconState<E>,
spec: &ChainSpec,
_extra: &Operations<E, Self>,
) -> Result<(), BlockProcessingError> {
process_bls_to_execution_changes(state, &[self.clone()], VerifySignatures::True, spec)
}
}
impl<E: EthSpec, O: Operation<E>> LoadCase for Operations<E, O> {
fn load_from_dir(path: &Path, fork_name: ForkName) -> Result<Self, Error> {
let spec = &testing_spec::<E>(fork_name);
@ -356,7 +428,7 @@ impl<E: EthSpec, O: Operation<E>> LoadCase for Operations<E, O> {
// Check BLS setting here before SSZ deserialization, as most types require signatures
// to be valid.
let (operation, bls_error) = if metadata.bls_setting.unwrap_or_default().check().is_ok() {
match O::decode(&path.join(O::filename()), spec) {
match O::decode(&path.join(O::filename()), fork_name, spec) {
Ok(op) => (Some(op), None),
Err(Error::InvalidBLSInput(error)) => (None, Some(error)),
Err(e) => return Err(e),
@ -399,9 +471,11 @@ impl<E: EthSpec, O: Operation<E>> Case for Operations<E, O> {
let mut expected = self.post.clone();
// Processing requires the committee caches.
state
.build_all_committee_caches(spec)
.expect("committee caches OK");
// NOTE: some of the withdrawals tests have 0 active validators, so do not try
// to build the committee cache in this case.
if O::handler_name() != "withdrawals" {
state.build_all_committee_caches(spec).unwrap();
}
let mut result = self
.operation

View File

@ -42,14 +42,17 @@ impl<E: EthSpec> LoadCase for TransitionTest<E> {
spec.altair_fork_epoch = Some(Epoch::new(0));
spec.bellatrix_fork_epoch = Some(metadata.fork_epoch);
}
ForkName::Eip4844 => {
spec.bellatrix_fork_epoch = Some(Epoch::new(0));
spec.eip4844_fork_epoch = Some(metadata.fork_epoch);
}
ForkName::Capella => {
spec.capella_fork_epoch = Some(Epoch::new(0));
spec.altair_fork_epoch = Some(Epoch::new(0));
spec.bellatrix_fork_epoch = Some(Epoch::new(0));
spec.capella_fork_epoch = Some(metadata.fork_epoch);
}
ForkName::Eip4844 => {
spec.altair_fork_epoch = Some(Epoch::new(0));
spec.bellatrix_fork_epoch = Some(Epoch::new(0));
spec.capella_fork_epoch = Some(Epoch::new(0));
spec.eip4844_fork_epoch = Some(metadata.fork_epoch);
}
}
// Load blocks

View File

@ -24,6 +24,11 @@ pub trait Handler {
fn run(&self) {
for fork_name in ForkName::list_all() {
// FIXME(eip4844): enable eip4844
if fork_name == ForkName::Eip4844 {
continue;
}
if self.is_enabled_for_fork(fork_name) {
self.run_for_fork(fork_name)
}
@ -218,6 +223,10 @@ impl<T, E> SszStaticHandler<T, E> {
Self::for_forks(vec![ForkName::Merge])
}
pub fn capella_only() -> Self {
Self::for_forks(vec![ForkName::Capella])
}
pub fn merge_and_later() -> Self {
Self::for_forks(ForkName::list_all()[2..].to_vec())
}
@ -533,10 +542,8 @@ impl<E: EthSpec + TypeName> Handler for ForkChoiceHandler<E> {
}
fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool {
// Merge block tests are only enabled for Bellatrix or later.
if self.handler_name == "on_merge_block"
&& (fork_name == ForkName::Base || fork_name == ForkName::Altair)
{
// Merge block tests are only enabled for Bellatrix.
if self.handler_name == "on_merge_block" && fork_name != ForkName::Merge {
return false;
}

View File

@ -1,10 +1,9 @@
pub use case_result::CaseResult;
pub use cases::Case;
pub use cases::{
EffectiveBalanceUpdates, Eth1DataReset, HistoricalRootsUpdate, InactivityUpdates,
Case, EffectiveBalanceUpdates, Eth1DataReset, HistoricalRootsUpdate, InactivityUpdates,
JustificationAndFinalization, ParticipationFlagUpdates, ParticipationRecordUpdates,
RandaoMixesReset, RegistryUpdates, RewardsAndPenalties, Slashings, SlashingsReset,
SyncCommitteeUpdates,
SyncCommitteeUpdates, WithdrawalsPayload,
};
pub use decode::log_file_access;
pub use error::Error;

View File

@ -45,6 +45,8 @@ type_name_generic!(BeaconBlockBody);
type_name_generic!(BeaconBlockBodyBase, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyAltair, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyMerge, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyCapella, "BeaconBlockBody");
type_name_generic!(BeaconBlockBodyEip4844, "BeaconBlockBody");
type_name!(BeaconBlockHeader);
type_name_generic!(BeaconState);
type_name!(Checkpoint);
@ -54,8 +56,14 @@ type_name!(DepositData);
type_name!(DepositMessage);
type_name!(Eth1Data);
type_name_generic!(ExecutionPayload);
type_name_generic!(ExecutionPayloadMerge, "ExecutionPayload");
type_name_generic!(ExecutionPayloadCapella, "ExecutionPayload");
type_name_generic!(ExecutionPayloadEip4844, "ExecutionPayload");
type_name_generic!(FullPayload, "ExecutionPayload");
type_name_generic!(ExecutionPayloadHeader);
type_name_generic!(ExecutionPayloadHeaderMerge, "ExecutionPayloadHeader");
type_name_generic!(ExecutionPayloadHeaderCapella, "ExecutionPayloadHeader");
type_name_generic!(ExecutionPayloadHeaderEip4844, "ExecutionPayloadHeader");
type_name_generic!(BlindedPayload, "ExecutionPayloadHeader");
type_name!(Fork);
type_name!(ForkData);
@ -76,3 +84,6 @@ type_name_generic!(SyncAggregate);
type_name_generic!(SyncCommittee);
type_name!(Validator);
type_name!(VoluntaryExit);
type_name!(Withdrawal);
type_name!(BlsToExecutionChange, "BLSToExecutionChange");
type_name!(SignedBlsToExecutionChange, "SignedBLSToExecutionChange");

View File

@ -82,6 +82,18 @@ fn operations_execution_payload_blinded() {
OperationsHandler::<MainnetEthSpec, BlindedPayload<_>>::default().run();
}
#[test]
fn operations_withdrawals() {
OperationsHandler::<MinimalEthSpec, WithdrawalsPayload<_>>::default().run();
OperationsHandler::<MainnetEthSpec, WithdrawalsPayload<_>>::default().run();
}
#[test]
fn operations_bls_to_execution_change() {
OperationsHandler::<MinimalEthSpec, SignedBlsToExecutionChange>::default().run();
OperationsHandler::<MainnetEthSpec, SignedBlsToExecutionChange>::default().run();
}
#[test]
fn sanity_blocks() {
SanityBlocksHandler::<MinimalEthSpec>::default().run();
@ -250,6 +262,10 @@ mod ssz_static {
.run();
SszStaticHandler::<BeaconBlockBodyMerge<MainnetEthSpec>, MainnetEthSpec>::merge_only()
.run();
SszStaticHandler::<BeaconBlockBodyCapella<MinimalEthSpec>, MinimalEthSpec>::capella_only()
.run();
SszStaticHandler::<BeaconBlockBodyCapella<MainnetEthSpec>, MainnetEthSpec>::capella_only()
.run();
}
// Altair and later
@ -302,18 +318,44 @@ mod ssz_static {
// Merge and later
#[test]
fn execution_payload() {
SszStaticHandler::<ExecutionPayload<MinimalEthSpec>, MinimalEthSpec>::merge_and_later()
SszStaticHandler::<ExecutionPayloadMerge<MinimalEthSpec>, MinimalEthSpec>::merge_only()
.run();
SszStaticHandler::<ExecutionPayload<MainnetEthSpec>, MainnetEthSpec>::merge_and_later()
SszStaticHandler::<ExecutionPayloadMerge<MainnetEthSpec>, MainnetEthSpec>::merge_only()
.run();
SszStaticHandler::<ExecutionPayloadCapella<MinimalEthSpec>, MinimalEthSpec>::capella_only()
.run();
SszStaticHandler::<ExecutionPayloadCapella<MainnetEthSpec>, MainnetEthSpec>::capella_only()
.run();
}
#[test]
fn execution_payload_header() {
SszStaticHandler::<ExecutionPayloadHeader<MinimalEthSpec>, MinimalEthSpec>::merge_and_later()
SszStaticHandler::<ExecutionPayloadHeaderMerge<MinimalEthSpec>, MinimalEthSpec>::merge_only()
.run();
SszStaticHandler::<ExecutionPayloadHeader<MainnetEthSpec>, MainnetEthSpec>::merge_and_later()
SszStaticHandler::<ExecutionPayloadHeaderMerge<MainnetEthSpec>, MainnetEthSpec>::merge_only()
.run();
SszStaticHandler::<ExecutionPayloadHeaderCapella<MinimalEthSpec>, MinimalEthSpec>
::capella_only().run();
SszStaticHandler::<ExecutionPayloadHeaderCapella<MainnetEthSpec>, MainnetEthSpec>
::capella_only().run();
}
#[test]
fn withdrawal() {
SszStaticHandler::<Withdrawal, MinimalEthSpec>::capella_only().run();
SszStaticHandler::<Withdrawal, MainnetEthSpec>::capella_only().run();
}
#[test]
fn bls_to_execution_change() {
SszStaticHandler::<BlsToExecutionChange, MinimalEthSpec>::capella_only().run();
SszStaticHandler::<BlsToExecutionChange, MainnetEthSpec>::capella_only().run();
}
#[test]
fn signed_bls_to_execution_change() {
SszStaticHandler::<SignedBlsToExecutionChange, MinimalEthSpec>::capella_only().run();
SszStaticHandler::<SignedBlsToExecutionChange, MainnetEthSpec>::capella_only().run();
}
}

View File

@ -21,3 +21,7 @@ deposit_contract = { path = "../../common/deposit_contract" }
reqwest = { version = "0.11.0", features = ["json"] }
hex = "0.4.2"
fork_choice = { path = "../../consensus/fork_choice" }
[features]
default = []
withdrawals = []

View File

@ -16,8 +16,8 @@ use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH};
use task_executor::TaskExecutor;
use tokio::time::sleep;
use types::{
Address, ChainSpec, EthSpec, ExecutionBlockHash, ExecutionPayload, FullPayload, Hash256,
MainnetEthSpec, PublicKeyBytes, Slot, Uint256,
Address, ChainSpec, EthSpec, ExecutionBlockHash, ExecutionPayload, ForkName, FullPayload,
Hash256, MainnetEthSpec, PublicKeyBytes, Slot, Uint256,
};
const EXECUTION_ENGINE_START_TIMEOUT: Duration = Duration::from_secs(20);
@ -326,6 +326,10 @@ impl<E: GenericExecutionEngine> TestRig<E> {
proposer_index,
forkchoice_update_params,
builder_params,
// FIXME: think about how to test other forks
ForkName::Merge,
#[cfg(feature = "withdrawals")]
None,
&self.spec,
)
.await
@ -450,6 +454,10 @@ impl<E: GenericExecutionEngine> TestRig<E> {
proposer_index,
forkchoice_update_params,
builder_params,
// FIXME: think about how to test other forks
ForkName::Merge,
#[cfg(feature = "withdrawals")]
None,
&self.spec,
)
.await