Remove withdrawals-processing feature (#3864)

* Use spec to Determine Supported Engine APIs

* Remove `withdrawals-processing` feature

* Fixed Tests

* Missed Some Spots

* Fixed Another Test

* Stupid Clippy
ethDreamer 2023-01-11 22:15:08 -06:00 committed by GitHub
parent 98b11bbd3f
commit 52c1055fdc
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
21 changed files with 113 additions and 92 deletions
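
The first commit bullet ("Use spec to Determine Supported Engine APIs") captures the core idea of the change: instead of gating the Capella/EIP-4844 engine methods behind a compile-time `withdrawals-processing` Cargo feature, the set of supported engine API versions is now derived from the `ChainSpec` when the JSON-RPC client is constructed. Below is a minimal standalone sketch of that derivation; the trimmed-down `ChainSpec`, the helper `supported_apis_from_spec`, and the `main` demo are hypothetical stand-ins, while the `SupportedApis` fields and the `capella_fork_epoch` / `eip4844_fork_epoch` checks mirror the constructor logic added to `HttpJsonRpc::new` and `new_with_auth` in the hunks further down.

    // Sketch only: simplified stand-ins for Lighthouse's `ChainSpec` and
    // `SupportedApis`; field names follow this diff, everything else is illustrative.
    #[derive(Default)]
    struct ChainSpec {
        capella_fork_epoch: Option<u64>,
        eip4844_fork_epoch: Option<u64>,
    }

    #[derive(Clone, Copy, Debug)]
    struct SupportedApis {
        new_payload_v1: bool,
        new_payload_v2: bool,
        forkchoice_updated_v1: bool,
        forkchoice_updated_v2: bool,
        get_payload_v1: bool,
        get_payload_v2: bool,
        exchange_transition_configuration_v1: bool,
    }

    fn supported_apis_from_spec(spec: &ChainSpec) -> SupportedApis {
        // The v2 engine methods are only advertised when a fork that needs them
        // (Capella or EIP-4844) is actually scheduled in the spec.
        let v2 = spec.capella_fork_epoch.is_some() || spec.eip4844_fork_epoch.is_some();
        SupportedApis {
            new_payload_v1: true,
            new_payload_v2: v2,
            forkchoice_updated_v1: true,
            forkchoice_updated_v2: v2,
            get_payload_v1: true,
            get_payload_v2: v2,
            exchange_transition_configuration_v1: true,
        }
    }

    fn main() {
        // Without Capella or EIP-4844 scheduled, only the v1 methods are advertised.
        let mut spec = ChainSpec::default();
        assert!(!supported_apis_from_spec(&spec).new_payload_v2);

        // Scheduling Capella (hypothetical epoch value) turns the v2 flags on.
        spec.capella_fork_epoch = Some(194_048);
        assert!(supported_apis_from_spec(&spec).forkchoice_updated_v2);
        println!("{:?}", supported_apis_from_spec(&spec));
    }

In the actual diff this value is stored on the client as a cached `Option<SupportedApis>` behind an `RwLock`, with a `FIXME` to drop the heuristic once `engine_getCapabilities` is implemented by execution clients (see the `HttpJsonRpc` hunks below).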

@@ -66,7 +66,7 @@ jobs:
       DOCKER_CLI_EXPERIMENTAL: enabled
       VERSION: ${{ needs.extract-version.outputs.VERSION }}
       VERSION_SUFFIX: ${{ needs.extract-version.outputs.VERSION_SUFFIX }}
-      CROSS_FEATURES: withdrawals-processing
+      CROSS_FEATURES: null
     steps:
       - uses: actions/checkout@v3
       - name: Update Rust

@@ -21,7 +21,7 @@ CROSS_FEATURES ?= gnosis,slasher-lmdb,slasher-mdbx
 CROSS_PROFILE ?= release
 # List of features to use when running EF tests.
-EF_TEST_FEATURES ?= beacon_chain/withdrawals-processing
+EF_TEST_FEATURES ?=
 # Cargo profile for regular builds.
 PROFILE ?= release

@@ -89,12 +89,12 @@ build-release-tarballs:
 # Runs the full workspace tests in **release**, without downloading any additional
 # test vectors.
 test-release:
-	cargo test --workspace --features withdrawals-processing --release --exclude ef_tests --exclude beacon_chain --exclude slasher
+	cargo test --workspace --release --exclude ef_tests --exclude beacon_chain --exclude slasher
 # Runs the full workspace tests in **debug**, without downloading any additional test
 # vectors.
 test-debug:
-	cargo test --workspace --features withdrawals-processing --exclude ef_tests --exclude beacon_chain
+	cargo test --workspace --exclude ef_tests --exclude beacon_chain
 # Runs cargo-fmt (linter).
 cargo-fmt:

@@ -120,7 +120,7 @@ run-ef-tests:
 test-beacon-chain: $(patsubst %,test-beacon-chain-%,$(FORKS))
 test-beacon-chain-%:
-	env FORK_NAME=$* cargo test --release --features fork_from_env,withdrawals-processing -p beacon_chain
+	env FORK_NAME=$* cargo test --release --features fork_from_env -p beacon_chain
 # Run the tests in the `operation_pool` crate for all known forks.
 test-op-pool: $(patsubst %,test-op-pool-%,$(FORKS))

@@ -13,11 +13,6 @@ node_test_rig = { path = "../testing/node_test_rig" }
 [features]
 write_ssz_files = ["beacon_chain/write_ssz_files"] # Writes debugging .ssz files to /tmp during block processing.
-withdrawals-processing = [
-    "beacon_chain/withdrawals-processing",
-    "store/withdrawals-processing",
-    "execution_layer/withdrawals-processing",
-]
 [dependencies]
 eth2_config = { path = "../common/eth2_config" }

@@ -10,10 +10,6 @@ default = ["participation_metrics"]
 write_ssz_files = [] # Writes debugging .ssz files to /tmp during block processing.
 participation_metrics = [] # Exposes validator participation metrics to Prometheus.
 fork_from_env = [] # Initialise the harness chain spec from the FORK_NAME env variable
-withdrawals-processing = [
-    "state_processing/withdrawals-processing",
-    "execution_layer/withdrawals-processing",
-]
 [dev-dependencies]
 maplit = "1.0.2"

@@ -366,6 +366,7 @@ where
         .collect::<Result<_, _>>()
         .unwrap();
+    let spec = MainnetEthSpec::default_spec();
     let config = execution_layer::Config {
         execution_endpoints: urls,
         secret_files: vec![],

@@ -376,6 +377,7 @@
         config,
         self.runtime.task_executor.clone(),
         self.log.clone(),
+        &spec,
     )
     .unwrap();

@@ -414,13 +416,11 @@
     });
     let mock = MockExecutionLayer::new(
         self.runtime.task_executor.clone(),
-        spec.terminal_total_difficulty,
         DEFAULT_TERMINAL_BLOCK,
-        spec.terminal_block_hash,
-        spec.terminal_block_hash_activation_epoch,
         shanghai_time,
         eip4844_time,
         Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
+        spec,
         None,
     );
     self.execution_layer = Some(mock.el.clone());

@@ -442,13 +442,11 @@
     });
     let mock_el = MockExecutionLayer::new(
         self.runtime.task_executor.clone(),
-        spec.terminal_total_difficulty,
         DEFAULT_TERMINAL_BLOCK,
-        spec.terminal_block_hash,
-        spec.terminal_block_hash_activation_epoch,
         shanghai_time,
         eip4844_time,
         Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
+        spec.clone(),
         Some(builder_url.clone()),
     )
     .move_to_terminal_block();

@@ -154,6 +154,7 @@ where
         config,
         context.executor.clone(),
         context.log().clone(),
+        &spec,
     )
     .map_err(|e| format!("unable to start execution layer endpoints: {:?}", e))?;
     Some(execution_layer)

@@ -122,7 +122,7 @@ impl SszEth1Cache {
             cache: self.deposit_cache.to_deposit_cache()?,
             last_processed_block: self.last_processed_block,
         }),
-        endpoint: endpoint_from_config(&config)
+        endpoint: endpoint_from_config(&config, &spec)
             .map_err(|e| format!("Failed to create endpoint: {:?}", e))?,
         to_finalize: RwLock::new(None),
         // Set the remote head_block zero when creating a new instance. We only care about

@@ -363,7 +363,7 @@ impl Default for Config {
     }
 }
-pub fn endpoint_from_config(config: &Config) -> Result<HttpJsonRpc, String> {
+pub fn endpoint_from_config(config: &Config, spec: &ChainSpec) -> Result<HttpJsonRpc, String> {
     match config.endpoint.clone() {
         Eth1Endpoint::Auth {
             endpoint,

@@ -373,11 +373,16 @@ pub fn endpoint_from_config(config: &Config) -> Result<HttpJsonRpc, String> {
         } => {
             let auth = Auth::new_with_path(jwt_path, jwt_id, jwt_version)
                 .map_err(|e| format!("Failed to initialize jwt auth: {:?}", e))?;
-            HttpJsonRpc::new_with_auth(endpoint, auth, Some(config.execution_timeout_multiplier))
-                .map_err(|e| format!("Failed to create eth1 json rpc client: {:?}", e))
+            HttpJsonRpc::new_with_auth(
+                endpoint,
+                auth,
+                Some(config.execution_timeout_multiplier),
+                spec,
+            )
+            .map_err(|e| format!("Failed to create eth1 json rpc client: {:?}", e))
         }
         Eth1Endpoint::NoAuth(endpoint) => {
-            HttpJsonRpc::new(endpoint, Some(config.execution_timeout_multiplier))
+            HttpJsonRpc::new(endpoint, Some(config.execution_timeout_multiplier), spec)
                 .map_err(|e| format!("Failed to create eth1 json rpc client: {:?}", e))
         }
     }

@@ -404,7 +409,7 @@ impl Service {
             deposit_cache: RwLock::new(DepositUpdater::new(
                 config.deposit_contract_deploy_block,
             )),
-            endpoint: endpoint_from_config(&config)?,
+            endpoint: endpoint_from_config(&config, &spec)?,
             to_finalize: RwLock::new(None),
             remote_head_block: RwLock::new(None),
             config: RwLock::new(config),

@@ -433,7 +438,7 @@ impl Service {
             inner: Arc::new(Inner {
                 block_cache: <_>::default(),
                 deposit_cache: RwLock::new(deposit_cache),
-                endpoint: endpoint_from_config(&config)
+                endpoint: endpoint_from_config(&config, &spec)
                     .map_err(Error::FailedToInitializeFromSnapshot)?,
                 to_finalize: RwLock::new(None),
                 remote_head_block: RwLock::new(None),

@@ -494,7 +494,8 @@ mod deposit_tree {
         let mut deposit_counts = vec![];
         let client =
-            HttpJsonRpc::new(SensitiveUrl::parse(&eth1.endpoint()).unwrap(), None).unwrap();
+            HttpJsonRpc::new(SensitiveUrl::parse(&eth1.endpoint()).unwrap(), None, spec)
+                .unwrap();
         // Perform deposits to the smart contract, recording it's state along the way.
         for deposit in &deposits {

@@ -598,8 +599,12 @@ mod http {
             .expect("should start eth1 environment");
         let deposit_contract = &eth1.deposit_contract;
         let web3 = eth1.web3();
-        let client =
-            HttpJsonRpc::new(SensitiveUrl::parse(&eth1.endpoint()).unwrap(), None).unwrap();
+        let client = HttpJsonRpc::new(
+            SensitiveUrl::parse(&eth1.endpoint()).unwrap(),
+            None,
+            &MainnetEthSpec::default_spec(),
+        )
+        .unwrap();
         let block_number = get_block_number(&web3).await;
         let logs = blocking_deposit_logs(&client, &eth1, 0..block_number).await;

@@ -697,6 +702,7 @@ mod fast {
         let web3 = eth1.web3();
         let now = get_block_number(&web3).await;
+        let spec = MainnetEthSpec::default_spec();
         let service = Service::new(
             Config {
                 endpoint: Eth1Endpoint::NoAuth(

@@ -710,11 +716,12 @@
                 ..Config::default()
             },
             log,
-            MainnetEthSpec::default_spec(),
+            spec.clone(),
         )
         .unwrap();
         let client =
-            HttpJsonRpc::new(SensitiveUrl::parse(&eth1.endpoint()).unwrap(), None).unwrap();
+            HttpJsonRpc::new(SensitiveUrl::parse(&eth1.endpoint()).unwrap(), None, &spec)
+                .unwrap();
         let n = 10;
         let deposits: Vec<_> = (0..n).map(|_| random_deposit_data()).collect();
         for deposit in &deposits {

@@ -4,8 +4,6 @@ version = "0.1.0"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[features]
-withdrawals-processing = ["state_processing/withdrawals-processing"]
 [dependencies]
 types = { path = "../../consensus/types"}

@@ -329,7 +329,7 @@ pub struct ProposeBlindedBlockResponse {
 // This name is work in progress, it could
 // change when this method is actually proposed
 // but I'm writing this as it has been described
-#[derive(Clone, Copy)]
+#[derive(Clone, Copy, Debug)]
 pub struct SupportedApis {
     pub new_payload_v1: bool,
     pub new_payload_v2: bool,

@@ -10,7 +10,7 @@ use serde_json::json;
 use tokio::sync::RwLock;
 use std::time::Duration;
-use types::EthSpec;
+use types::{ChainSpec, EthSpec};
 pub use deposit_log::{DepositLog, Log};
 pub use reqwest::Client;

@@ -538,12 +538,27 @@ impl HttpJsonRpc {
     pub fn new(
         url: SensitiveUrl,
         execution_timeout_multiplier: Option<u32>,
+        spec: &ChainSpec,
     ) -> Result<Self, Error> {
+        // FIXME: remove this `cached_supported_apis` spec hack once the `engine_getCapabilities`
+        // method is implemented in all execution clients:
+        // https://github.com/ethereum/execution-apis/issues/321
+        let cached_supported_apis = RwLock::new(Some(SupportedApis {
+            new_payload_v1: true,
+            new_payload_v2: spec.capella_fork_epoch.is_some() || spec.eip4844_fork_epoch.is_some(),
+            forkchoice_updated_v1: true,
+            forkchoice_updated_v2: spec.capella_fork_epoch.is_some()
+                || spec.eip4844_fork_epoch.is_some(),
+            get_payload_v1: true,
+            get_payload_v2: spec.capella_fork_epoch.is_some() || spec.eip4844_fork_epoch.is_some(),
+            exchange_transition_configuration_v1: true,
+        }));
         Ok(Self {
             client: Client::builder().build()?,
             url,
             execution_timeout_multiplier: execution_timeout_multiplier.unwrap_or(1),
-            cached_supported_apis: Default::default(),
+            cached_supported_apis,
             auth: None,
         })
     }

@@ -552,12 +567,27 @@ impl HttpJsonRpc {
         url: SensitiveUrl,
         auth: Auth,
         execution_timeout_multiplier: Option<u32>,
+        spec: &ChainSpec,
     ) -> Result<Self, Error> {
+        // FIXME: remove this `cached_supported_apis` spec hack once the `engine_getCapabilities`
+        // method is implemented in all execution clients:
+        // https://github.com/ethereum/execution-apis/issues/321
+        let cached_supported_apis = RwLock::new(Some(SupportedApis {
+            new_payload_v1: true,
+            new_payload_v2: spec.capella_fork_epoch.is_some() || spec.eip4844_fork_epoch.is_some(),
+            forkchoice_updated_v1: true,
+            forkchoice_updated_v2: spec.capella_fork_epoch.is_some()
+                || spec.eip4844_fork_epoch.is_some(),
+            get_payload_v1: true,
+            get_payload_v2: spec.capella_fork_epoch.is_some() || spec.eip4844_fork_epoch.is_some(),
+            exchange_transition_configuration_v1: true,
+        }));
         Ok(Self {
             client: Client::builder().build()?,
             url,
             execution_timeout_multiplier: execution_timeout_multiplier.unwrap_or(1),
-            cached_supported_apis: Default::default(),
+            cached_supported_apis,
             auth: Some(auth),
         })
     }

@@ -848,21 +878,25 @@ impl HttpJsonRpc {
         Ok(response)
     }

-    // this is a stub as this method hasn't been defined yet
-    pub async fn supported_apis_v1(&self) -> Result<SupportedApis, Error> {
+    // TODO: This is currently a stub for the `engine_getCapabilities`
+    // method. This stub is unused because we set cached_supported_apis
+    // in the constructor based on the `spec`
+    // Implement this once the execution clients support it
+    // https://github.com/ethereum/execution-apis/issues/321
+    pub async fn get_capabilities(&self) -> Result<SupportedApis, Error> {
         Ok(SupportedApis {
             new_payload_v1: true,
-            new_payload_v2: cfg!(any(feature = "withdrawals-processing", test)),
+            new_payload_v2: true,
             forkchoice_updated_v1: true,
-            forkchoice_updated_v2: cfg!(any(feature = "withdrawals-processing", test)),
+            forkchoice_updated_v2: true,
             get_payload_v1: true,
-            get_payload_v2: cfg!(any(feature = "withdrawals-processing", test)),
+            get_payload_v2: true,
             exchange_transition_configuration_v1: true,
         })
     }

-    pub async fn set_cached_supported_apis(&self, supported_apis: SupportedApis) {
-        *self.cached_supported_apis.write().await = Some(supported_apis);
+    pub async fn set_cached_supported_apis(&self, supported_apis: Option<SupportedApis>) {
+        *self.cached_supported_apis.write().await = supported_apis;
     }

     pub async fn get_cached_supported_apis(&self) -> Result<SupportedApis, Error> {

@@ -870,8 +904,8 @@ impl HttpJsonRpc {
         if let Some(supported_apis) = cached_opt {
             Ok(supported_apis)
         } else {
-            let supported_apis = self.supported_apis_v1().await?;
-            self.set_cached_supported_apis(supported_apis).await;
+            let supported_apis = self.get_capabilities().await?;
+            self.set_cached_supported_apis(Some(supported_apis)).await;
             Ok(supported_apis)
         }
     }

@@ -955,6 +989,7 @@ mod test {
     impl Tester {
         pub fn new(with_auth: bool) -> Self {
             let server = MockServer::unit_testing();
+            let spec = MainnetEthSpec::default_spec();
             let rpc_url = SensitiveUrl::parse(&server.url()).unwrap();
             let echo_url = SensitiveUrl::parse(&format!("{}/echo", server.url())).unwrap();

@@ -965,13 +1000,13 @@
                 let echo_auth =
                     Auth::new(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap(), None, None);
                 (
-                    Arc::new(HttpJsonRpc::new_with_auth(rpc_url, rpc_auth, None).unwrap()),
-                    Arc::new(HttpJsonRpc::new_with_auth(echo_url, echo_auth, None).unwrap()),
+                    Arc::new(HttpJsonRpc::new_with_auth(rpc_url, rpc_auth, None, &spec).unwrap()),
+                    Arc::new(HttpJsonRpc::new_with_auth(echo_url, echo_auth, None, &spec).unwrap()),
                 )
             } else {
                 (
-                    Arc::new(HttpJsonRpc::new(rpc_url, None).unwrap()),
-                    Arc::new(HttpJsonRpc::new(echo_url, None).unwrap()),
+                    Arc::new(HttpJsonRpc::new(rpc_url, None, &spec).unwrap()),
+                    Arc::new(HttpJsonRpc::new(echo_url, None, &spec).unwrap()),
                 )
             };

@@ -258,7 +258,12 @@ pub struct ExecutionLayer<T: EthSpec> {
 impl<T: EthSpec> ExecutionLayer<T> {
     /// Instantiate `Self` with an Execution engine specified in `Config`, using JSON-RPC via HTTP.
-    pub fn from_config(config: Config, executor: TaskExecutor, log: Logger) -> Result<Self, Error> {
+    pub fn from_config(
+        config: Config,
+        executor: TaskExecutor,
+        log: Logger,
+        spec: &ChainSpec,
+    ) -> Result<Self, Error> {
         let Config {
             execution_endpoints: urls,
             builder_url,

@@ -313,8 +318,9 @@
         let engine: Engine = {
             let auth = Auth::new(jwt_key, jwt_id, jwt_version);
             debug!(log, "Loaded execution endpoint"; "endpoint" => %execution_url, "jwt_path" => ?secret_file.as_path());
-            let api = HttpJsonRpc::new_with_auth(execution_url, auth, execution_timeout_multiplier)
-                .map_err(Error::ApiError)?;
+            let api =
+                HttpJsonRpc::new_with_auth(execution_url, auth, execution_timeout_multiplier, spec)
+                    .map_err(Error::ApiError)?;
             Engine::new(api, executor.clone(), &log)
         };

@@ -84,7 +84,8 @@ impl<E: EthSpec> TestingBuilder<E> {
         };
         let el =
-            ExecutionLayer::from_config(config, executor.clone(), executor.log().clone()).unwrap();
+            ExecutionLayer::from_config(config, executor.clone(), executor.log().clone(), &spec)
+                .unwrap();
         // This should probably be done for all fields, we only update ones we are testing with so far.
         let mut context = Context::for_mainnet();

@@ -9,7 +9,7 @@ use sensitive_url::SensitiveUrl;
 use task_executor::TaskExecutor;
 use tempfile::NamedTempFile;
 use tree_hash::TreeHash;
-use types::{Address, ChainSpec, Epoch, EthSpec, FullPayload, Hash256, Uint256};
+use types::{Address, ChainSpec, Epoch, EthSpec, FullPayload, Hash256, MainnetEthSpec};

 pub struct MockExecutionLayer<T: EthSpec> {
     pub server: MockServer<T>,

@@ -20,15 +20,17 @@
 impl<T: EthSpec> MockExecutionLayer<T> {
     pub fn default_params(executor: TaskExecutor) -> Self {
+        let mut spec = MainnetEthSpec::default_spec();
+        spec.terminal_total_difficulty = DEFAULT_TERMINAL_DIFFICULTY.into();
+        spec.terminal_block_hash = ExecutionBlockHash::zero();
+        spec.terminal_block_hash_activation_epoch = Epoch::new(0);
         Self::new(
             executor,
-            DEFAULT_TERMINAL_DIFFICULTY.into(),
             DEFAULT_TERMINAL_BLOCK,
-            ExecutionBlockHash::zero(),
-            Epoch::new(0),
             None,
             None,
             Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
+            spec,
             None,
         )
     }

@@ -36,29 +38,22 @@
     #[allow(clippy::too_many_arguments)]
     pub fn new(
         executor: TaskExecutor,
-        terminal_total_difficulty: Uint256,
         terminal_block: u64,
-        terminal_block_hash: ExecutionBlockHash,
-        terminal_block_hash_activation_epoch: Epoch,
         shanghai_time: Option<u64>,
         eip4844_time: Option<u64>,
         jwt_key: Option<JwtKey>,
+        spec: ChainSpec,
         builder_url: Option<SensitiveUrl>,
     ) -> Self {
         let handle = executor.handle().unwrap();
-        let mut spec = T::default_spec();
-        spec.terminal_total_difficulty = terminal_total_difficulty;
-        spec.terminal_block_hash = terminal_block_hash;
-        spec.terminal_block_hash_activation_epoch = terminal_block_hash_activation_epoch;
         let jwt_key = jwt_key.unwrap_or_else(JwtKey::random);
         let server = MockServer::new(
             &handle,
             jwt_key,
-            terminal_total_difficulty,
+            spec.terminal_total_difficulty,
             terminal_block,
-            terminal_block_hash,
+            spec.terminal_block_hash,
             shanghai_time,
             eip4844_time,
         );

@@ -78,7 +73,8 @@
             ..Default::default()
         };
         let el =
-            ExecutionLayer::from_config(config, executor.clone(), executor.log().clone()).unwrap();
+            ExecutionLayer::from_config(config, executor.clone(), executor.log().clone(), &spec)
+                .unwrap();

         Self {
             server,

@@ -26,6 +26,3 @@ lru = "0.7.1"
 sloggers = { version = "2.1.1", features = ["json"] }
 directory = { path = "../../common/directory" }
 strum = { version = "0.24.0", features = ["derive"] }
-[features]
-withdrawals-processing = ["state_processing/withdrawals-processing"]

@@ -43,4 +43,3 @@ arbitrary-fuzz = [
     "eth2_ssz_types/arbitrary",
     "tree_hash/arbitrary",
 ]
-withdrawals-processing = []

@@ -474,10 +474,6 @@ pub fn get_expected_withdrawals<T: EthSpec>(
     let mut validator_index = state.next_withdrawal_validator_index()?;
     let mut withdrawals = vec![];
-    if cfg!(not(feature = "withdrawals-processing")) {
-        return Ok(withdrawals.into());
-    }
     let bound = std::cmp::min(
         state.validators().len() as u64,
         spec.max_validators_per_withdrawals_sweep,

@@ -525,9 +521,6 @@ pub fn process_withdrawals<'payload, T: EthSpec, Payload: AbstractExecPayload<T>
     payload: Payload::Ref<'payload>,
     spec: &ChainSpec,
 ) -> Result<(), BlockProcessingError> {
-    if cfg!(not(feature = "withdrawals-processing")) {
-        return Ok(());
-    }
     match state {
         BeaconState::Merge(_) => Ok(()),
         BeaconState::Capella(_) | BeaconState::Eip4844(_) => {

@@ -300,9 +300,6 @@ pub fn process_bls_to_execution_changes<T: EthSpec>(
     verify_signatures: VerifySignatures,
     spec: &ChainSpec,
 ) -> Result<(), BlockProcessingError> {
-    if cfg!(not(feature = "withdrawals-processing")) {
-        return Ok(());
-    }
     for (i, signed_address_change) in bls_to_execution_changes.iter().enumerate() {
         verify_bls_to_execution_change(state, signed_address_change, verify_signatures, spec)
             .map_err(|e| e.into_with_index(i))?;

@@ -24,8 +24,6 @@ gnosis = []
 slasher-mdbx = ["slasher/mdbx"]
 # Support slasher LMDB backend.
 slasher-lmdb = ["slasher/lmdb"]
-# Support for withdrawals consensus processing logic.
-withdrawals-processing = ["beacon_node/withdrawals-processing"]
 [dependencies]
 beacon_node = { "path" = "../beacon_node" }

@@ -110,6 +110,8 @@ impl<E: GenericExecutionEngine> TestRig<E> {
         let (runtime_shutdown, exit) = exit_future::signal();
         let (shutdown_tx, _) = futures::channel::mpsc::channel(1);
         let executor = TaskExecutor::new(Arc::downgrade(&runtime), exit, log.clone(), shutdown_tx);
+        let mut spec = MainnetEthSpec::default_spec();
+        spec.terminal_total_difficulty = Uint256::zero();

         let fee_recipient = None;

@@ -125,7 +127,7 @@
                 ..Default::default()
             };
             let execution_layer =
-                ExecutionLayer::from_config(config, executor.clone(), log.clone()).unwrap();
+                ExecutionLayer::from_config(config, executor.clone(), log.clone(), &spec).unwrap();
             ExecutionPair {
                 execution_engine,
                 execution_layer,

@@ -144,16 +146,13 @@
                 ..Default::default()
             };
             let execution_layer =
-                ExecutionLayer::from_config(config, executor, log.clone()).unwrap();
+                ExecutionLayer::from_config(config, executor, log.clone(), &spec).unwrap();
             ExecutionPair {
                 execution_engine,
                 execution_layer,
             }
         };

-        let mut spec = MainnetEthSpec::default_spec();
-        spec.terminal_total_difficulty = Uint256::zero();
-
         Self {
             runtime,
             ee_a,