Update Execution Layer Tests for Capella
parent 222a514506
commit be232c4587

Makefile (2 lines changed)
@@ -28,7 +28,7 @@ PROFILE ?= release

 # List of all hard forks. This list is used to set env variables for several tests so that
 # they run for different forks.
-FORKS=phase0 altair merge
+FORKS=phase0 altair merge capella

 # Builds the Lighthouse binary in release (optimized).
 #
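The FORKS list above is expanded by the fork-parameterised test targets into one environment variable per fork, so the same test binary can be re-run once per fork. As a rough illustration only (the `FORK_NAME` variable name and its wiring into the tests are assumptions about the surrounding Makefile setup, not shown in this diff), a test could pick up the fork under test like this:

    // Minimal sketch: read the fork selected by the Makefile loop from the
    // environment. `FORK_NAME` is an assumed variable name, not confirmed by this diff.
    use std::env;

    fn fork_under_test() -> String {
        env::var("FORK_NAME").unwrap_or_else(|_| "phase0".to_string())
    }

    fn main() {
        println!("running tests against fork: {}", fork_under_test());
    }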
@@ -11,11 +11,11 @@ use crate::{
     StateSkipConfig,
 };
 use bls::get_withdrawal_credentials;
-use execution_layer::test_utils::DEFAULT_JWT_SECRET;
 use execution_layer::{
     auth::JwtKey,
     test_utils::{
-        ExecutionBlockGenerator, MockExecutionLayer, TestingBuilder, DEFAULT_TERMINAL_BLOCK,
+        ExecutionBlockGenerator, MockExecutionLayer, TestingBuilder, DEFAULT_JWT_SECRET,
+        DEFAULT_TERMINAL_BLOCK,
     },
     ExecutionLayer,
 };
@@ -385,12 +385,20 @@ where

     pub fn mock_execution_layer(mut self) -> Self {
         let spec = self.spec.clone().expect("cannot build without spec");
+        let shanghai_time = spec.capella_fork_epoch.map(|epoch| {
+            HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
+        });
+        let eip4844_time = spec.eip4844_fork_epoch.map(|epoch| {
+            HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
+        });
         let mock = MockExecutionLayer::new(
             self.runtime.task_executor.clone(),
             spec.terminal_total_difficulty,
             DEFAULT_TERMINAL_BLOCK,
             spec.terminal_block_hash,
             spec.terminal_block_hash_activation_epoch,
+            shanghai_time,
+            eip4844_time,
             Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
             None,
         );
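The `shanghai_time` and `eip4844_time` values handed to `MockExecutionLayer::new` are wall-clock activation times derived from the fork epochs. A minimal standalone sketch of that conversion (the function name and the hard-coded mainnet timing constants below are illustrative assumptions, not code from this commit):

    /// Convert an optional fork epoch into an activation timestamp:
    /// genesis_time + seconds_per_slot * slots_per_epoch * epoch.
    fn fork_activation_time(genesis_time: u64, fork_epoch: Option<u64>) -> Option<u64> {
        const SECONDS_PER_SLOT: u64 = 12; // mainnet slot time
        const SLOTS_PER_EPOCH: u64 = 32; // mainnet epoch length
        fork_epoch.map(|epoch| genesis_time + SECONDS_PER_SLOT * SLOTS_PER_EPOCH * epoch)
    }

    fn main() {
        // With genesis at t = 0 and Capella at epoch 12 (as in the new capella.rs test),
        // the mock EL would switch to Capella payloads at t = 12 * 32 * 12 = 4608.
        assert_eq!(fork_activation_time(0, Some(12)), Some(4608));
        // An unscheduled fork maps to `None`, so the generator never crosses into it.
        assert_eq!(fork_activation_time(0, None), None);
    }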
@@ -405,12 +413,20 @@ where
        let builder_url = SensitiveUrl::parse(format!("http://127.0.0.1:{port}").as_str()).unwrap();

        let spec = self.spec.clone().expect("cannot build without spec");
+       let shanghai_time = spec.capella_fork_epoch.map(|epoch| {
+           HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
+       });
+       let eip4844_time = spec.eip4844_fork_epoch.map(|epoch| {
+           HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
+       });
        let mock_el = MockExecutionLayer::new(
            self.runtime.task_executor.clone(),
            spec.terminal_total_difficulty,
            DEFAULT_TERMINAL_BLOCK,
            spec.terminal_block_hash,
            spec.terminal_block_hash_activation_epoch,
+           shanghai_time,
+           eip4844_time,
            Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
            Some(builder_url.clone()),
        )
beacon_node/beacon_chain/tests/capella.rs (new file, 170 lines)
@@ -0,0 +1,170 @@
+#![cfg(not(debug_assertions))] // Tests run too slow in debug.
+
+use beacon_chain::test_utils::BeaconChainHarness;
+use execution_layer::test_utils::Block;
+use types::*;
+
+const VALIDATOR_COUNT: usize = 32;
+type E = MainnetEthSpec;
+
+fn verify_execution_payload_chain<T: EthSpec>(chain: &[FullPayload<T>]) {
+    let mut prev_ep: Option<FullPayload<T>> = None;
+
+    for ep in chain {
+        assert!(!ep.is_default_with_empty_roots());
+        assert!(ep.block_hash() != ExecutionBlockHash::zero());
+
+        // Check against previous `ExecutionPayload`.
+        if let Some(prev_ep) = prev_ep {
+            assert_eq!(prev_ep.block_hash(), ep.execution_payload().parent_hash());
+            assert_eq!(
+                prev_ep.execution_payload().block_number() + 1,
+                ep.execution_payload().block_number()
+            );
+            assert!(ep.execution_payload().timestamp() > prev_ep.execution_payload().timestamp());
+        }
+        prev_ep = Some(ep.clone());
+    }
+}
+
+#[tokio::test]
+async fn base_altair_merge_capella() {
+    let altair_fork_epoch = Epoch::new(4);
+    let altair_fork_slot = altair_fork_epoch.start_slot(E::slots_per_epoch());
+    let bellatrix_fork_epoch = Epoch::new(8);
+    let merge_fork_slot = bellatrix_fork_epoch.start_slot(E::slots_per_epoch());
+    let capella_fork_epoch = Epoch::new(12);
+    let capella_fork_slot = capella_fork_epoch.start_slot(E::slots_per_epoch());
+
+    let mut spec = E::default_spec();
+    spec.altair_fork_epoch = Some(altair_fork_epoch);
+    spec.bellatrix_fork_epoch = Some(bellatrix_fork_epoch);
+    spec.capella_fork_epoch = Some(capella_fork_epoch);
+
+    let harness = BeaconChainHarness::builder(E::default())
+        .spec(spec)
+        .logger(logging::test_logger())
+        .deterministic_keypairs(VALIDATOR_COUNT)
+        .fresh_ephemeral_store()
+        .mock_execution_layer()
+        .build();
+
+    /*
+     * Start with the base fork.
+     */
+    assert!(harness.chain.head_snapshot().beacon_block.as_base().is_ok());
+
+    /*
+     * Do the Altair fork.
+     */
+    harness.extend_to_slot(altair_fork_slot).await;
+
+    let altair_head = &harness.chain.head_snapshot().beacon_block;
+    assert!(altair_head.as_altair().is_ok());
+    assert_eq!(altair_head.slot(), altair_fork_slot);
+
+    /*
+     * Do the merge fork, without a terminal PoW block.
+     */
+    harness.extend_to_slot(merge_fork_slot).await;
+
+    let merge_head = &harness.chain.head_snapshot().beacon_block;
+    assert!(merge_head.as_merge().is_ok());
+    assert_eq!(merge_head.slot(), merge_fork_slot);
+    assert!(
+        merge_head
+            .message()
+            .body()
+            .execution_payload()
+            .unwrap()
+            .is_default_with_empty_roots(),
+        "Merge head is default payload"
+    );
+
+    /*
+     * Next merge block shouldn't include an exec payload.
+     */
+    harness.extend_slots(1).await;
+
+    let one_after_merge_head = &harness.chain.head_snapshot().beacon_block;
+    assert!(
+        one_after_merge_head
+            .message()
+            .body()
+            .execution_payload()
+            .unwrap()
+            .is_default_with_empty_roots(),
+        "One after merge head is default payload"
+    );
+    assert_eq!(one_after_merge_head.slot(), merge_fork_slot + 1);
+
+    /*
+     * Trigger the terminal PoW block.
+     */
+    harness
+        .execution_block_generator()
+        .move_to_terminal_block()
+        .unwrap();
+
+    // Add a slot duration to get to the next slot
+    let timestamp = harness.get_timestamp_at_slot() + harness.spec.seconds_per_slot;
+    harness
+        .execution_block_generator()
+        .modify_last_block(|block| {
+            if let Block::PoW(terminal_block) = block {
+                terminal_block.timestamp = timestamp;
+            }
+        });
+    harness.extend_slots(1).await;
+
+    let two_after_merge_head = &harness.chain.head_snapshot().beacon_block;
+    assert!(
+        two_after_merge_head
+            .message()
+            .body()
+            .execution_payload()
+            .unwrap()
+            .is_default_with_empty_roots(),
+        "Two after merge head is default payload"
+    );
+    assert_eq!(two_after_merge_head.slot(), merge_fork_slot + 2);
+
+    /*
+     * Next merge block should include an exec payload.
+     */
+    let mut execution_payloads = vec![];
+    for _ in (merge_fork_slot.as_u64() + 3)..capella_fork_slot.as_u64() {
+        harness.extend_slots(1).await;
+        let block = &harness.chain.head_snapshot().beacon_block;
+        let full_payload: FullPayload<E> = block
+            .message()
+            .body()
+            .execution_payload()
+            .unwrap()
+            .clone()
+            .into();
+        // pre-capella shouldn't have withdrawals
+        assert!(full_payload.withdrawals_root().is_err());
+        execution_payloads.push(full_payload);
+    }
+
+    /*
+     * Should enter capella fork now.
+     */
+    for _ in 0..16 {
+        harness.extend_slots(1).await;
+        let block = &harness.chain.head_snapshot().beacon_block;
+        let full_payload: FullPayload<E> = block
+            .message()
+            .body()
+            .execution_payload()
+            .unwrap()
+            .clone()
+            .into();
+        // post-capella should have withdrawals
+        assert!(full_payload.withdrawals_root().is_ok());
+        execution_payloads.push(full_payload);
+    }
+
+    verify_execution_payload_chain(execution_payloads.as_slice());
+}
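The `#![cfg(not(debug_assertions))]` attribute at the top of the new file compiles the whole test module out of debug builds, so these tests only exist in a release-mode test run. A minimal standalone illustration of how that gating behaves (not part of this commit):

    // In a debug build `debug_assertions` is enabled, so the guarded item does not exist;
    // in a release build it does. The same mechanism hides the Capella tests from debug runs.
    #[cfg(not(debug_assertions))]
    fn release_only() -> &'static str {
        "compiled without debug assertions"
    }

    fn main() {
        #[cfg(not(debug_assertions))]
        println!("{}", release_only());

        #[cfg(debug_assertions)]
        println!("debug build: release-only code is compiled out");
    }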
@@ -1,6 +1,7 @@
 mod attestation_production;
 mod attestation_verification;
 mod block_verification;
+mod capella;
 mod merge;
 mod op_verification;
 mod payload_invalidation;
@@ -191,18 +191,17 @@ async fn base_altair_merge_with_terminal_block_after_fork() {

     harness.extend_slots(1).await;

-    let one_after_merge_head = &harness.chain.head_snapshot().beacon_block;
-    // FIXME: why is this being tested twice?
+    let two_after_merge_head = &harness.chain.head_snapshot().beacon_block;
     assert!(
-        one_after_merge_head
+        two_after_merge_head
             .message()
             .body()
             .execution_payload()
             .unwrap()
             .is_default_with_empty_roots(),
-        "One after merge head is default payload"
+        "Two after merge head is default payload"
     );
-    assert_eq!(one_after_merge_head.slot(), merge_fork_slot + 2);
+    assert_eq!(two_after_merge_head.slot(), merge_fork_slot + 2);

     /*
      * Next merge block should include an exec payload.
@@ -13,7 +13,8 @@ use std::collections::HashMap;
 use tree_hash::TreeHash;
 use tree_hash_derive::TreeHash;
 use types::{
-    EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadMerge, Hash256, Uint256,
+    EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadCapella,
+    ExecutionPayloadEip4844, ExecutionPayloadMerge, ForkName, Hash256, Uint256,
 };

 const GAS_LIMIT: u64 = 16384;
@@ -113,6 +114,11 @@ pub struct ExecutionBlockGenerator<T: EthSpec> {
     pub pending_payloads: HashMap<ExecutionBlockHash, ExecutionPayload<T>>,
     pub next_payload_id: u64,
     pub payload_ids: HashMap<PayloadId, ExecutionPayload<T>>,
+    /*
+     * Post-merge fork triggers
+     */
+    pub shanghai_time: Option<u64>, // withdrawals
+    pub eip4844_time: Option<u64>,  // 4844
 }

 impl<T: EthSpec> ExecutionBlockGenerator<T> {
@@ -120,6 +126,8 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
         terminal_total_difficulty: Uint256,
         terminal_block_number: u64,
         terminal_block_hash: ExecutionBlockHash,
+        shanghai_time: Option<u64>,
+        eip4844_time: Option<u64>,
     ) -> Self {
         let mut gen = Self {
             head_block: <_>::default(),
@@ -132,6 +140,8 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
             pending_payloads: <_>::default(),
             next_payload_id: 0,
             payload_ids: <_>::default(),
+            shanghai_time,
+            eip4844_time,
         };

         gen.insert_pow_block(0).unwrap();
@@ -163,6 +173,16 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
         }
     }

+    pub fn get_fork_at_timestamp(&self, timestamp: u64) -> ForkName {
+        match self.eip4844_time {
+            Some(fork_time) if timestamp >= fork_time => ForkName::Eip4844,
+            _ => match self.shanghai_time {
+                Some(fork_time) if timestamp >= fork_time => ForkName::Capella,
+                _ => ForkName::Merge,
+            },
+        }
+    }
+
     pub fn execution_block_by_number(&self, number: u64) -> Option<ExecutionBlock> {
         self.block_by_number(number)
             .map(|block| block.as_execution_block(self.terminal_total_difficulty))
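The fork lookup added above is a pure threshold check on block timestamps, with later forks taking precedence when both times are set. A minimal standalone sketch of the same behaviour (using a hypothetical `Fork` enum rather than Lighthouse's `ForkName`):

    #[derive(Debug, PartialEq)]
    enum Fork {
        Merge,
        Capella,
        Eip4844,
    }

    /// Pick the fork active at `timestamp`, mirroring the generator's
    /// `get_fork_at_timestamp`: the latest scheduled fork whose time has passed wins.
    fn fork_at_timestamp(shanghai_time: Option<u64>, eip4844_time: Option<u64>, timestamp: u64) -> Fork {
        match eip4844_time {
            Some(t) if timestamp >= t => Fork::Eip4844,
            _ => match shanghai_time {
                Some(t) if timestamp >= t => Fork::Capella,
                _ => Fork::Merge,
            },
        }
    }

    fn main() {
        // Capella scheduled at t = 100, EIP-4844 unscheduled.
        assert_eq!(fork_at_timestamp(Some(100), None, 99), Fork::Merge);
        assert_eq!(fork_at_timestamp(Some(100), None, 100), Fork::Capella);
        // Once EIP-4844 is scheduled too, the latest applicable fork is returned.
        assert_eq!(fork_at_timestamp(Some(100), Some(200), 250), Fork::Eip4844);
    }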
@@ -395,7 +415,9 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
         }
     }

-    pub fn forkchoice_updated_v1(
+    // This function expects payload_attributes to already be validated with respect to
+    // the current fork [obtained by self.get_fork_at_timestamp(payload_attributes.timestamp)]
+    pub fn forkchoice_updated(
         &mut self,
         forkchoice_state: ForkchoiceState,
         payload_attributes: Option<PayloadAttributes>,
@@ -469,8 +491,8 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
                     transactions: vec![].into(),
                 }),
                 PayloadAttributes::V2(pa) => {
-                    // FIXME: think about how to test different forks
-                    ExecutionPayload::Merge(ExecutionPayloadMerge {
+                    match self.get_fork_at_timestamp(pa.timestamp) {
+                        ForkName::Merge => ExecutionPayload::Merge(ExecutionPayloadMerge {
                             parent_hash: forkchoice_state.head_block_hash,
                             fee_recipient: pa.suggested_fee_recipient,
                             receipts_root: Hash256::repeat_byte(42),
@@ -485,8 +507,50 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
                             base_fee_per_gas: Uint256::one(),
                             block_hash: ExecutionBlockHash::zero(),
                             transactions: vec![].into(),
+                        }),
+                        ForkName::Capella => {
+                            ExecutionPayload::Capella(ExecutionPayloadCapella {
+                                parent_hash: forkchoice_state.head_block_hash,
+                                fee_recipient: pa.suggested_fee_recipient,
+                                receipts_root: Hash256::repeat_byte(42),
+                                state_root: Hash256::repeat_byte(43),
+                                logs_bloom: vec![0; 256].into(),
+                                prev_randao: pa.prev_randao,
+                                block_number: parent.block_number() + 1,
+                                gas_limit: GAS_LIMIT,
+                                gas_used: GAS_USED,
+                                timestamp: pa.timestamp,
+                                extra_data: "block gen was here".as_bytes().to_vec().into(),
+                                base_fee_per_gas: Uint256::one(),
+                                block_hash: ExecutionBlockHash::zero(),
+                                transactions: vec![].into(),
+                                withdrawals: pa.withdrawals.as_ref().unwrap().clone().into(),
                             })
                         }
+                        ForkName::Eip4844 => {
+                            ExecutionPayload::Eip4844(ExecutionPayloadEip4844 {
+                                parent_hash: forkchoice_state.head_block_hash,
+                                fee_recipient: pa.suggested_fee_recipient,
+                                receipts_root: Hash256::repeat_byte(42),
+                                state_root: Hash256::repeat_byte(43),
+                                logs_bloom: vec![0; 256].into(),
+                                prev_randao: pa.prev_randao,
+                                block_number: parent.block_number() + 1,
+                                gas_limit: GAS_LIMIT,
+                                gas_used: GAS_USED,
+                                timestamp: pa.timestamp,
+                                extra_data: "block gen was here".as_bytes().to_vec().into(),
+                                base_fee_per_gas: Uint256::one(),
+                                // FIXME(4844): maybe this should be set to something?
+                                excess_data_gas: Uint256::one(),
+                                block_hash: ExecutionBlockHash::zero(),
+                                transactions: vec![].into(),
+                                withdrawals: pa.withdrawals.as_ref().unwrap().clone().into(),
+                            })
+                        }
+                        _ => unreachable!(),
+                    }
+                }
             };

             *execution_payload.block_hash_mut() =
@@ -576,6 +640,8 @@ mod test {
             TERMINAL_DIFFICULTY.into(),
             TERMINAL_BLOCK,
             ExecutionBlockHash::zero(),
+            None,
+            None,
         );

         for i in 0..=TERMINAL_BLOCK {
@@ -82,17 +82,40 @@ pub async fn handle_rpc<T: EthSpec>(
                 ENGINE_NEW_PAYLOAD_V2 => {
                     JsonExecutionPayload::V2(get_param::<JsonExecutionPayloadV2<T>>(params, 0)?)
                 }
+                // TODO(4844) add that here..
                 _ => unreachable!(),
             };
-            let fork = match request {
-                JsonExecutionPayload::V1(_) => ForkName::Merge,
-                JsonExecutionPayload::V2(ref payload) => {
-                    if payload.withdrawals.is_none() {
-                        ForkName::Merge
-                    } else {
-                        ForkName::Capella
+            let fork = ctx
+                .execution_block_generator
+                .read()
+                .get_fork_at_timestamp(*request.timestamp());
+            // validate method called correctly according to shanghai fork time
+            match fork {
+                ForkName::Merge => {
+                    if request.withdrawals().is_ok() && request.withdrawals().unwrap().is_some() {
+                        return Err(format!(
+                            "{} called with `withdrawals` before capella fork!",
+                            method
+                        ));
                     }
                 }
+                ForkName::Capella => {
+                    if method == ENGINE_NEW_PAYLOAD_V1 {
+                        return Err(format!("{} called after capella fork!", method));
+                    }
+                    if request.withdrawals().is_err()
+                        || (request.withdrawals().is_ok()
+                            && request.withdrawals().unwrap().is_none())
+                    {
+                        return Err(format!(
+                            "{} called without `withdrawals` after capella fork!",
+                            method
+                        ));
+                    }
+                }
+                // TODO(4844) add 4844 error checking here
+                _ => unreachable!(),
             };

             // Canned responses set by block hash take priority.
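The checks added here enforce a small compatibility matrix between the fork implied by the payload timestamp and the engine API version the caller used: V1 payloads must not carry withdrawals, and Capella payloads must arrive via V2 with withdrawals present. A minimal sketch of that rule (hypothetical names, not Lighthouse's actual types):

    enum Fork {
        Merge,
        Capella,
    }

    /// Validate an engine_newPayload call against the active fork, in the spirit of the
    /// mock server's checks above.
    fn validate_new_payload(fork: Fork, method_version: u8, has_withdrawals: bool) -> Result<(), String> {
        match fork {
            Fork::Merge if has_withdrawals => {
                Err("called with `withdrawals` before capella fork!".into())
            }
            Fork::Capella if method_version == 1 => Err("V1 called after capella fork!".into()),
            Fork::Capella if !has_withdrawals => {
                Err("called without `withdrawals` after capella fork!".into())
            }
            _ => Ok(()),
        }
    }

    fn main() {
        assert!(validate_new_payload(Fork::Merge, 1, false).is_ok());
        assert!(validate_new_payload(Fork::Merge, 2, true).is_err());
        assert!(validate_new_payload(Fork::Capella, 2, true).is_ok());
        assert!(validate_new_payload(Fork::Capella, 1, true).is_err());
    }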
@@ -125,7 +148,7 @@ pub async fn handle_rpc<T: EthSpec>(

             Ok(serde_json::to_value(JsonPayloadStatusV1::from(response)).unwrap())
         }
-        ENGINE_GET_PAYLOAD_V1 => {
+        ENGINE_GET_PAYLOAD_V1 | ENGINE_GET_PAYLOAD_V2 => {
             let request: JsonPayloadIdRequest = get_param(params, 0)?;
             let id = request.into();

@@ -135,12 +158,76 @@ pub async fn handle_rpc<T: EthSpec>(
                .get_payload(&id)
                .ok_or_else(|| format!("no payload for id {:?}", id))?;

-           Ok(serde_json::to_value(JsonExecutionPayloadV1::try_from(response).unwrap()).unwrap())
+           // validate method called correctly according to shanghai fork time
+           if ctx
+               .execution_block_generator
+               .read()
+               .get_fork_at_timestamp(response.timestamp())
+               == ForkName::Capella
+               && method == ENGINE_GET_PAYLOAD_V1
+           {
+               return Err(format!("{} called after capella fork!", method));
            }
-           // FIXME(capella): handle fcu version 2
-       ENGINE_FORKCHOICE_UPDATED_V1 => {
+           // TODO(4844) add 4844 error checking here
+
+           match method {
+               ENGINE_GET_PAYLOAD_V1 => Ok(serde_json::to_value(
+                   JsonExecutionPayloadV1::try_from(response).unwrap(),
+               )
+               .unwrap()),
+               ENGINE_GET_PAYLOAD_V2 => Ok(serde_json::to_value(JsonGetPayloadResponse {
+                   execution_payload: JsonExecutionPayloadV2::try_from(response).unwrap(),
+               })
+               .unwrap()),
+               _ => unreachable!(),
+           }
+       }
+       ENGINE_FORKCHOICE_UPDATED_V1 | ENGINE_FORKCHOICE_UPDATED_V2 => {
            let forkchoice_state: JsonForkchoiceStateV1 = get_param(params, 0)?;
-           let payload_attributes: Option<JsonPayloadAttributes> = get_param(params, 1)?;
+           let payload_attributes = match method {
+               ENGINE_FORKCHOICE_UPDATED_V1 => {
+                   let jpa1: Option<JsonPayloadAttributesV1> = get_param(params, 1)?;
+                   jpa1.map(JsonPayloadAttributes::V1)
+               }
+               ENGINE_FORKCHOICE_UPDATED_V2 => {
+                   let jpa2: Option<JsonPayloadAttributesV2> = get_param(params, 1)?;
+                   jpa2.map(JsonPayloadAttributes::V2)
+               }
+               _ => unreachable!(),
+           };
+
+           // validate method called correctly according to shanghai fork time
+           if let Some(pa) = payload_attributes.as_ref() {
+               match ctx
+                   .execution_block_generator
+                   .read()
+                   .get_fork_at_timestamp(*pa.timestamp())
+               {
+                   ForkName::Merge => {
+                       if pa.withdrawals().is_ok() && pa.withdrawals().unwrap().is_some() {
+                           return Err(format!(
+                               "{} called with `withdrawals` before capella fork!",
+                               method
+                           ));
+                       }
+                   }
+                   ForkName::Capella => {
+                       if method == ENGINE_FORKCHOICE_UPDATED_V1 {
+                           return Err(format!("{} called after capella fork!", method));
+                       }
+                       if pa.withdrawals().is_err()
+                           || (pa.withdrawals().is_ok() && pa.withdrawals().unwrap().is_none())
+                       {
+                           return Err(format!(
+                               "{} called without `withdrawals` after capella fork!",
+                               method
+                           ));
+                       }
+                   }
+                   // TODO(4844) add 4844 error checking here
+                   _ => unreachable!(),
+               };
+           }

            if let Some(hook_response) = ctx
                .hook
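The forkchoiceUpdated handler now decodes the payload attributes differently depending on the method version and then unifies the two shapes behind one enum for the rest of the handler. A minimal standalone sketch of that dispatch pattern (illustrative names, not Lighthouse's actual JSON types):

    // V2 attributes add a withdrawals list that V1 callers never send, which is why the
    // decode target has to be chosen from the method version before validation can run.
    struct AttributesV1 {
        timestamp: u64,
    }
    struct AttributesV2 {
        timestamp: u64,
        withdrawals: Vec<u64>, // stand-in for real withdrawal objects
    }

    enum PayloadAttributes {
        V1(AttributesV1),
        V2(AttributesV2),
    }

    impl PayloadAttributes {
        fn timestamp(&self) -> u64 {
            match self {
                PayloadAttributes::V1(a) => a.timestamp,
                PayloadAttributes::V2(a) => a.timestamp,
            }
        }
    }

    fn main() {
        let attrs = [
            PayloadAttributes::V1(AttributesV1 { timestamp: 96 }),
            PayloadAttributes::V2(AttributesV2 { timestamp: 4608, withdrawals: vec![1, 2] }),
        ];
        for a in &attrs {
            // Once the version is decoded, the handler only needs the unified accessor.
            println!("payload attributes at timestamp {}", a.timestamp());
            if let PayloadAttributes::V2(v2) = a {
                println!("  includes {} withdrawals", v2.withdrawals.len());
            }
        }
    }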
@@ -161,10 +248,7 @@ pub async fn handle_rpc<T: EthSpec>(
                return Ok(serde_json::to_value(response).unwrap());
            }

-           let mut response = ctx
-               .execution_block_generator
-               .write()
-               .forkchoice_updated_v1(
+           let mut response = ctx.execution_block_generator.write().forkchoice_updated(
                forkchoice_state.into(),
                payload_attributes.map(|json| json.into()),
            )?;
@@ -26,17 +26,22 @@ impl<T: EthSpec> MockExecutionLayer<T> {
            DEFAULT_TERMINAL_BLOCK,
            ExecutionBlockHash::zero(),
            Epoch::new(0),
+           None,
+           None,
            Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
            None,
        )
    }

+   #[allow(clippy::too_many_arguments)]
    pub fn new(
        executor: TaskExecutor,
        terminal_total_difficulty: Uint256,
        terminal_block: u64,
        terminal_block_hash: ExecutionBlockHash,
        terminal_block_hash_activation_epoch: Epoch,
+       shanghai_time: Option<u64>,
+       eip4844_time: Option<u64>,
        jwt_key: Option<JwtKey>,
        builder_url: Option<SensitiveUrl>,
    ) -> Self {
@@ -54,6 +59,8 @@ impl<T: EthSpec> MockExecutionLayer<T> {
            terminal_total_difficulty,
            terminal_block,
            terminal_block_hash,
+           shanghai_time,
+           eip4844_time,
        );

        let url = SensitiveUrl::parse(&server.url()).unwrap();
@@ -45,6 +45,8 @@ pub struct MockExecutionConfig {
    pub terminal_difficulty: Uint256,
    pub terminal_block: u64,
    pub terminal_block_hash: ExecutionBlockHash,
+   pub shanghai_time: Option<u64>,
+   pub eip4844_time: Option<u64>,
 }

 impl Default for MockExecutionConfig {
@@ -55,6 +57,8 @@ impl Default for MockExecutionConfig {
            terminal_block: DEFAULT_TERMINAL_BLOCK,
            terminal_block_hash: ExecutionBlockHash::zero(),
            server_config: Config::default(),
+           shanghai_time: None,
+           eip4844_time: None,
        }
    }
 }
@@ -74,6 +78,8 @@ impl<T: EthSpec> MockServer<T> {
            DEFAULT_TERMINAL_DIFFICULTY.into(),
            DEFAULT_TERMINAL_BLOCK,
            ExecutionBlockHash::zero(),
+           None, // FIXME(capella): should this be the default?
+           None, // FIXME(eip4844): should this be the default?
        )
    }

@@ -84,11 +90,18 @@ impl<T: EthSpec> MockServer<T> {
            terminal_block,
            terminal_block_hash,
            server_config,
+           shanghai_time,
+           eip4844_time,
        } = config;
        let last_echo_request = Arc::new(RwLock::new(None));
        let preloaded_responses = Arc::new(Mutex::new(vec![]));
-       let execution_block_generator =
-           ExecutionBlockGenerator::new(terminal_difficulty, terminal_block, terminal_block_hash);
+       let execution_block_generator = ExecutionBlockGenerator::new(
+           terminal_difficulty,
+           terminal_block,
+           terminal_block_hash,
+           shanghai_time,
+           eip4844_time,
+       );

        let ctx: Arc<Context<T>> = Arc::new(Context {
            config: server_config,
@@ -140,6 +153,8 @@ impl<T: EthSpec> MockServer<T> {
        terminal_difficulty: Uint256,
        terminal_block: u64,
        terminal_block_hash: ExecutionBlockHash,
+       shanghai_time: Option<u64>,
+       eip4844_time: Option<u64>,
    ) -> Self {
        Self::new_with_config(
            handle,
@@ -149,6 +164,8 @@ impl<T: EthSpec> MockServer<T> {
                terminal_difficulty,
                terminal_block,
                terminal_block_hash,
+               shanghai_time,
+               eip4844_time,
            },
        )
    }