Update engine_api to latest version (#4223)
* Update Engine API to Latest
* Get Mock EE Working
* Fix Mock EE
* Update Engine API Again
* Rip out get_blobs_bundle Stuff
* Fix Test Harness
* Fix Clippy Complaints
* Fix Beacon Chain Tests
parent aa34339298
commit c1d47da02d

1 Cargo.lock generated
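At a high level, this commit drops the standalone `engine_getBlobsBundleV1` call and instead reads the blobs bundle (KZG commitments, proofs, and blobs) out of the `engine_getPayloadV3` response, threading it into `BlockProposalContents`. For orientation, the hedged sketch below restates the conversion this diff adds as a free function; the types and field names (`GetPayloadResponse`, `BlobsBundleV1`, `BlockProposalContents`) are taken from the diff itself, but the function name and module paths are illustrative assumptions, not part of the commit.

```rust
// Sketch only: mirrors the `From<GetPayloadResponse<E>>` conversion added in this
// commit; import paths and the helper name are assumptions for illustration.
use execution_layer::{BlobsBundleV1, BlockProposalContents, GetPayloadResponse};
use types::{AbstractExecPayload, EthSpec, ExecutionPayload, Uint256};

fn proposal_contents_from_get_payload<E: EthSpec, Payload: AbstractExecPayload<E>>(
    response: GetPayloadResponse<E>,
) -> BlockProposalContents<E, Payload> {
    // Deneb responses embed a `BlobsBundleV1 { commitments, proofs, blobs }`;
    // Merge and Capella responses carry `None` for the bundle.
    let (execution_payload, block_value, maybe_bundle): (
        ExecutionPayload<E>,
        Uint256,
        Option<BlobsBundleV1<E>>,
    ) = response.into();

    match maybe_bundle {
        Some(bundle) => BlockProposalContents::PayloadAndBlobs {
            payload: execution_payload.into(),
            block_value,
            kzg_commitments: bundle.commitments,
            blobs: bundle.blobs,
            proofs: bundle.proofs,
        },
        None => BlockProposalContents::Payload {
            payload: execution_payload.into(),
            block_value,
        },
    }
}
```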
@@ -2647,6 +2647,7 @@ dependencies = [
"hex",
"jsonwebtoken",
"keccak-hash",
"kzg",
"lazy_static",
"lighthouse_metrics",
"lru 0.7.8",
@@ -4711,7 +4711,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
bls_to_execution_changes,
} = partial_beacon_block;

let (inner_block, blobs_opt) = match &state {
let (inner_block, blobs_opt, proofs_opt) = match &state {
BeaconState::Base(_) => (
BeaconBlock::Base(BeaconBlockBase {
slot,

@@ -4731,6 +4731,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
},
}),
None,
None,
),
BeaconState::Altair(_) => (
BeaconBlock::Altair(BeaconBlockAltair {

@@ -4753,9 +4754,10 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
},
}),
None,
None,
),
BeaconState::Merge(_) => {
let (payload, _, _) = block_contents
let (payload, _, _, _) = block_contents
.ok_or(BlockProductionError::MissingExecutionPayload)?
.deconstruct();
(

@@ -4781,10 +4783,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
},
}),
None,
None,
)
}
BeaconState::Capella(_) => {
let (payload, _, _) = block_contents
let (payload, _, _, _) = block_contents
.ok_or(BlockProductionError::MissingExecutionPayload)?
.deconstruct();
(

@@ -4811,10 +4814,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
},
}),
None,
None,
)
}
BeaconState::Deneb(_) => {
let (payload, kzg_commitments, blobs) = block_contents
let (payload, kzg_commitments, blobs, proofs) = block_contents
.ok_or(BlockProductionError::MissingExecutionPayload)?
.deconstruct();
(

@@ -4843,6 +4847,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
},
}),
blobs,
proofs,
)
}
};

@@ -4915,8 +4920,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
)));
}

let kzg_proofs =
Self::compute_blob_kzg_proofs(kzg, &blobs, expected_kzg_commitments, slot)?;
let kzg_proofs = if let Some(proofs) = proofs_opt {
Vec::from(proofs)
} else {
Self::compute_blob_kzg_proofs(kzg, &blobs, expected_kzg_commitments, slot)?
};

kzg_utils::validate_blobs::<T::EthSpec>(
kzg,
@@ -12,13 +12,14 @@ use crate::data_availability_checker::{
};
use crate::kzg_utils::{validate_blob, validate_blobs};
use crate::BeaconChainError;
use eth2::types::BlockContentsTuple;
use kzg::Kzg;
use slog::{debug, warn};
use std::borrow::Cow;
use types::{
BeaconBlockRef, BeaconState, BeaconStateError, BlobSidecar, BlobSidecarList, ChainSpec,
CloneConfig, Epoch, EthSpec, Hash256, KzgCommitment, RelativeEpoch, SignedBeaconBlock,
SignedBeaconBlockHeader, SignedBlobSidecar, Slot,
CloneConfig, Epoch, EthSpec, FullPayload, Hash256, KzgCommitment, RelativeEpoch,
SignedBeaconBlock, SignedBeaconBlockHeader, SignedBlobSidecar, Slot,
};

#[derive(Debug)]

@@ -659,3 +660,18 @@ impl<E: EthSpec> From<SignedBeaconBlock<E>> for BlockWrapper<E> {
Self::Block(Arc::new(value))
}
}

impl<E: EthSpec> From<BlockContentsTuple<E, FullPayload<E>>> for BlockWrapper<E> {
fn from(value: BlockContentsTuple<E, FullPayload<E>>) -> Self {
match value.1 {
Some(variable_list) => Self::BlockAndBlobs(
Arc::new(value.0),
Vec::from(variable_list)
.into_iter()
.map(|signed_blob| signed_blob.message)
.collect::<Vec<_>>(),
),
None => Self::Block(Arc::new(value.0)),
}
}
}
@@ -1,3 +1,4 @@
use crate::blob_verification::{AsBlock, BlockWrapper};
pub use crate::persisted_beacon_chain::PersistedBeaconChain;
pub use crate::{
beacon_chain::{BEACON_CHAIN_DB_KEY, ETH1_CACHE_DB_KEY, FORK_CHOICE_DB_KEY, OP_POOL_DB_KEY},

@@ -13,6 +14,7 @@ use crate::{
StateSkipConfig,
};
use bls::get_withdrawal_credentials;
use eth2::types::BlockContentsTuple;
use execution_layer::{
auth::JwtKey,
test_utils::{

@@ -25,7 +27,7 @@ use fork_choice::CountUnrealized;
use futures::channel::mpsc::Receiver;
pub use genesis::{interop_genesis_state_with_eth1, DEFAULT_ETH1_BLOCK_HASH};
use int_to_bytes::int_to_bytes32;
use kzg::TrustedSetup;
use kzg::{Kzg, TrustedSetup};
use merkle_proof::MerkleTree;
use parking_lot::Mutex;
use parking_lot::RwLockWriteGuard;

@@ -446,6 +448,13 @@ where
let deneb_time = spec.deneb_fork_epoch.map(|epoch| {
HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
});

let trusted_setup: TrustedSetup =
serde_json::from_reader(eth2_network_config::TRUSTED_SETUP)
.map_err(|e| format!("Unable to read trusted setup file: {}", e))
.expect("should have trusted setup");
let kzg = Kzg::new_from_trusted_setup(trusted_setup).expect("should create kzg");

let mock = MockExecutionLayer::new(
self.runtime.task_executor.clone(),
DEFAULT_TERMINAL_BLOCK,

@@ -455,6 +464,7 @@ where
Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
spec,
None,
Some(kzg),
);
self.execution_layer = Some(mock.el.clone());
self.mock_execution_layer = Some(mock);

@@ -477,6 +487,11 @@ where
let deneb_time = spec.deneb_fork_epoch.map(|epoch| {
HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
});
let trusted_setup: TrustedSetup =
serde_json::from_reader(eth2_network_config::TRUSTED_SETUP)
.map_err(|e| format!("Unable to read trusted setup file: {}", e))
.expect("should have trusted setup");
let kzg = Kzg::new_from_trusted_setup(trusted_setup).expect("should create kzg");
let mock_el = MockExecutionLayer::new(
self.runtime.task_executor.clone(),
DEFAULT_TERMINAL_BLOCK,

@@ -486,6 +501,7 @@ where
Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
spec.clone(),
Some(builder_url.clone()),
Some(kzg),
)
.move_to_terminal_block();

@@ -755,7 +771,7 @@ where
&self,
mut state: BeaconState<E>,
slot: Slot,
) -> (SignedBeaconBlock<E>, BeaconState<E>) {
) -> (BlockContentsTuple<E, FullPayload<E>>, BeaconState<E>) {
assert_ne!(slot, 0, "can't produce a block at slot 0");
assert!(slot >= state.slot());

@@ -795,7 +811,37 @@ where
&self.spec,
);

(signed_block, state)
let block_contents: BlockContentsTuple<E, FullPayload<E>> = match &signed_block {
SignedBeaconBlock::Base(_)
| SignedBeaconBlock::Altair(_)
| SignedBeaconBlock::Merge(_)
| SignedBeaconBlock::Capella(_) => (signed_block, None),
SignedBeaconBlock::Deneb(_) => {
if let Some(blobs) = self
.chain
.proposal_blob_cache
.pop(&signed_block.canonical_root())
{
let signed_blobs = Vec::from(blobs)
.into_iter()
.map(|blob| {
blob.sign(
&self.validator_keypairs[proposer_index].sk,
&state.fork(),
state.genesis_validators_root(),
&self.spec,
)
})
.collect::<Vec<_>>()
.into();
(signed_block, Some(signed_blobs))
} else {
(signed_block, None)
}
}
};

(block_contents, state)
}

/// Useful for the `per_block_processing` tests. Creates a block, and returns the state after

@@ -1663,18 +1709,18 @@ where
(deposits, state)
}

pub async fn process_block(
pub async fn process_block<B: Into<BlockWrapper<E>>>(
&self,
slot: Slot,
block_root: Hash256,
block: SignedBeaconBlock<E>,
block: B,
) -> Result<SignedBeaconBlockHash, BlockError<E>> {
self.set_current_slot(slot);
let block_hash: SignedBeaconBlockHash = self
.chain
.process_block(
block_root,
Arc::new(block),
block.into(),
CountUnrealized::True,
NotifyExecutionLayer::Yes,
)

@@ -1685,15 +1731,16 @@ where
Ok(block_hash)
}

pub async fn process_block_result(
pub async fn process_block_result<B: Into<BlockWrapper<E>>>(
&self,
block: SignedBeaconBlock<E>,
block: B,
) -> Result<SignedBeaconBlockHash, BlockError<E>> {
let wrapped_block = block.into();
let block_hash: SignedBeaconBlockHash = self
.chain
.process_block(
block.canonical_root(),
Arc::new(block),
wrapped_block.canonical_root(),
wrapped_block,
CountUnrealized::True,
NotifyExecutionLayer::Yes,
)

@@ -1759,11 +1806,18 @@ where
&self,
slot: Slot,
state: BeaconState<E>,
) -> Result<(SignedBeaconBlockHash, SignedBeaconBlock<E>, BeaconState<E>), BlockError<E>> {
) -> Result<
(
SignedBeaconBlockHash,
BlockContentsTuple<E, FullPayload<E>>,
BeaconState<E>,
),
BlockError<E>,
> {
self.set_current_slot(slot);
let (block, new_state) = self.make_block(state, slot).await;
let block_hash = self
.process_block(slot, block.canonical_root(), block.clone())
.process_block(slot, block.0.canonical_root(), block.clone())
.await?;
Ok((block_hash, block, new_state))
}

@@ -1819,7 +1873,7 @@ where
sync_committee_strategy: SyncCommitteeStrategy,
) -> Result<(SignedBeaconBlockHash, BeaconState<E>), BlockError<E>> {
let (block_hash, block, state) = self.add_block_at_slot(slot, state).await?;
self.attest_block(&state, state_root, block_hash, &block, validators);
self.attest_block(&state, state_root, block_hash, &block.0, validators);

if sync_committee_strategy == SyncCommitteeStrategy::AllValidators
&& state.current_sync_committee().is_ok()

@@ -2047,7 +2101,7 @@ where
state: BeaconState<E>,
slot: Slot,
_block_strategy: BlockStrategy,
) -> (SignedBeaconBlock<E>, BeaconState<E>) {
) -> (BlockContentsTuple<E, FullPayload<E>>, BeaconState<E>) {
self.make_block(state, slot).await
}
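With the harness now returning a `BlockContentsTuple` (the signed block plus any signed blob sidecars) and `process_block` accepting anything convertible into a `BlockWrapper`, the test call sites in the hunks that follow all change shape, as shown in this hedged sketch. The type names come from the diff above; the helper function, its name, and the concrete harness type are assumptions for illustration only.

```rust
// Sketch of test-side usage under assumed types; not a verbatim excerpt from the commit.
use beacon_chain::test_utils::{BeaconChainHarness, EphemeralHarnessType};
use types::{EthSpec, Slot};

// Hypothetical helper: produce a block (plus any blob sidecars) at `slot` and
// import it through the now `BlockWrapper`-generic `process_block`.
async fn produce_and_import_block<E: EthSpec>(
    harness: &BeaconChainHarness<EphemeralHarnessType<E>>,
    slot: Slot,
) {
    let state = harness.get_current_state();

    // `make_block` now returns ((signed block, optional signed blob sidecars), post-state).
    let ((signed_block, maybe_signed_blobs), _post_state) =
        harness.make_block(state, slot).await;
    let block_root = signed_block.canonical_root();

    // The whole tuple converts into a `BlockWrapper`, so it can be handed straight to
    // `process_block`; pre-Deneb blocks simply carry `None` for the blobs.
    harness
        .process_block(slot, block_root, (signed_block, maybe_signed_blobs))
        .await
        .unwrap();
}
```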
@@ -1025,8 +1025,8 @@ async fn verify_block_for_gossip_slashing_detection() {
harness.advance_slot();

let state = harness.get_current_state();
let (block1, _) = harness.make_block(state.clone(), Slot::new(1)).await;
let (block2, _) = harness.make_block(state, Slot::new(1)).await;
let ((block1, _), _) = harness.make_block(state.clone(), Slot::new(1)).await;
let ((block2, _), _) = harness.make_block(state, Slot::new(1)).await;

let verified_block = harness
.chain

@@ -1065,7 +1065,7 @@ async fn verify_block_for_gossip_doppelganger_detection() {
let harness = get_harness(VALIDATOR_COUNT);

let state = harness.get_current_state();
let (block, _) = harness.make_block(state.clone(), Slot::new(1)).await;
let ((block, _), _) = harness.make_block(state.clone(), Slot::new(1)).await;

let verified_block = harness
.chain

@@ -1152,7 +1152,7 @@ async fn add_base_block_to_altair_chain() {
// Produce an Altair block.
let state = harness.get_current_state();
let slot = harness.get_current_slot();
let (altair_signed_block, _) = harness.make_block(state.clone(), slot).await;
let ((altair_signed_block, _), _) = harness.make_block(state.clone(), slot).await;
let altair_block = &altair_signed_block
.as_altair()
.expect("test expects an altair block")

@@ -1289,7 +1289,7 @@ async fn add_altair_block_to_base_chain() {
// Produce an altair block.
let state = harness.get_current_state();
let slot = harness.get_current_slot();
let (base_signed_block, _) = harness.make_block(state.clone(), slot).await;
let ((base_signed_block, _), _) = harness.make_block(state.clone(), slot).await;
let base_block = &base_signed_block
.as_base()
.expect("test expects a base block")
@@ -223,7 +223,7 @@ impl InvalidPayloadRig {
let head = self.harness.chain.head_snapshot();
let state = head.beacon_state.clone_with_only_committee_caches();
let slot = slot_override.unwrap_or(state.slot() + 1);
let (block, post_state) = self.harness.make_block(state, slot).await;
let ((block, _), post_state) = self.harness.make_block(state, slot).await;
let block_root = block.canonical_root();

let set_new_payload = |payload: Payload| match payload {

@@ -691,7 +691,8 @@ async fn invalidates_all_descendants() {
.state_at_slot(fork_parent_slot, StateSkipConfig::WithStateRoots)
.unwrap();
assert_eq!(fork_parent_state.slot(), fork_parent_slot);
let (fork_block, _fork_post_state) = rig.harness.make_block(fork_parent_state, fork_slot).await;
let ((fork_block, _), _fork_post_state) =
rig.harness.make_block(fork_parent_state, fork_slot).await;
let fork_block_root = rig
.harness
.chain

@@ -789,7 +790,8 @@ async fn switches_heads() {
.state_at_slot(fork_parent_slot, StateSkipConfig::WithStateRoots)
.unwrap();
assert_eq!(fork_parent_state.slot(), fork_parent_slot);
let (fork_block, _fork_post_state) = rig.harness.make_block(fork_parent_state, fork_slot).await;
let ((fork_block, _), _fork_post_state) =
rig.harness.make_block(fork_parent_state, fork_slot).await;
let fork_parent_root = fork_block.parent_root();
let fork_block_root = rig
.harness

@@ -1033,8 +1035,8 @@ async fn invalid_parent() {
// Produce another block atop the parent, but don't import yet.
let slot = parent_block.slot() + 1;
rig.harness.set_current_slot(slot);
let (block, state) = rig.harness.make_block(parent_state, slot).await;
let block = Arc::new(block);
let (block_tuple, state) = rig.harness.make_block(parent_state, slot).await;
let block = Arc::new(block_tuple.0);
let block_root = block.canonical_root();
assert_eq!(block.parent_root(), parent_root);

@@ -1850,8 +1852,8 @@ impl InvalidHeadSetup {
.chain
.state_at_slot(slot - 1, StateSkipConfig::WithStateRoots)
.unwrap();
let (fork_block, _) = rig.harness.make_block(parent_state, slot).await;
opt_fork_block = Some(Arc::new(fork_block));
let (fork_block_tuple, _) = rig.harness.make_block(parent_state, slot).await;
opt_fork_block = Some(Arc::new(fork_block_tuple.0));
} else {
// Skipped slot.
};
@@ -2022,7 +2022,7 @@ async fn garbage_collect_temp_states_from_failed_block() {

let genesis_state = harness.get_current_state();
let block_slot = Slot::new(2 * slots_per_epoch);
let (signed_block, state) = harness.make_block(genesis_state, block_slot).await;
let ((signed_block, _), state) = harness.make_block(genesis_state, block_slot).await;

let (mut block, _) = signed_block.deconstruct();

@@ -2422,7 +2422,7 @@ async fn revert_minority_fork_on_resume() {
harness1.process_attestations(attestations.clone());
harness2.process_attestations(attestations);

let (block, new_state) = harness1.make_block(state, slot).await;
let ((block, _), new_state) = harness1.make_block(state, slot).await;

harness1
.process_block(slot, block.canonical_root(), block.clone())

@@ -2463,7 +2463,7 @@ async fn revert_minority_fork_on_resume() {
harness2.process_attestations(attestations);

// Minority chain block (no attesters).
let (block1, new_state1) = harness1.make_block(state1, slot).await;
let ((block1, _), new_state1) = harness1.make_block(state1, slot).await;
harness1
.process_block(slot, block1.canonical_root(), block1)
.await

@@ -2471,7 +2471,7 @@ async fn revert_minority_fork_on_resume() {
state1 = new_state1;

// Majority chain block (all attesters).
let (block2, new_state2) = harness2.make_block(state2, slot).await;
let ((block2, _), new_state2) = harness2.make_block(state2, slot).await;
harness2
.process_block(slot, block2.canonical_root(), block2.clone())
.await
@@ -25,6 +25,7 @@ hex = "0.4.2"
eth2_ssz = "0.4.1"
eth2_ssz_types = "0.2.2"
eth2 = { path = "../../common/eth2" }
kzg = { path = "../../crypto/kzg" }
state_processing = { path = "../../consensus/state_processing" }
superstruct = "0.6.0"
lru = "0.7.1"
@@ -16,12 +16,14 @@ use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use strum::IntoStaticStr;
use superstruct::superstruct;
use types::beacon_block_body::KzgCommitments;
use types::blob_sidecar::Blobs;
pub use types::{
Address, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadHeader,
ExecutionPayloadRef, FixedVector, ForkName, Hash256, Transactions, Uint256, VariableList,
Withdrawal, Withdrawals,
};
use types::{ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge};
use types::{ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, KzgProofs};

pub mod auth;
pub mod http;

@@ -377,6 +379,8 @@ pub struct GetPayloadResponse<T: EthSpec> {
#[superstruct(only(Deneb), partial_getter(rename = "execution_payload_deneb"))]
pub execution_payload: ExecutionPayloadDeneb<T>,
pub block_value: Uint256,
#[superstruct(only(Deneb))]
pub blobs_bundle: BlobsBundleV1<T>,
}

impl<'a, T: EthSpec> From<GetPayloadResponseRef<'a, T>> for ExecutionPayloadRef<'a, T> {

@@ -395,20 +399,25 @@ impl<T: EthSpec> From<GetPayloadResponse<T>> for ExecutionPayload<T> {
}
}

impl<T: EthSpec> From<GetPayloadResponse<T>> for (ExecutionPayload<T>, Uint256) {
impl<T: EthSpec> From<GetPayloadResponse<T>>
for (ExecutionPayload<T>, Uint256, Option<BlobsBundleV1<T>>)
{
fn from(response: GetPayloadResponse<T>) -> Self {
match response {
GetPayloadResponse::Merge(inner) => (
ExecutionPayload::Merge(inner.execution_payload),
inner.block_value,
None,
),
GetPayloadResponse::Capella(inner) => (
ExecutionPayload::Capella(inner.execution_payload),
inner.block_value,
None,
),
GetPayloadResponse::Deneb(inner) => (
ExecutionPayload::Deneb(inner.execution_payload),
inner.block_value,
Some(inner.blobs_bundle),
),
}
}

@@ -513,6 +522,13 @@ impl<E: EthSpec> ExecutionPayloadBodyV1<E> {
}
}

#[derive(Clone, Default, Debug, PartialEq)]
pub struct BlobsBundleV1<E: EthSpec> {
pub commitments: KzgCommitments<E>,
pub proofs: KzgProofs<E>,
pub blobs: Blobs<E>,
}

#[derive(Clone, Copy, Debug)]
pub struct EngineCapabilities {
pub new_payload_v1: bool,
@@ -40,9 +40,6 @@ pub const ENGINE_GET_PAYLOAD_V2: &str = "engine_getPayloadV2";
pub const ENGINE_GET_PAYLOAD_V3: &str = "engine_getPayloadV3";
pub const ENGINE_GET_PAYLOAD_TIMEOUT: Duration = Duration::from_secs(2);

pub const ENGINE_GET_BLOBS_BUNDLE_V1: &str = "engine_getBlobsBundleV1";
pub const ENGINE_GET_BLOBS_BUNDLE_TIMEOUT: Duration = Duration::from_secs(2);

pub const ENGINE_FORKCHOICE_UPDATED_V1: &str = "engine_forkchoiceUpdatedV1";
pub const ENGINE_FORKCHOICE_UPDATED_V2: &str = "engine_forkchoiceUpdatedV2";
pub const ENGINE_FORKCHOICE_UPDATED_TIMEOUT: Duration = Duration::from_secs(8);

@@ -927,23 +924,6 @@ impl HttpJsonRpc {
}
}

pub async fn get_blobs_bundle_v1<T: EthSpec>(
&self,
payload_id: PayloadId,
) -> Result<JsonBlobsBundle<T>, Error> {
let params = json!([JsonPayloadIdRequest::from(payload_id)]);

let response: JsonBlobsBundle<T> = self
.rpc_request(
ENGINE_GET_BLOBS_BUNDLE_V1,
params,
ENGINE_GET_BLOBS_BUNDLE_TIMEOUT,
)
.await?;

Ok(response)
}

pub async fn forkchoice_updated_v1(
&self,
forkchoice_state: ForkchoiceState,
@@ -291,6 +291,8 @@ pub struct JsonGetPayloadResponse<T: EthSpec> {
pub execution_payload: JsonExecutionPayloadV3<T>,
#[serde(with = "eth2_serde_utils::u256_hex_be")]
pub block_value: Uint256,
#[superstruct(only(V3))]
pub blobs_bundle: JsonBlobsBundleV1<T>,
}

impl<T: EthSpec> From<JsonGetPayloadResponse<T>> for GetPayloadResponse<T> {

@@ -312,6 +314,7 @@ impl<T: EthSpec> From<JsonGetPayloadResponse<T>> for GetPayloadResponse<T> {
GetPayloadResponse::Deneb(GetPayloadResponseDeneb {
execution_payload: response.execution_payload.into(),
block_value: response.block_value,
blobs_bundle: response.blobs_bundle.into(),
})
}
}

@@ -409,12 +412,31 @@ impl From<JsonPayloadAttributes> for PayloadAttributes {
}

#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(bound = "T: EthSpec", rename_all = "camelCase")]
pub struct JsonBlobsBundle<T: EthSpec> {
pub block_hash: ExecutionBlockHash,
pub kzgs: KzgCommitments<T>,
#[serde(bound = "E: EthSpec", rename_all = "camelCase")]
pub struct JsonBlobsBundleV1<E: EthSpec> {
pub commitments: KzgCommitments<E>,
pub proofs: KzgProofs<E>,
#[serde(with = "ssz_types::serde_utils::list_of_hex_fixed_vec")]
pub blobs: Blobs<T>,
pub blobs: Blobs<E>,
}

impl<E: EthSpec> From<BlobsBundleV1<E>> for JsonBlobsBundleV1<E> {
fn from(blobs_bundle: BlobsBundleV1<E>) -> Self {
Self {
commitments: blobs_bundle.commitments,
proofs: blobs_bundle.proofs,
blobs: blobs_bundle.blobs,
}
}
}
impl<E: EthSpec> From<JsonBlobsBundleV1<E>> for BlobsBundleV1<E> {
fn from(json_blobs_bundle: JsonBlobsBundleV1<E>) -> Self {
Self {
commitments: json_blobs_bundle.commitments,
proofs: json_blobs_bundle.proofs,
blobs: json_blobs_bundle.blobs,
}
}
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
@@ -45,12 +45,12 @@ use types::beacon_block_body::KzgCommitments;
use types::blob_sidecar::Blobs;
use types::consts::deneb::BLOB_TX_TYPE;
use types::transaction::{AccessTuple, BlobTransaction, EcdsaSignature, SignedBlobTransaction};
use types::Withdrawals;
use types::{AbstractExecPayload, BeaconStateError, ExecPayload, VersionedHash};
use types::{
BlindedPayload, BlockType, ChainSpec, Epoch, ExecutionBlockHash, ExecutionPayload,
ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, ForkName,
};
use types::{KzgProofs, Withdrawals};
use types::{
ProposerPreparationData, PublicKeyBytes, Signature, SignedBeaconBlock, Slot, Transaction,
Uint256,

@@ -141,22 +141,53 @@ pub enum BlockProposalContents<T: EthSpec, Payload: AbstractExecPayload<T>> {
block_value: Uint256,
kzg_commitments: KzgCommitments<T>,
blobs: Blobs<T>,
proofs: KzgProofs<T>,
},
}

impl<E: EthSpec, Payload: AbstractExecPayload<E>> From<GetPayloadResponse<E>>
for BlockProposalContents<E, Payload>
{
fn from(response: GetPayloadResponse<E>) -> Self {
let (execution_payload, block_value, maybe_bundle) = response.into();
match maybe_bundle {
Some(bundle) => Self::PayloadAndBlobs {
payload: execution_payload.into(),
block_value,
kzg_commitments: bundle.commitments,
blobs: bundle.blobs,
proofs: bundle.proofs,
},
None => Self::Payload {
payload: execution_payload.into(),
block_value,
},
}
}
}

#[allow(clippy::type_complexity)]
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Payload> {
pub fn deconstruct(self) -> (Payload, Option<KzgCommitments<T>>, Option<Blobs<T>>) {
pub fn deconstruct(
self,
) -> (
Payload,
Option<KzgCommitments<T>>,
Option<Blobs<T>>,
Option<KzgProofs<T>>,
) {
match self {
Self::Payload {
payload,
block_value: _,
} => (payload, None, None),
} => (payload, None, None, None),
Self::PayloadAndBlobs {
payload,
block_value: _,
kzg_commitments,
blobs,
} => (payload, Some(kzg_commitments), Some(blobs)),
proofs,
} => (payload, Some(kzg_commitments), Some(blobs), Some(proofs)),
}
}

@@ -171,6 +202,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value: _,
kzg_commitments: _,
blobs: _,
proofs: _,
} => payload,
}
}

@@ -185,6 +217,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value: _,
kzg_commitments: _,
blobs: _,
proofs: _,
} => payload,
}
}

@@ -199,6 +232,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value,
kzg_commitments: _,
blobs: _,
proofs: _,
} => block_value,
}
}

@@ -215,6 +249,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value: Uint256::zero(),
blobs: VariableList::default(),
kzg_commitments: VariableList::default(),
proofs: VariableList::default(),
},
})
}

@@ -1116,25 +1151,7 @@ impl<T: EthSpec> ExecutionLayer<T> {
}
};

let blob_fut = async {
match current_fork {
ForkName::Base | ForkName::Altair | ForkName::Merge | ForkName::Capella => {
None
}
ForkName::Deneb => {
debug!(
self.log(),
"Issuing engine_getBlobsBundle";
"suggested_fee_recipient" => ?payload_attributes.suggested_fee_recipient(),
"prev_randao" => ?payload_attributes.prev_randao(),
"timestamp" => payload_attributes.timestamp(),
"parent_hash" => ?parent_hash,
);
Some(engine.api.get_blobs_bundle_v1::<T>(payload_id).await)
}
}
};
let payload_fut = async {
let payload_response = async {
debug!(
self.log(),
"Issuing engine_getPayload";

@@ -1144,9 +1161,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
"parent_hash" => ?parent_hash,
);
engine.api.get_payload::<T>(current_fork, payload_id).await
};
let (blob, payload_response) = tokio::join!(blob_fut, payload_fut);
let (execution_payload, block_value) = payload_response.map(|payload_response| {
}.await?;

if payload_response.execution_payload_ref().fee_recipient() != payload_attributes.suggested_fee_recipient() {
error!(
self.log(),

@@ -1167,22 +1183,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
attempts."
);
}
payload_response.into()
})?;
if let Some(blob) = blob.transpose()? {
// FIXME(sean) cache blobs
Ok(BlockProposalContents::PayloadAndBlobs {
payload: execution_payload.into(),
block_value,
blobs: blob.blobs,
kzg_commitments: blob.kzgs,
})
} else {
Ok(BlockProposalContents::Payload {
payload: execution_payload.into(),
block_value,
})
}

Ok(payload_response.into())
})
.await
.map_err(Box::new)
@@ -6,15 +6,21 @@ use crate::{
},
ExecutionBlock, PayloadAttributes, PayloadId, PayloadStatusV1, PayloadStatusV1Status,
},
ExecutionBlockWithTransactions,
BlobsBundleV1, ExecutionBlockWithTransactions,
};
use kzg::{Kzg, BYTES_PER_BLOB, BYTES_PER_FIELD_ELEMENT, FIELD_ELEMENTS_PER_BLOB};
use rand::RngCore;
use serde::{Deserialize, Serialize};
use ssz::Encode;
use std::collections::HashMap;
use std::sync::Arc;
use tree_hash::TreeHash;
use tree_hash_derive::TreeHash;
use types::transaction::{BlobTransaction, EcdsaSignature, SignedBlobTransaction};
use types::{
EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadCapella, ExecutionPayloadDeneb,
ExecutionPayloadMerge, ForkName, Hash256, Uint256,
Blob, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadCapella,
ExecutionPayloadDeneb, ExecutionPayloadMerge, ForkName, Hash256, Transaction, Transactions,
Uint256,
};

const GAS_LIMIT: u64 = 16384;

@@ -119,6 +125,11 @@ pub struct ExecutionBlockGenerator<T: EthSpec> {
*/
pub shanghai_time: Option<u64>, // withdrawals
pub deneb_time: Option<u64>, // 4844
/*
* deneb stuff
*/
pub blobs_bundles: HashMap<PayloadId, BlobsBundleV1<T>>,
pub kzg: Option<Arc<Kzg>>,
}

impl<T: EthSpec> ExecutionBlockGenerator<T> {

@@ -128,6 +139,7 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
terminal_block_hash: ExecutionBlockHash,
shanghai_time: Option<u64>,
deneb_time: Option<u64>,
kzg: Option<Kzg>,
) -> Self {
let mut gen = Self {
head_block: <_>::default(),

@@ -142,6 +154,8 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
payload_ids: <_>::default(),
shanghai_time,
deneb_time,
blobs_bundles: <_>::default(),
kzg: kzg.map(Arc::new),
};

gen.insert_pow_block(0).unwrap();

@@ -394,6 +408,11 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
self.payload_ids.get(id).cloned()
}

pub fn get_blobs_bundle(&mut self, id: &PayloadId) -> Option<BlobsBundleV1<T>> {
// remove it to free memory
self.blobs_bundles.remove(id)
}

pub fn new_payload(&mut self, payload: ExecutionPayload<T>) -> PayloadStatusV1 {
let parent = if let Some(parent) = self.blocks.get(&payload.parent_hash()) {
parent

@@ -561,6 +580,22 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
}
};

match execution_payload.fork_name() {
ForkName::Base | ForkName::Altair | ForkName::Merge | ForkName::Capella => {}
ForkName::Deneb => {
// get random number between 0 and Max Blobs
let num_blobs = rand::random::<usize>() % T::max_blobs_per_block();
let (bundle, transactions) = self.generate_random_blobs(num_blobs)?;
for tx in Vec::from(transactions) {
execution_payload
.transactions_mut()
.push(tx)
.map_err(|_| "transactions are full".to_string())?;
}
self.blobs_bundles.insert(id, bundle);
}
}

*execution_payload.block_hash_mut() =
ExecutionBlockHash::from_root(execution_payload.tree_hash_root());

@@ -590,6 +625,88 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
payload_id: id.map(Into::into),
})
}

fn generate_random_blobs(
&self,
n_blobs: usize,
) -> Result<(BlobsBundleV1<T>, Transactions<T>), String> {
let mut bundle = BlobsBundleV1::<T>::default();
let mut transactions = vec![];
for blob_index in 0..n_blobs {
// fill a vector with random bytes
let mut blob_bytes = [0u8; BYTES_PER_BLOB];
rand::thread_rng().fill_bytes(&mut blob_bytes);
// Ensure that the blob is canonical by ensuring that
// each field element contained in the blob is < BLS_MODULUS
for i in 0..FIELD_ELEMENTS_PER_BLOB {
blob_bytes[i * BYTES_PER_FIELD_ELEMENT + BYTES_PER_FIELD_ELEMENT - 1] = 0;
}

let blob = Blob::<T>::new(Vec::from(blob_bytes))
.map_err(|e| format!("error constructing random blob: {:?}", e))?;

let commitment = self
.kzg
.as_ref()
.ok_or("kzg not initialized")?
.blob_to_kzg_commitment(blob_bytes.into())
.map_err(|e| format!("error computing kzg commitment: {:?}", e))?;

let proof = self
.kzg
.as_ref()
.ok_or("kzg not initialized")?
.compute_blob_kzg_proof(blob_bytes.into(), commitment)
.map_err(|e| format!("error computing kzg proof: {:?}", e))?;

let versioned_hash = commitment.calculate_versioned_hash();

let blob_transaction = BlobTransaction {
chain_id: Default::default(),
nonce: 0,
max_priority_fee_per_gas: Default::default(),
max_fee_per_gas: Default::default(),
gas: 100000,
to: None,
value: Default::default(),
data: Default::default(),
access_list: Default::default(),
max_fee_per_data_gas: Default::default(),
versioned_hashes: vec![versioned_hash].into(),
};
let bad_signature = EcdsaSignature {
y_parity: false,
r: Uint256::from(0),
s: Uint256::from(0),
};
let signed_blob_transaction = SignedBlobTransaction {
message: blob_transaction,
signature: bad_signature,
};
// calculate transaction bytes
let tx_bytes = [0x05u8]
.into_iter()
.chain(signed_blob_transaction.as_ssz_bytes().into_iter())
.collect::<Vec<_>>();
let tx = Transaction::<T::MaxBytesPerTransaction>::from(tx_bytes);

transactions.push(tx);
bundle
.blobs
.push(blob)
.map_err(|_| format!("blobs are full, blob index: {:?}", blob_index))?;
bundle
.commitments
.push(commitment)
.map_err(|_| format!("blobs are full, blob index: {:?}", blob_index))?;
bundle
.proofs
.push(proof)
.map_err(|_| format!("blobs are full, blob index: {:?}", blob_index))?;
}

Ok((bundle, transactions.into()))
}
}

fn payload_id_from_u64(n: u64) -> PayloadId {

@@ -650,6 +767,7 @@ mod test {
ExecutionBlockHash::zero(),
None,
None,
None,
);

for i in 0..=TERMINAL_BLOCK {
@@ -224,6 +224,8 @@ pub async fn handle_rpc<T: EthSpec>(
)
})?;

let maybe_blobs = ctx.execution_block_generator.write().get_blobs_bundle(&id);

// validate method called correctly according to shanghai fork time
if ctx
.execution_block_generator

@@ -291,6 +293,12 @@ pub async fn handle_rpc<T: EthSpec>(
serde_json::to_value(JsonGetPayloadResponseV3 {
execution_payload,
block_value: DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI.into(),
blobs_bundle: maybe_blobs
.ok_or((
"No blobs returned despite V3 Payload".to_string(),
GENERIC_ERROR_CODE,
))?
.into(),
})
.unwrap()
}

@@ -324,7 +332,7 @@ pub async fn handle_rpc<T: EthSpec>(
.map(|opt| opt.map(JsonPayloadAttributes::V1))
.transpose()
}
ForkName::Capella => {
ForkName::Capella | ForkName::Deneb => {
get_param::<Option<JsonPayloadAttributesV2>>(params, 1)
.map(|opt| opt.map(JsonPayloadAttributes::V2))
.transpose()
@@ -5,6 +5,7 @@ use crate::{
},
Config, *,
};
use kzg::Kzg;
use sensitive_url::SensitiveUrl;
use task_executor::TaskExecutor;
use tempfile::NamedTempFile;

@@ -33,6 +34,7 @@ impl<T: EthSpec> MockExecutionLayer<T> {
Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
spec,
None,
None,
)
}

@@ -46,6 +48,7 @@ impl<T: EthSpec> MockExecutionLayer<T> {
jwt_key: Option<JwtKey>,
spec: ChainSpec,
builder_url: Option<SensitiveUrl>,
kzg: Option<Kzg>,
) -> Self {
let handle = executor.handle().unwrap();

@@ -58,6 +61,7 @@ impl<T: EthSpec> MockExecutionLayer<T> {
spec.terminal_block_hash,
shanghai_time,
deneb_time,
kzg,
);

let url = SensitiveUrl::parse(&server.url()).unwrap();
@@ -8,6 +8,7 @@ use bytes::Bytes;
use environment::null_logger;
use execution_block_generator::PoWBlock;
use handle_rpc::handle_rpc;
use kzg::Kzg;
use parking_lot::{Mutex, RwLock, RwLockWriteGuard};
use serde::{Deserialize, Serialize};
use serde_json::json;

@@ -96,10 +97,15 @@ impl<T: EthSpec> MockServer<T> {
ExecutionBlockHash::zero(),
None, // FIXME(capella): should this be the default?
None, // FIXME(deneb): should this be the default?
None, // FIXME(deneb): should this be the default?
)
}

pub fn new_with_config(handle: &runtime::Handle, config: MockExecutionConfig) -> Self {
pub fn new_with_config(
handle: &runtime::Handle,
config: MockExecutionConfig,
kzg: Option<Kzg>,
) -> Self {
let MockExecutionConfig {
jwt_key,
terminal_difficulty,

@@ -117,6 +123,7 @@ impl<T: EthSpec> MockServer<T> {
terminal_block_hash,
shanghai_time,
deneb_time,
kzg,
);

let ctx: Arc<Context<T>> = Arc::new(Context {

@@ -168,6 +175,7 @@ impl<T: EthSpec> MockServer<T> {
*self.ctx.engine_capabilities.write() = engine_capabilities;
}

#[allow(clippy::too_many_arguments)]
pub fn new(
handle: &runtime::Handle,
jwt_key: JwtKey,

@@ -176,6 +184,7 @@ impl<T: EthSpec> MockServer<T> {
terminal_block_hash: ExecutionBlockHash,
shanghai_time: Option<u64>,
deneb_time: Option<u64>,
kzg: Option<Kzg>,
) -> Self {
Self::new_with_config(
handle,

@@ -188,6 +197,7 @@ impl<T: EthSpec> MockServer<T> {
shanghai_time,
deneb_time,
},
kzg,
)
}
@@ -480,7 +480,7 @@ pub async fn proposer_boost_re_org_test(

// Produce block B and process it halfway through the slot.
let (block_b, mut state_b) = harness.make_block(state_a.clone(), slot_b).await;
let block_b_root = block_b.canonical_root();
let block_b_root = block_b.0.canonical_root();

let obs_time = slot_clock.start_of(slot_b).unwrap() + slot_clock.slot_duration() / 2;
slot_clock.set_current_time(obs_time);

@@ -573,8 +573,18 @@ pub async fn proposer_boost_re_org_test(

// Check the fork choice updates that were sent.
let forkchoice_updates = forkchoice_updates.lock();
let block_a_exec_hash = block_a.message().execution_payload().unwrap().block_hash();
let block_b_exec_hash = block_b.message().execution_payload().unwrap().block_hash();
let block_a_exec_hash = block_a
.0
.message()
.execution_payload()
.unwrap()
.block_hash();
let block_b_exec_hash = block_b
.0
.message()
.execution_payload()
.unwrap()
.block_hash();

let block_c_timestamp = block_c.message().execution_payload().unwrap().timestamp();

@@ -679,7 +689,7 @@ pub async fn fork_choice_before_proposal() {
let state_a = harness.get_current_state();
let (block_b, state_b) = harness.make_block(state_a.clone(), slot_b).await;
let block_root_b = harness
.process_block(slot_b, block_b.canonical_root(), block_b)
.process_block(slot_b, block_b.0.canonical_root(), block_b)
.await
.unwrap();

@@ -694,7 +704,7 @@ pub async fn fork_choice_before_proposal() {

let (block_c, state_c) = harness.make_block(state_a, slot_c).await;
let block_root_c = harness
.process_block(slot_c, block_c.canonical_root(), block_c.clone())
.process_block(slot_c, block_c.0.canonical_root(), block_c.clone())
.await
.unwrap();
@@ -107,7 +107,7 @@ impl TestRig {
"precondition: current slot is one after head"
);

let (next_block, next_state) = harness
let (next_block_tuple, next_state) = harness
.make_block(head.beacon_state.clone(), harness.chain.slot().unwrap())
.await;

@@ -133,9 +133,9 @@ impl TestRig {
.get_unaggregated_attestations(
&AttestationStrategy::AllValidators,
&next_state,
next_block.state_root(),
next_block.canonical_root(),
next_block.slot(),
next_block_tuple.0.state_root(),
next_block_tuple.0.canonical_root(),
next_block_tuple.0.slot(),
)
.into_iter()
.flatten()

@@ -145,9 +145,9 @@ impl TestRig {
.make_attestations(
&harness.get_all_validators(),
&next_state,
next_block.state_root(),
next_block.canonical_root().into(),
next_block.slot(),
next_block_tuple.0.state_root(),
next_block_tuple.0.canonical_root().into(),
next_block_tuple.0.slot(),
)
.into_iter()
.filter_map(|(_, aggregate_opt)| aggregate_opt)

@@ -209,7 +209,7 @@ impl TestRig {

Self {
chain,
next_block: Arc::new(next_block),
next_block: Arc::new(next_block_tuple.0),
attestations,
next_block_attestations,
next_block_aggregate_attestations,
@@ -179,15 +179,15 @@ impl ForkChoiceTest {
let slot = self.harness.get_current_slot();
let (block, state_) = self.harness.make_block(state, slot).await;
state = state_;
if !predicate(block.message(), &state) {
if !predicate(block.0.message(), &state) {
break;
}
if let Ok(block_hash) = self.harness.process_block_result(block.clone()).await {
self.harness.attest_block(
&state,
block.state_root(),
block.0.state_root(),
block_hash,
&block,
&block.0,
&validators,
);
self.harness.advance_slot();

@@ -273,8 +273,8 @@ impl ForkChoiceTest {
)
.unwrap();
let slot = self.harness.get_current_slot();
let (mut signed_block, mut state) = self.harness.make_block(state, slot).await;
func(&mut signed_block, &mut state);
let (mut block_tuple, mut state) = self.harness.make_block(state, slot).await;
func(&mut block_tuple.0, &mut state);
let current_slot = self.harness.get_current_slot();
self.harness
.chain

@@ -282,8 +282,8 @@ impl ForkChoiceTest {
.fork_choice_write_lock()
.on_block(
current_slot,
signed_block.message(),
signed_block.canonical_root(),
block_tuple.0.message(),
block_tuple.0.canonical_root(),
Duration::from_secs(0),
&state,
PayloadVerificationStatus::Verified,

@@ -315,8 +315,8 @@ impl ForkChoiceTest {
)
.unwrap();
let slot = self.harness.get_current_slot();
let (mut signed_block, mut state) = self.harness.make_block(state, slot).await;
mutation_func(&mut signed_block, &mut state);
let (mut block_tuple, mut state) = self.harness.make_block(state, slot).await;
mutation_func(&mut block_tuple.0, &mut state);
let current_slot = self.harness.get_current_slot();
let err = self
.harness

@@ -325,8 +325,8 @@ impl ForkChoiceTest {
.fork_choice_write_lock()
.on_block(
current_slot,
signed_block.message(),
signed_block.canonical_root(),
block_tuple.0.message(),
block_tuple.0.canonical_root(),
Duration::from_secs(0),
&state,
PayloadVerificationStatus::Verified,
@@ -1,5 +1,6 @@
use crate::test_utils::TestRandom;
use crate::{Blob, EthSpec, Hash256, SignedRoot, Slot};
use crate::{Blob, ChainSpec, Domain, EthSpec, Fork, Hash256, SignedBlobSidecar, SignedRoot, Slot};
use bls::SecretKey;
use derivative::Derivative;
use kzg::{KzgCommitment, KzgProof};
use serde_derive::{Deserialize, Serialize};

@@ -72,7 +73,7 @@ impl<T: EthSpec> Ord for BlobSidecar<T> {
}

pub type BlobSidecarList<T> = VariableList<Arc<BlobSidecar<T>>, <T as EthSpec>::MaxBlobsPerBlock>;
pub type Blobs<T> = VariableList<Blob<T>, <T as EthSpec>::MaxExtraDataBytes>;
pub type Blobs<T> = VariableList<Blob<T>, <T as EthSpec>::MaxBlobsPerBlock>;

impl<T: EthSpec> SignedRoot for BlobSidecar<T> {}

@@ -93,4 +94,28 @@ impl<T: EthSpec> BlobSidecar<T> {
// Fixed part
Self::empty().as_ssz_bytes().len()
}

// this is mostly not used except for in testing
pub fn sign(
self: Arc<Self>,
secret_key: &SecretKey,
fork: &Fork,
genesis_validators_root: Hash256,
spec: &ChainSpec,
) -> SignedBlobSidecar<T> {
let signing_epoch = self.slot.epoch(T::slots_per_epoch());
let domain = spec.get_domain(
signing_epoch,
Domain::BlobSidecar,
fork,
genesis_validators_root,
);
let message = self.signing_root(domain);
let signature = secret_key.sign(message);

SignedBlobSidecar {
message: self,
signature,
}
}
}
@@ -204,6 +204,7 @@ pub type Address = H160;
pub type ForkVersion = [u8; 4];
pub type BLSFieldElement = Uint256;
pub type Blob<T> = FixedVector<u8, <T as EthSpec>::BytesPerBlob>;
pub type KzgProofs<T> = VariableList<KzgProof, <T as EthSpec>::MaxBlobsPerBlock>;
pub type VersionedHash = Hash256;
pub type Hash64 = ethereum_types::H64;
@@ -1,18 +1,29 @@
use c_kzg::{Bytes48, BYTES_PER_COMMITMENT};
use derivative::Derivative;
use eth2_hashing::hash_fixed;
use serde::de::{Deserialize, Deserializer};
use serde::ser::{Serialize, Serializer};
use ssz_derive::{Decode, Encode};
use std::fmt;
use std::fmt::{Debug, Display, Formatter};
use std::str::FromStr;
use tree_hash::{PackedEncoding, TreeHash};
use tree_hash::{Hash256, PackedEncoding, TreeHash};

pub const BLOB_COMMITMENT_VERSION_KZG: u8 = 0x01;

#[derive(Derivative, Clone, Copy, Encode, Decode)]
#[derivative(PartialEq, Eq, Hash)]
#[ssz(struct_behaviour = "transparent")]
pub struct KzgCommitment(pub [u8; BYTES_PER_COMMITMENT]);

impl KzgCommitment {
pub fn calculate_versioned_hash(&self) -> Hash256 {
let mut versioned_hash = hash_fixed(&self.0);
versioned_hash[0] = BLOB_COMMITMENT_VERSION_KZG;
Hash256::from_slice(versioned_hash.as_slice())
}
}

impl From<KzgCommitment> for Bytes48 {
fn from(value: KzgCommitment) -> Self {
value.0.into()
@@ -20,6 +20,7 @@ pub enum Error {
}

/// A wrapper over a kzg library that holds the trusted setup parameters.
#[derive(Debug)]
pub struct Kzg {
trusted_setup: KzgSettings,
}
@@ -236,7 +236,7 @@ impl<E: EthSpec> LocalExecutionNode<E> {
panic!("Failed to write jwt file {}", e);
}
Self {
server: MockServer::new_with_config(&context.executor.handle().unwrap(), config),
server: MockServer::new_with_config(&context.executor.handle().unwrap(), config, None),
datadir,
}
}