Update engine_api to latest version (#4223)

* Update Engine API to Latest

* Get Mock EE Working

* Fix Mock EE

* Update Engine API Again

* Rip out get_blobs_bundle Stuff

* Fix Test Harness

* Fix Clippy Complaints

* Fix Beacon Chain Tests
Authored by ethDreamer on 2023-04-27 13:18:21 -05:00, committed via GitHub
parent aa34339298
commit c1d47da02d
24 changed files with 449 additions and 159 deletions
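
The headline change: engine_getPayloadV3 now returns the blobs bundle alongside the execution payload, so the separate engine_getBlobsBundleV1 call (and its timeout constant) is ripped out. On the Rust side this surfaces as a three-element tuple conversion; a rough sketch (import paths approximate, helper name illustrative):

use execution_layer::engine_api::{BlobsBundleV1, GetPayloadResponse};
use types::{EthSpec, ExecutionPayload, Uint256};

// Deneb responses carry the bundle inline; earlier forks yield None, and no
// second engine call is needed.
fn split_response<E: EthSpec>(
    response: GetPayloadResponse<E>,
) -> (ExecutionPayload<E>, Uint256, Option<BlobsBundleV1<E>>) {
    response.into()
}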

Cargo.lock (generated)

@ -2647,6 +2647,7 @@ dependencies = [
"hex", "hex",
"jsonwebtoken", "jsonwebtoken",
"keccak-hash", "keccak-hash",
"kzg",
"lazy_static", "lazy_static",
"lighthouse_metrics", "lighthouse_metrics",
"lru 0.7.8", "lru 0.7.8",


@ -4711,7 +4711,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
bls_to_execution_changes, bls_to_execution_changes,
} = partial_beacon_block; } = partial_beacon_block;
let (inner_block, blobs_opt) = match &state { let (inner_block, blobs_opt, proofs_opt) = match &state {
BeaconState::Base(_) => ( BeaconState::Base(_) => (
BeaconBlock::Base(BeaconBlockBase { BeaconBlock::Base(BeaconBlockBase {
slot, slot,
@ -4731,6 +4731,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
}, },
}), }),
None, None,
None,
), ),
BeaconState::Altair(_) => ( BeaconState::Altair(_) => (
BeaconBlock::Altair(BeaconBlockAltair { BeaconBlock::Altair(BeaconBlockAltair {
@ -4753,9 +4754,10 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
}, },
}), }),
None, None,
None,
), ),
BeaconState::Merge(_) => { BeaconState::Merge(_) => {
let (payload, _, _) = block_contents let (payload, _, _, _) = block_contents
.ok_or(BlockProductionError::MissingExecutionPayload)? .ok_or(BlockProductionError::MissingExecutionPayload)?
.deconstruct(); .deconstruct();
( (
@ -4781,10 +4783,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
}, },
}), }),
None, None,
None,
) )
} }
BeaconState::Capella(_) => { BeaconState::Capella(_) => {
let (payload, _, _) = block_contents let (payload, _, _, _) = block_contents
.ok_or(BlockProductionError::MissingExecutionPayload)? .ok_or(BlockProductionError::MissingExecutionPayload)?
.deconstruct(); .deconstruct();
( (
@ -4811,10 +4814,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
}, },
}), }),
None, None,
None,
) )
} }
BeaconState::Deneb(_) => { BeaconState::Deneb(_) => {
let (payload, kzg_commitments, blobs) = block_contents let (payload, kzg_commitments, blobs, proofs) = block_contents
.ok_or(BlockProductionError::MissingExecutionPayload)? .ok_or(BlockProductionError::MissingExecutionPayload)?
.deconstruct(); .deconstruct();
( (
@ -4843,6 +4847,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
}, },
}), }),
blobs, blobs,
proofs,
) )
} }
}; };
@ -4915,8 +4920,11 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
))); )));
} }
let kzg_proofs = let kzg_proofs = if let Some(proofs) = proofs_opt {
Self::compute_blob_kzg_proofs(kzg, &blobs, expected_kzg_commitments, slot)?; Vec::from(proofs)
} else {
Self::compute_blob_kzg_proofs(kzg, &blobs, expected_kzg_commitments, slot)?
};
kzg_utils::validate_blobs::<T::EthSpec>( kzg_utils::validate_blobs::<T::EthSpec>(
kzg, kzg,


@ -12,13 +12,14 @@ use crate::data_availability_checker::{
}; };
use crate::kzg_utils::{validate_blob, validate_blobs}; use crate::kzg_utils::{validate_blob, validate_blobs};
use crate::BeaconChainError; use crate::BeaconChainError;
use eth2::types::BlockContentsTuple;
use kzg::Kzg; use kzg::Kzg;
use slog::{debug, warn}; use slog::{debug, warn};
use std::borrow::Cow; use std::borrow::Cow;
use types::{ use types::{
BeaconBlockRef, BeaconState, BeaconStateError, BlobSidecar, BlobSidecarList, ChainSpec, BeaconBlockRef, BeaconState, BeaconStateError, BlobSidecar, BlobSidecarList, ChainSpec,
CloneConfig, Epoch, EthSpec, Hash256, KzgCommitment, RelativeEpoch, SignedBeaconBlock, CloneConfig, Epoch, EthSpec, FullPayload, Hash256, KzgCommitment, RelativeEpoch,
SignedBeaconBlockHeader, SignedBlobSidecar, Slot, SignedBeaconBlock, SignedBeaconBlockHeader, SignedBlobSidecar, Slot,
}; };
#[derive(Debug)] #[derive(Debug)]
@ -659,3 +660,18 @@ impl<E: EthSpec> From<SignedBeaconBlock<E>> for BlockWrapper<E> {
Self::Block(Arc::new(value)) Self::Block(Arc::new(value))
} }
} }
impl<E: EthSpec> From<BlockContentsTuple<E, FullPayload<E>>> for BlockWrapper<E> {
fn from(value: BlockContentsTuple<E, FullPayload<E>>) -> Self {
match value.1 {
Some(variable_list) => Self::BlockAndBlobs(
Arc::new(value.0),
Vec::from(variable_list)
.into_iter()
.map(|signed_blob| signed_blob.message)
.collect::<Vec<_>>(),
),
None => Self::Block(Arc::new(value.0)),
}
}
}


@ -1,3 +1,4 @@
use crate::blob_verification::{AsBlock, BlockWrapper};
pub use crate::persisted_beacon_chain::PersistedBeaconChain; pub use crate::persisted_beacon_chain::PersistedBeaconChain;
pub use crate::{ pub use crate::{
beacon_chain::{BEACON_CHAIN_DB_KEY, ETH1_CACHE_DB_KEY, FORK_CHOICE_DB_KEY, OP_POOL_DB_KEY}, beacon_chain::{BEACON_CHAIN_DB_KEY, ETH1_CACHE_DB_KEY, FORK_CHOICE_DB_KEY, OP_POOL_DB_KEY},
@ -13,6 +14,7 @@ use crate::{
StateSkipConfig, StateSkipConfig,
}; };
use bls::get_withdrawal_credentials; use bls::get_withdrawal_credentials;
use eth2::types::BlockContentsTuple;
use execution_layer::{ use execution_layer::{
auth::JwtKey, auth::JwtKey,
test_utils::{ test_utils::{
@ -25,7 +27,7 @@ use fork_choice::CountUnrealized;
use futures::channel::mpsc::Receiver; use futures::channel::mpsc::Receiver;
pub use genesis::{interop_genesis_state_with_eth1, DEFAULT_ETH1_BLOCK_HASH}; pub use genesis::{interop_genesis_state_with_eth1, DEFAULT_ETH1_BLOCK_HASH};
use int_to_bytes::int_to_bytes32; use int_to_bytes::int_to_bytes32;
use kzg::TrustedSetup; use kzg::{Kzg, TrustedSetup};
use merkle_proof::MerkleTree; use merkle_proof::MerkleTree;
use parking_lot::Mutex; use parking_lot::Mutex;
use parking_lot::RwLockWriteGuard; use parking_lot::RwLockWriteGuard;
@ -446,6 +448,13 @@ where
let deneb_time = spec.deneb_fork_epoch.map(|epoch| { let deneb_time = spec.deneb_fork_epoch.map(|epoch| {
HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64() HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
}); });
let trusted_setup: TrustedSetup =
serde_json::from_reader(eth2_network_config::TRUSTED_SETUP)
.map_err(|e| format!("Unable to read trusted setup file: {}", e))
.expect("should have trusted setup");
let kzg = Kzg::new_from_trusted_setup(trusted_setup).expect("should create kzg");
let mock = MockExecutionLayer::new( let mock = MockExecutionLayer::new(
self.runtime.task_executor.clone(), self.runtime.task_executor.clone(),
DEFAULT_TERMINAL_BLOCK, DEFAULT_TERMINAL_BLOCK,
@ -455,6 +464,7 @@ where
Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()), Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
spec, spec,
None, None,
Some(kzg),
); );
self.execution_layer = Some(mock.el.clone()); self.execution_layer = Some(mock.el.clone());
self.mock_execution_layer = Some(mock); self.mock_execution_layer = Some(mock);
@ -477,6 +487,11 @@ where
let deneb_time = spec.deneb_fork_epoch.map(|epoch| { let deneb_time = spec.deneb_fork_epoch.map(|epoch| {
HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64() HARNESS_GENESIS_TIME + spec.seconds_per_slot * E::slots_per_epoch() * epoch.as_u64()
}); });
let trusted_setup: TrustedSetup =
serde_json::from_reader(eth2_network_config::TRUSTED_SETUP)
.map_err(|e| format!("Unable to read trusted setup file: {}", e))
.expect("should have trusted setup");
let kzg = Kzg::new_from_trusted_setup(trusted_setup).expect("should create kzg");
let mock_el = MockExecutionLayer::new( let mock_el = MockExecutionLayer::new(
self.runtime.task_executor.clone(), self.runtime.task_executor.clone(),
DEFAULT_TERMINAL_BLOCK, DEFAULT_TERMINAL_BLOCK,
@ -486,6 +501,7 @@ where
Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()), Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
spec.clone(), spec.clone(),
Some(builder_url.clone()), Some(builder_url.clone()),
Some(kzg),
) )
.move_to_terminal_block(); .move_to_terminal_block();
@ -755,7 +771,7 @@ where
&self, &self,
mut state: BeaconState<E>, mut state: BeaconState<E>,
slot: Slot, slot: Slot,
) -> (SignedBeaconBlock<E>, BeaconState<E>) { ) -> (BlockContentsTuple<E, FullPayload<E>>, BeaconState<E>) {
assert_ne!(slot, 0, "can't produce a block at slot 0"); assert_ne!(slot, 0, "can't produce a block at slot 0");
assert!(slot >= state.slot()); assert!(slot >= state.slot());
@ -795,7 +811,37 @@ where
&self.spec, &self.spec,
); );
(signed_block, state) let block_contents: BlockContentsTuple<E, FullPayload<E>> = match &signed_block {
SignedBeaconBlock::Base(_)
| SignedBeaconBlock::Altair(_)
| SignedBeaconBlock::Merge(_)
| SignedBeaconBlock::Capella(_) => (signed_block, None),
SignedBeaconBlock::Deneb(_) => {
if let Some(blobs) = self
.chain
.proposal_blob_cache
.pop(&signed_block.canonical_root())
{
let signed_blobs = Vec::from(blobs)
.into_iter()
.map(|blob| {
blob.sign(
&self.validator_keypairs[proposer_index].sk,
&state.fork(),
state.genesis_validators_root(),
&self.spec,
)
})
.collect::<Vec<_>>()
.into();
(signed_block, Some(signed_blobs))
} else {
(signed_block, None)
}
}
};
(block_contents, state)
} }
/// Useful for the `per_block_processing` tests. Creates a block, and returns the state after /// Useful for the `per_block_processing` tests. Creates a block, and returns the state after
@ -1663,18 +1709,18 @@ where
(deposits, state) (deposits, state)
} }
pub async fn process_block( pub async fn process_block<B: Into<BlockWrapper<E>>>(
&self, &self,
slot: Slot, slot: Slot,
block_root: Hash256, block_root: Hash256,
block: SignedBeaconBlock<E>, block: B,
) -> Result<SignedBeaconBlockHash, BlockError<E>> { ) -> Result<SignedBeaconBlockHash, BlockError<E>> {
self.set_current_slot(slot); self.set_current_slot(slot);
let block_hash: SignedBeaconBlockHash = self let block_hash: SignedBeaconBlockHash = self
.chain .chain
.process_block( .process_block(
block_root, block_root,
Arc::new(block), block.into(),
CountUnrealized::True, CountUnrealized::True,
NotifyExecutionLayer::Yes, NotifyExecutionLayer::Yes,
) )
@ -1685,15 +1731,16 @@ where
Ok(block_hash) Ok(block_hash)
} }
pub async fn process_block_result( pub async fn process_block_result<B: Into<BlockWrapper<E>>>(
&self, &self,
block: SignedBeaconBlock<E>, block: B,
) -> Result<SignedBeaconBlockHash, BlockError<E>> { ) -> Result<SignedBeaconBlockHash, BlockError<E>> {
let wrapped_block = block.into();
let block_hash: SignedBeaconBlockHash = self let block_hash: SignedBeaconBlockHash = self
.chain .chain
.process_block( .process_block(
block.canonical_root(), wrapped_block.canonical_root(),
Arc::new(block), wrapped_block,
CountUnrealized::True, CountUnrealized::True,
NotifyExecutionLayer::Yes, NotifyExecutionLayer::Yes,
) )
@ -1759,11 +1806,18 @@ where
&self, &self,
slot: Slot, slot: Slot,
state: BeaconState<E>, state: BeaconState<E>,
) -> Result<(SignedBeaconBlockHash, SignedBeaconBlock<E>, BeaconState<E>), BlockError<E>> { ) -> Result<
(
SignedBeaconBlockHash,
BlockContentsTuple<E, FullPayload<E>>,
BeaconState<E>,
),
BlockError<E>,
> {
self.set_current_slot(slot); self.set_current_slot(slot);
let (block, new_state) = self.make_block(state, slot).await; let (block, new_state) = self.make_block(state, slot).await;
let block_hash = self let block_hash = self
.process_block(slot, block.canonical_root(), block.clone()) .process_block(slot, block.0.canonical_root(), block.clone())
.await?; .await?;
Ok((block_hash, block, new_state)) Ok((block_hash, block, new_state))
} }
@ -1819,7 +1873,7 @@ where
sync_committee_strategy: SyncCommitteeStrategy, sync_committee_strategy: SyncCommitteeStrategy,
) -> Result<(SignedBeaconBlockHash, BeaconState<E>), BlockError<E>> { ) -> Result<(SignedBeaconBlockHash, BeaconState<E>), BlockError<E>> {
let (block_hash, block, state) = self.add_block_at_slot(slot, state).await?; let (block_hash, block, state) = self.add_block_at_slot(slot, state).await?;
self.attest_block(&state, state_root, block_hash, &block, validators); self.attest_block(&state, state_root, block_hash, &block.0, validators);
if sync_committee_strategy == SyncCommitteeStrategy::AllValidators if sync_committee_strategy == SyncCommitteeStrategy::AllValidators
&& state.current_sync_committee().is_ok() && state.current_sync_committee().is_ok()
@ -2047,7 +2101,7 @@ where
state: BeaconState<E>, state: BeaconState<E>,
slot: Slot, slot: Slot,
_block_strategy: BlockStrategy, _block_strategy: BlockStrategy,
) -> (SignedBeaconBlock<E>, BeaconState<E>) { ) -> (BlockContentsTuple<E, FullPayload<E>>, BeaconState<E>) {
self.make_block(state, slot).await self.make_block(state, slot).await
} }


@ -1025,8 +1025,8 @@ async fn verify_block_for_gossip_slashing_detection() {
harness.advance_slot(); harness.advance_slot();
let state = harness.get_current_state(); let state = harness.get_current_state();
let (block1, _) = harness.make_block(state.clone(), Slot::new(1)).await; let ((block1, _), _) = harness.make_block(state.clone(), Slot::new(1)).await;
let (block2, _) = harness.make_block(state, Slot::new(1)).await; let ((block2, _), _) = harness.make_block(state, Slot::new(1)).await;
let verified_block = harness let verified_block = harness
.chain .chain
@ -1065,7 +1065,7 @@ async fn verify_block_for_gossip_doppelganger_detection() {
let harness = get_harness(VALIDATOR_COUNT); let harness = get_harness(VALIDATOR_COUNT);
let state = harness.get_current_state(); let state = harness.get_current_state();
let (block, _) = harness.make_block(state.clone(), Slot::new(1)).await; let ((block, _), _) = harness.make_block(state.clone(), Slot::new(1)).await;
let verified_block = harness let verified_block = harness
.chain .chain
@ -1152,7 +1152,7 @@ async fn add_base_block_to_altair_chain() {
// Produce an Altair block. // Produce an Altair block.
let state = harness.get_current_state(); let state = harness.get_current_state();
let slot = harness.get_current_slot(); let slot = harness.get_current_slot();
let (altair_signed_block, _) = harness.make_block(state.clone(), slot).await; let ((altair_signed_block, _), _) = harness.make_block(state.clone(), slot).await;
let altair_block = &altair_signed_block let altair_block = &altair_signed_block
.as_altair() .as_altair()
.expect("test expects an altair block") .expect("test expects an altair block")
@ -1289,7 +1289,7 @@ async fn add_altair_block_to_base_chain() {
// Produce an altair block. // Produce an altair block.
let state = harness.get_current_state(); let state = harness.get_current_state();
let slot = harness.get_current_slot(); let slot = harness.get_current_slot();
let (base_signed_block, _) = harness.make_block(state.clone(), slot).await; let ((base_signed_block, _), _) = harness.make_block(state.clone(), slot).await;
let base_block = &base_signed_block let base_block = &base_signed_block
.as_base() .as_base()
.expect("test expects a base block") .expect("test expects a base block")


@ -223,7 +223,7 @@ impl InvalidPayloadRig {
let head = self.harness.chain.head_snapshot(); let head = self.harness.chain.head_snapshot();
let state = head.beacon_state.clone_with_only_committee_caches(); let state = head.beacon_state.clone_with_only_committee_caches();
let slot = slot_override.unwrap_or(state.slot() + 1); let slot = slot_override.unwrap_or(state.slot() + 1);
let (block, post_state) = self.harness.make_block(state, slot).await; let ((block, _), post_state) = self.harness.make_block(state, slot).await;
let block_root = block.canonical_root(); let block_root = block.canonical_root();
let set_new_payload = |payload: Payload| match payload { let set_new_payload = |payload: Payload| match payload {
@ -691,7 +691,8 @@ async fn invalidates_all_descendants() {
.state_at_slot(fork_parent_slot, StateSkipConfig::WithStateRoots) .state_at_slot(fork_parent_slot, StateSkipConfig::WithStateRoots)
.unwrap(); .unwrap();
assert_eq!(fork_parent_state.slot(), fork_parent_slot); assert_eq!(fork_parent_state.slot(), fork_parent_slot);
let (fork_block, _fork_post_state) = rig.harness.make_block(fork_parent_state, fork_slot).await; let ((fork_block, _), _fork_post_state) =
rig.harness.make_block(fork_parent_state, fork_slot).await;
let fork_block_root = rig let fork_block_root = rig
.harness .harness
.chain .chain
@ -789,7 +790,8 @@ async fn switches_heads() {
.state_at_slot(fork_parent_slot, StateSkipConfig::WithStateRoots) .state_at_slot(fork_parent_slot, StateSkipConfig::WithStateRoots)
.unwrap(); .unwrap();
assert_eq!(fork_parent_state.slot(), fork_parent_slot); assert_eq!(fork_parent_state.slot(), fork_parent_slot);
let (fork_block, _fork_post_state) = rig.harness.make_block(fork_parent_state, fork_slot).await; let ((fork_block, _), _fork_post_state) =
rig.harness.make_block(fork_parent_state, fork_slot).await;
let fork_parent_root = fork_block.parent_root(); let fork_parent_root = fork_block.parent_root();
let fork_block_root = rig let fork_block_root = rig
.harness .harness
@ -1033,8 +1035,8 @@ async fn invalid_parent() {
// Produce another block atop the parent, but don't import yet. // Produce another block atop the parent, but don't import yet.
let slot = parent_block.slot() + 1; let slot = parent_block.slot() + 1;
rig.harness.set_current_slot(slot); rig.harness.set_current_slot(slot);
let (block, state) = rig.harness.make_block(parent_state, slot).await; let (block_tuple, state) = rig.harness.make_block(parent_state, slot).await;
let block = Arc::new(block); let block = Arc::new(block_tuple.0);
let block_root = block.canonical_root(); let block_root = block.canonical_root();
assert_eq!(block.parent_root(), parent_root); assert_eq!(block.parent_root(), parent_root);
@ -1850,8 +1852,8 @@ impl InvalidHeadSetup {
.chain .chain
.state_at_slot(slot - 1, StateSkipConfig::WithStateRoots) .state_at_slot(slot - 1, StateSkipConfig::WithStateRoots)
.unwrap(); .unwrap();
let (fork_block, _) = rig.harness.make_block(parent_state, slot).await; let (fork_block_tuple, _) = rig.harness.make_block(parent_state, slot).await;
opt_fork_block = Some(Arc::new(fork_block)); opt_fork_block = Some(Arc::new(fork_block_tuple.0));
} else { } else {
// Skipped slot. // Skipped slot.
}; };


@ -2022,7 +2022,7 @@ async fn garbage_collect_temp_states_from_failed_block() {
let genesis_state = harness.get_current_state(); let genesis_state = harness.get_current_state();
let block_slot = Slot::new(2 * slots_per_epoch); let block_slot = Slot::new(2 * slots_per_epoch);
let (signed_block, state) = harness.make_block(genesis_state, block_slot).await; let ((signed_block, _), state) = harness.make_block(genesis_state, block_slot).await;
let (mut block, _) = signed_block.deconstruct(); let (mut block, _) = signed_block.deconstruct();
@ -2422,7 +2422,7 @@ async fn revert_minority_fork_on_resume() {
harness1.process_attestations(attestations.clone()); harness1.process_attestations(attestations.clone());
harness2.process_attestations(attestations); harness2.process_attestations(attestations);
let (block, new_state) = harness1.make_block(state, slot).await; let ((block, _), new_state) = harness1.make_block(state, slot).await;
harness1 harness1
.process_block(slot, block.canonical_root(), block.clone()) .process_block(slot, block.canonical_root(), block.clone())
@ -2463,7 +2463,7 @@ async fn revert_minority_fork_on_resume() {
harness2.process_attestations(attestations); harness2.process_attestations(attestations);
// Minority chain block (no attesters). // Minority chain block (no attesters).
let (block1, new_state1) = harness1.make_block(state1, slot).await; let ((block1, _), new_state1) = harness1.make_block(state1, slot).await;
harness1 harness1
.process_block(slot, block1.canonical_root(), block1) .process_block(slot, block1.canonical_root(), block1)
.await .await
@ -2471,7 +2471,7 @@ async fn revert_minority_fork_on_resume() {
state1 = new_state1; state1 = new_state1;
// Majority chain block (all attesters). // Majority chain block (all attesters).
let (block2, new_state2) = harness2.make_block(state2, slot).await; let ((block2, _), new_state2) = harness2.make_block(state2, slot).await;
harness2 harness2
.process_block(slot, block2.canonical_root(), block2.clone()) .process_block(slot, block2.canonical_root(), block2.clone())
.await .await


@ -25,6 +25,7 @@ hex = "0.4.2"
eth2_ssz = "0.4.1" eth2_ssz = "0.4.1"
eth2_ssz_types = "0.2.2" eth2_ssz_types = "0.2.2"
eth2 = { path = "../../common/eth2" } eth2 = { path = "../../common/eth2" }
kzg = { path = "../../crypto/kzg" }
state_processing = { path = "../../consensus/state_processing" } state_processing = { path = "../../consensus/state_processing" }
superstruct = "0.6.0" superstruct = "0.6.0"
lru = "0.7.1" lru = "0.7.1"


@ -16,12 +16,14 @@ use serde::{Deserialize, Serialize};
use std::convert::TryFrom; use std::convert::TryFrom;
use strum::IntoStaticStr; use strum::IntoStaticStr;
use superstruct::superstruct; use superstruct::superstruct;
use types::beacon_block_body::KzgCommitments;
use types::blob_sidecar::Blobs;
pub use types::{ pub use types::{
Address, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadHeader, Address, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadHeader,
ExecutionPayloadRef, FixedVector, ForkName, Hash256, Transactions, Uint256, VariableList, ExecutionPayloadRef, FixedVector, ForkName, Hash256, Transactions, Uint256, VariableList,
Withdrawal, Withdrawals, Withdrawal, Withdrawals,
}; };
use types::{ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge}; use types::{ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, KzgProofs};
pub mod auth; pub mod auth;
pub mod http; pub mod http;
@ -377,6 +379,8 @@ pub struct GetPayloadResponse<T: EthSpec> {
#[superstruct(only(Deneb), partial_getter(rename = "execution_payload_deneb"))] #[superstruct(only(Deneb), partial_getter(rename = "execution_payload_deneb"))]
pub execution_payload: ExecutionPayloadDeneb<T>, pub execution_payload: ExecutionPayloadDeneb<T>,
pub block_value: Uint256, pub block_value: Uint256,
#[superstruct(only(Deneb))]
pub blobs_bundle: BlobsBundleV1<T>,
} }
impl<'a, T: EthSpec> From<GetPayloadResponseRef<'a, T>> for ExecutionPayloadRef<'a, T> { impl<'a, T: EthSpec> From<GetPayloadResponseRef<'a, T>> for ExecutionPayloadRef<'a, T> {
@ -395,20 +399,25 @@ impl<T: EthSpec> From<GetPayloadResponse<T>> for ExecutionPayload<T> {
} }
} }
impl<T: EthSpec> From<GetPayloadResponse<T>> for (ExecutionPayload<T>, Uint256) { impl<T: EthSpec> From<GetPayloadResponse<T>>
for (ExecutionPayload<T>, Uint256, Option<BlobsBundleV1<T>>)
{
fn from(response: GetPayloadResponse<T>) -> Self { fn from(response: GetPayloadResponse<T>) -> Self {
match response { match response {
GetPayloadResponse::Merge(inner) => ( GetPayloadResponse::Merge(inner) => (
ExecutionPayload::Merge(inner.execution_payload), ExecutionPayload::Merge(inner.execution_payload),
inner.block_value, inner.block_value,
None,
), ),
GetPayloadResponse::Capella(inner) => ( GetPayloadResponse::Capella(inner) => (
ExecutionPayload::Capella(inner.execution_payload), ExecutionPayload::Capella(inner.execution_payload),
inner.block_value, inner.block_value,
None,
), ),
GetPayloadResponse::Deneb(inner) => ( GetPayloadResponse::Deneb(inner) => (
ExecutionPayload::Deneb(inner.execution_payload), ExecutionPayload::Deneb(inner.execution_payload),
inner.block_value, inner.block_value,
Some(inner.blobs_bundle),
), ),
} }
} }
@ -513,6 +522,13 @@ impl<E: EthSpec> ExecutionPayloadBodyV1<E> {
} }
} }
#[derive(Clone, Default, Debug, PartialEq)]
pub struct BlobsBundleV1<E: EthSpec> {
pub commitments: KzgCommitments<E>,
pub proofs: KzgProofs<E>,
pub blobs: Blobs<E>,
}
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug)]
pub struct EngineCapabilities { pub struct EngineCapabilities {
pub new_payload_v1: bool, pub new_payload_v1: bool,


@ -40,9 +40,6 @@ pub const ENGINE_GET_PAYLOAD_V2: &str = "engine_getPayloadV2";
pub const ENGINE_GET_PAYLOAD_V3: &str = "engine_getPayloadV3"; pub const ENGINE_GET_PAYLOAD_V3: &str = "engine_getPayloadV3";
pub const ENGINE_GET_PAYLOAD_TIMEOUT: Duration = Duration::from_secs(2); pub const ENGINE_GET_PAYLOAD_TIMEOUT: Duration = Duration::from_secs(2);
pub const ENGINE_GET_BLOBS_BUNDLE_V1: &str = "engine_getBlobsBundleV1";
pub const ENGINE_GET_BLOBS_BUNDLE_TIMEOUT: Duration = Duration::from_secs(2);
pub const ENGINE_FORKCHOICE_UPDATED_V1: &str = "engine_forkchoiceUpdatedV1"; pub const ENGINE_FORKCHOICE_UPDATED_V1: &str = "engine_forkchoiceUpdatedV1";
pub const ENGINE_FORKCHOICE_UPDATED_V2: &str = "engine_forkchoiceUpdatedV2"; pub const ENGINE_FORKCHOICE_UPDATED_V2: &str = "engine_forkchoiceUpdatedV2";
pub const ENGINE_FORKCHOICE_UPDATED_TIMEOUT: Duration = Duration::from_secs(8); pub const ENGINE_FORKCHOICE_UPDATED_TIMEOUT: Duration = Duration::from_secs(8);
@ -927,23 +924,6 @@ impl HttpJsonRpc {
} }
} }
pub async fn get_blobs_bundle_v1<T: EthSpec>(
&self,
payload_id: PayloadId,
) -> Result<JsonBlobsBundle<T>, Error> {
let params = json!([JsonPayloadIdRequest::from(payload_id)]);
let response: JsonBlobsBundle<T> = self
.rpc_request(
ENGINE_GET_BLOBS_BUNDLE_V1,
params,
ENGINE_GET_BLOBS_BUNDLE_TIMEOUT,
)
.await?;
Ok(response)
}
pub async fn forkchoice_updated_v1( pub async fn forkchoice_updated_v1(
&self, &self,
forkchoice_state: ForkchoiceState, forkchoice_state: ForkchoiceState,


@ -291,6 +291,8 @@ pub struct JsonGetPayloadResponse<T: EthSpec> {
pub execution_payload: JsonExecutionPayloadV3<T>, pub execution_payload: JsonExecutionPayloadV3<T>,
#[serde(with = "eth2_serde_utils::u256_hex_be")] #[serde(with = "eth2_serde_utils::u256_hex_be")]
pub block_value: Uint256, pub block_value: Uint256,
#[superstruct(only(V3))]
pub blobs_bundle: JsonBlobsBundleV1<T>,
} }
impl<T: EthSpec> From<JsonGetPayloadResponse<T>> for GetPayloadResponse<T> { impl<T: EthSpec> From<JsonGetPayloadResponse<T>> for GetPayloadResponse<T> {
@ -312,6 +314,7 @@ impl<T: EthSpec> From<JsonGetPayloadResponse<T>> for GetPayloadResponse<T> {
GetPayloadResponse::Deneb(GetPayloadResponseDeneb { GetPayloadResponse::Deneb(GetPayloadResponseDeneb {
execution_payload: response.execution_payload.into(), execution_payload: response.execution_payload.into(),
block_value: response.block_value, block_value: response.block_value,
blobs_bundle: response.blobs_bundle.into(),
}) })
} }
} }
@ -409,12 +412,31 @@ impl From<JsonPayloadAttributes> for PayloadAttributes {
} }
#[derive(Debug, PartialEq, Serialize, Deserialize)] #[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(bound = "T: EthSpec", rename_all = "camelCase")] #[serde(bound = "E: EthSpec", rename_all = "camelCase")]
pub struct JsonBlobsBundle<T: EthSpec> { pub struct JsonBlobsBundleV1<E: EthSpec> {
pub block_hash: ExecutionBlockHash, pub commitments: KzgCommitments<E>,
pub kzgs: KzgCommitments<T>, pub proofs: KzgProofs<E>,
#[serde(with = "ssz_types::serde_utils::list_of_hex_fixed_vec")] #[serde(with = "ssz_types::serde_utils::list_of_hex_fixed_vec")]
pub blobs: Blobs<T>, pub blobs: Blobs<E>,
}
impl<E: EthSpec> From<BlobsBundleV1<E>> for JsonBlobsBundleV1<E> {
fn from(blobs_bundle: BlobsBundleV1<E>) -> Self {
Self {
commitments: blobs_bundle.commitments,
proofs: blobs_bundle.proofs,
blobs: blobs_bundle.blobs,
}
}
}
impl<E: EthSpec> From<JsonBlobsBundleV1<E>> for BlobsBundleV1<E> {
fn from(json_blobs_bundle: JsonBlobsBundleV1<E>) -> Self {
Self {
commitments: json_blobs_bundle.commitments,
proofs: json_blobs_bundle.proofs,
blobs: json_blobs_bundle.blobs,
}
}
} }
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)] #[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]


@ -45,12 +45,12 @@ use types::beacon_block_body::KzgCommitments;
use types::blob_sidecar::Blobs; use types::blob_sidecar::Blobs;
use types::consts::deneb::BLOB_TX_TYPE; use types::consts::deneb::BLOB_TX_TYPE;
use types::transaction::{AccessTuple, BlobTransaction, EcdsaSignature, SignedBlobTransaction}; use types::transaction::{AccessTuple, BlobTransaction, EcdsaSignature, SignedBlobTransaction};
use types::Withdrawals;
use types::{AbstractExecPayload, BeaconStateError, ExecPayload, VersionedHash}; use types::{AbstractExecPayload, BeaconStateError, ExecPayload, VersionedHash};
use types::{ use types::{
BlindedPayload, BlockType, ChainSpec, Epoch, ExecutionBlockHash, ExecutionPayload, BlindedPayload, BlockType, ChainSpec, Epoch, ExecutionBlockHash, ExecutionPayload,
ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, ForkName, ExecutionPayloadCapella, ExecutionPayloadDeneb, ExecutionPayloadMerge, ForkName,
}; };
use types::{KzgProofs, Withdrawals};
use types::{ use types::{
ProposerPreparationData, PublicKeyBytes, Signature, SignedBeaconBlock, Slot, Transaction, ProposerPreparationData, PublicKeyBytes, Signature, SignedBeaconBlock, Slot, Transaction,
Uint256, Uint256,
@ -141,22 +141,53 @@ pub enum BlockProposalContents<T: EthSpec, Payload: AbstractExecPayload<T>> {
block_value: Uint256, block_value: Uint256,
kzg_commitments: KzgCommitments<T>, kzg_commitments: KzgCommitments<T>,
blobs: Blobs<T>, blobs: Blobs<T>,
proofs: KzgProofs<T>,
}, },
} }
impl<E: EthSpec, Payload: AbstractExecPayload<E>> From<GetPayloadResponse<E>>
for BlockProposalContents<E, Payload>
{
fn from(response: GetPayloadResponse<E>) -> Self {
let (execution_payload, block_value, maybe_bundle) = response.into();
match maybe_bundle {
Some(bundle) => Self::PayloadAndBlobs {
payload: execution_payload.into(),
block_value,
kzg_commitments: bundle.commitments,
blobs: bundle.blobs,
proofs: bundle.proofs,
},
None => Self::Payload {
payload: execution_payload.into(),
block_value,
},
}
}
}
#[allow(clippy::type_complexity)]
impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Payload> { impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Payload> {
pub fn deconstruct(self) -> (Payload, Option<KzgCommitments<T>>, Option<Blobs<T>>) { pub fn deconstruct(
self,
) -> (
Payload,
Option<KzgCommitments<T>>,
Option<Blobs<T>>,
Option<KzgProofs<T>>,
) {
match self { match self {
Self::Payload { Self::Payload {
payload, payload,
block_value: _, block_value: _,
} => (payload, None, None), } => (payload, None, None, None),
Self::PayloadAndBlobs { Self::PayloadAndBlobs {
payload, payload,
block_value: _, block_value: _,
kzg_commitments, kzg_commitments,
blobs, blobs,
} => (payload, Some(kzg_commitments), Some(blobs)), proofs,
} => (payload, Some(kzg_commitments), Some(blobs), Some(proofs)),
} }
} }
@ -171,6 +202,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value: _, block_value: _,
kzg_commitments: _, kzg_commitments: _,
blobs: _, blobs: _,
proofs: _,
} => payload, } => payload,
} }
} }
@ -185,6 +217,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value: _, block_value: _,
kzg_commitments: _, kzg_commitments: _,
blobs: _, blobs: _,
proofs: _,
} => payload, } => payload,
} }
} }
@ -199,6 +232,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value, block_value,
kzg_commitments: _, kzg_commitments: _,
blobs: _, blobs: _,
proofs: _,
} => block_value, } => block_value,
} }
} }
@ -215,6 +249,7 @@ impl<T: EthSpec, Payload: AbstractExecPayload<T>> BlockProposalContents<T, Paylo
block_value: Uint256::zero(), block_value: Uint256::zero(),
blobs: VariableList::default(), blobs: VariableList::default(),
kzg_commitments: VariableList::default(), kzg_commitments: VariableList::default(),
proofs: VariableList::default(),
}, },
}) })
} }
@ -1116,25 +1151,7 @@ impl<T: EthSpec> ExecutionLayer<T> {
} }
}; };
let blob_fut = async { let payload_response = async {
match current_fork {
ForkName::Base | ForkName::Altair | ForkName::Merge | ForkName::Capella => {
None
}
ForkName::Deneb => {
debug!(
self.log(),
"Issuing engine_getBlobsBundle";
"suggested_fee_recipient" => ?payload_attributes.suggested_fee_recipient(),
"prev_randao" => ?payload_attributes.prev_randao(),
"timestamp" => payload_attributes.timestamp(),
"parent_hash" => ?parent_hash,
);
Some(engine.api.get_blobs_bundle_v1::<T>(payload_id).await)
}
}
};
let payload_fut = async {
debug!( debug!(
self.log(), self.log(),
"Issuing engine_getPayload"; "Issuing engine_getPayload";
@ -1144,9 +1161,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
"parent_hash" => ?parent_hash, "parent_hash" => ?parent_hash,
); );
engine.api.get_payload::<T>(current_fork, payload_id).await engine.api.get_payload::<T>(current_fork, payload_id).await
}; }.await?;
let (blob, payload_response) = tokio::join!(blob_fut, payload_fut);
let (execution_payload, block_value) = payload_response.map(|payload_response| {
if payload_response.execution_payload_ref().fee_recipient() != payload_attributes.suggested_fee_recipient() { if payload_response.execution_payload_ref().fee_recipient() != payload_attributes.suggested_fee_recipient() {
error!( error!(
self.log(), self.log(),
@ -1167,22 +1183,8 @@ impl<T: EthSpec> ExecutionLayer<T> {
attempts." attempts."
); );
} }
payload_response.into()
})?; Ok(payload_response.into())
if let Some(blob) = blob.transpose()? {
// FIXME(sean) cache blobs
Ok(BlockProposalContents::PayloadAndBlobs {
payload: execution_payload.into(),
block_value,
blobs: blob.blobs,
kzg_commitments: blob.kzgs,
})
} else {
Ok(BlockProposalContents::Payload {
payload: execution_payload.into(),
block_value,
})
}
}) })
.await .await
.map_err(Box::new) .map_err(Box::new)
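
With the proofs field added above, BlockProposalContents::deconstruct now yields four elements; the three blob-related values are None for pre-Deneb payloads. A call-site sketch (variable names illustrative):

let (payload, kzg_commitments_opt, blobs_opt, proofs_opt) =
    proposal_contents.deconstruct();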


@ -6,15 +6,21 @@ use crate::{
}, },
ExecutionBlock, PayloadAttributes, PayloadId, PayloadStatusV1, PayloadStatusV1Status, ExecutionBlock, PayloadAttributes, PayloadId, PayloadStatusV1, PayloadStatusV1Status,
}, },
ExecutionBlockWithTransactions, BlobsBundleV1, ExecutionBlockWithTransactions,
}; };
use kzg::{Kzg, BYTES_PER_BLOB, BYTES_PER_FIELD_ELEMENT, FIELD_ELEMENTS_PER_BLOB};
use rand::RngCore;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ssz::Encode;
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc;
use tree_hash::TreeHash; use tree_hash::TreeHash;
use tree_hash_derive::TreeHash; use tree_hash_derive::TreeHash;
use types::transaction::{BlobTransaction, EcdsaSignature, SignedBlobTransaction};
use types::{ use types::{
EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadCapella, ExecutionPayloadDeneb, Blob, EthSpec, ExecutionBlockHash, ExecutionPayload, ExecutionPayloadCapella,
ExecutionPayloadMerge, ForkName, Hash256, Uint256, ExecutionPayloadDeneb, ExecutionPayloadMerge, ForkName, Hash256, Transaction, Transactions,
Uint256,
}; };
const GAS_LIMIT: u64 = 16384; const GAS_LIMIT: u64 = 16384;
@ -119,6 +125,11 @@ pub struct ExecutionBlockGenerator<T: EthSpec> {
*/ */
pub shanghai_time: Option<u64>, // withdrawals pub shanghai_time: Option<u64>, // withdrawals
pub deneb_time: Option<u64>, // 4844 pub deneb_time: Option<u64>, // 4844
/*
* deneb stuff
*/
pub blobs_bundles: HashMap<PayloadId, BlobsBundleV1<T>>,
pub kzg: Option<Arc<Kzg>>,
} }
impl<T: EthSpec> ExecutionBlockGenerator<T> { impl<T: EthSpec> ExecutionBlockGenerator<T> {
@ -128,6 +139,7 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
terminal_block_hash: ExecutionBlockHash, terminal_block_hash: ExecutionBlockHash,
shanghai_time: Option<u64>, shanghai_time: Option<u64>,
deneb_time: Option<u64>, deneb_time: Option<u64>,
kzg: Option<Kzg>,
) -> Self { ) -> Self {
let mut gen = Self { let mut gen = Self {
head_block: <_>::default(), head_block: <_>::default(),
@ -142,6 +154,8 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
payload_ids: <_>::default(), payload_ids: <_>::default(),
shanghai_time, shanghai_time,
deneb_time, deneb_time,
blobs_bundles: <_>::default(),
kzg: kzg.map(Arc::new),
}; };
gen.insert_pow_block(0).unwrap(); gen.insert_pow_block(0).unwrap();
@ -394,6 +408,11 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
self.payload_ids.get(id).cloned() self.payload_ids.get(id).cloned()
} }
pub fn get_blobs_bundle(&mut self, id: &PayloadId) -> Option<BlobsBundleV1<T>> {
// remove it to free memory
self.blobs_bundles.remove(id)
}
pub fn new_payload(&mut self, payload: ExecutionPayload<T>) -> PayloadStatusV1 { pub fn new_payload(&mut self, payload: ExecutionPayload<T>) -> PayloadStatusV1 {
let parent = if let Some(parent) = self.blocks.get(&payload.parent_hash()) { let parent = if let Some(parent) = self.blocks.get(&payload.parent_hash()) {
parent parent
@ -561,6 +580,22 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
} }
}; };
match execution_payload.fork_name() {
ForkName::Base | ForkName::Altair | ForkName::Merge | ForkName::Capella => {}
ForkName::Deneb => {
// get random number between 0 and Max Blobs
let num_blobs = rand::random::<usize>() % T::max_blobs_per_block();
let (bundle, transactions) = self.generate_random_blobs(num_blobs)?;
for tx in Vec::from(transactions) {
execution_payload
.transactions_mut()
.push(tx)
.map_err(|_| "transactions are full".to_string())?;
}
self.blobs_bundles.insert(id, bundle);
}
}
*execution_payload.block_hash_mut() = *execution_payload.block_hash_mut() =
ExecutionBlockHash::from_root(execution_payload.tree_hash_root()); ExecutionBlockHash::from_root(execution_payload.tree_hash_root());
@ -590,6 +625,88 @@ impl<T: EthSpec> ExecutionBlockGenerator<T> {
payload_id: id.map(Into::into), payload_id: id.map(Into::into),
}) })
} }
fn generate_random_blobs(
&self,
n_blobs: usize,
) -> Result<(BlobsBundleV1<T>, Transactions<T>), String> {
let mut bundle = BlobsBundleV1::<T>::default();
let mut transactions = vec![];
for blob_index in 0..n_blobs {
// fill a vector with random bytes
let mut blob_bytes = [0u8; BYTES_PER_BLOB];
rand::thread_rng().fill_bytes(&mut blob_bytes);
// Ensure that the blob is canonical by ensuring that
// each field element contained in the blob is < BLS_MODULUS
for i in 0..FIELD_ELEMENTS_PER_BLOB {
blob_bytes[i * BYTES_PER_FIELD_ELEMENT + BYTES_PER_FIELD_ELEMENT - 1] = 0;
}
let blob = Blob::<T>::new(Vec::from(blob_bytes))
.map_err(|e| format!("error constructing random blob: {:?}", e))?;
let commitment = self
.kzg
.as_ref()
.ok_or("kzg not initialized")?
.blob_to_kzg_commitment(blob_bytes.into())
.map_err(|e| format!("error computing kzg commitment: {:?}", e))?;
let proof = self
.kzg
.as_ref()
.ok_or("kzg not initialized")?
.compute_blob_kzg_proof(blob_bytes.into(), commitment)
.map_err(|e| format!("error computing kzg proof: {:?}", e))?;
let versioned_hash = commitment.calculate_versioned_hash();
let blob_transaction = BlobTransaction {
chain_id: Default::default(),
nonce: 0,
max_priority_fee_per_gas: Default::default(),
max_fee_per_gas: Default::default(),
gas: 100000,
to: None,
value: Default::default(),
data: Default::default(),
access_list: Default::default(),
max_fee_per_data_gas: Default::default(),
versioned_hashes: vec![versioned_hash].into(),
};
let bad_signature = EcdsaSignature {
y_parity: false,
r: Uint256::from(0),
s: Uint256::from(0),
};
let signed_blob_transaction = SignedBlobTransaction {
message: blob_transaction,
signature: bad_signature,
};
// calculate transaction bytes
let tx_bytes = [0x05u8]
.into_iter()
.chain(signed_blob_transaction.as_ssz_bytes().into_iter())
.collect::<Vec<_>>();
let tx = Transaction::<T::MaxBytesPerTransaction>::from(tx_bytes);
transactions.push(tx);
bundle
.blobs
.push(blob)
.map_err(|_| format!("blobs are full, blob index: {:?}", blob_index))?;
bundle
.commitments
.push(commitment)
.map_err(|_| format!("blobs are full, blob index: {:?}", blob_index))?;
bundle
.proofs
.push(proof)
.map_err(|_| format!("blobs are full, blob index: {:?}", blob_index))?;
}
Ok((bundle, transactions.into()))
}
} }
fn payload_id_from_u64(n: u64) -> PayloadId { fn payload_id_from_u64(n: u64) -> PayloadId {
@ -650,6 +767,7 @@ mod test {
ExecutionBlockHash::zero(), ExecutionBlockHash::zero(),
None, None,
None, None,
None,
); );
for i in 0..=TERMINAL_BLOCK { for i in 0..=TERMINAL_BLOCK {


@ -224,6 +224,8 @@ pub async fn handle_rpc<T: EthSpec>(
) )
})?; })?;
let maybe_blobs = ctx.execution_block_generator.write().get_blobs_bundle(&id);
// validate method called correctly according to shanghai fork time // validate method called correctly according to shanghai fork time
if ctx if ctx
.execution_block_generator .execution_block_generator
@ -291,6 +293,12 @@ pub async fn handle_rpc<T: EthSpec>(
serde_json::to_value(JsonGetPayloadResponseV3 { serde_json::to_value(JsonGetPayloadResponseV3 {
execution_payload, execution_payload,
block_value: DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI.into(), block_value: DEFAULT_MOCK_EL_PAYLOAD_VALUE_WEI.into(),
blobs_bundle: maybe_blobs
.ok_or((
"No blobs returned despite V3 Payload".to_string(),
GENERIC_ERROR_CODE,
))?
.into(),
}) })
.unwrap() .unwrap()
} }
@ -324,7 +332,7 @@ pub async fn handle_rpc<T: EthSpec>(
.map(|opt| opt.map(JsonPayloadAttributes::V1)) .map(|opt| opt.map(JsonPayloadAttributes::V1))
.transpose() .transpose()
} }
ForkName::Capella => { ForkName::Capella | ForkName::Deneb => {
get_param::<Option<JsonPayloadAttributesV2>>(params, 1) get_param::<Option<JsonPayloadAttributesV2>>(params, 1)
.map(|opt| opt.map(JsonPayloadAttributes::V2)) .map(|opt| opt.map(JsonPayloadAttributes::V2))
.transpose() .transpose()


@ -5,6 +5,7 @@ use crate::{
}, },
Config, *, Config, *,
}; };
use kzg::Kzg;
use sensitive_url::SensitiveUrl; use sensitive_url::SensitiveUrl;
use task_executor::TaskExecutor; use task_executor::TaskExecutor;
use tempfile::NamedTempFile; use tempfile::NamedTempFile;
@ -33,6 +34,7 @@ impl<T: EthSpec> MockExecutionLayer<T> {
Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()), Some(JwtKey::from_slice(&DEFAULT_JWT_SECRET).unwrap()),
spec, spec,
None, None,
None,
) )
} }
@ -46,6 +48,7 @@ impl<T: EthSpec> MockExecutionLayer<T> {
jwt_key: Option<JwtKey>, jwt_key: Option<JwtKey>,
spec: ChainSpec, spec: ChainSpec,
builder_url: Option<SensitiveUrl>, builder_url: Option<SensitiveUrl>,
kzg: Option<Kzg>,
) -> Self { ) -> Self {
let handle = executor.handle().unwrap(); let handle = executor.handle().unwrap();
@ -58,6 +61,7 @@ impl<T: EthSpec> MockExecutionLayer<T> {
spec.terminal_block_hash, spec.terminal_block_hash,
shanghai_time, shanghai_time,
deneb_time, deneb_time,
kzg,
); );
let url = SensitiveUrl::parse(&server.url()).unwrap(); let url = SensitiveUrl::parse(&server.url()).unwrap();


@ -8,6 +8,7 @@ use bytes::Bytes;
use environment::null_logger; use environment::null_logger;
use execution_block_generator::PoWBlock; use execution_block_generator::PoWBlock;
use handle_rpc::handle_rpc; use handle_rpc::handle_rpc;
use kzg::Kzg;
use parking_lot::{Mutex, RwLock, RwLockWriteGuard}; use parking_lot::{Mutex, RwLock, RwLockWriteGuard};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
@ -96,10 +97,15 @@ impl<T: EthSpec> MockServer<T> {
ExecutionBlockHash::zero(), ExecutionBlockHash::zero(),
None, // FIXME(capella): should this be the default? None, // FIXME(capella): should this be the default?
None, // FIXME(deneb): should this be the default? None, // FIXME(deneb): should this be the default?
None, // FIXME(deneb): should this be the default?
) )
} }
pub fn new_with_config(handle: &runtime::Handle, config: MockExecutionConfig) -> Self { pub fn new_with_config(
handle: &runtime::Handle,
config: MockExecutionConfig,
kzg: Option<Kzg>,
) -> Self {
let MockExecutionConfig { let MockExecutionConfig {
jwt_key, jwt_key,
terminal_difficulty, terminal_difficulty,
@ -117,6 +123,7 @@ impl<T: EthSpec> MockServer<T> {
terminal_block_hash, terminal_block_hash,
shanghai_time, shanghai_time,
deneb_time, deneb_time,
kzg,
); );
let ctx: Arc<Context<T>> = Arc::new(Context { let ctx: Arc<Context<T>> = Arc::new(Context {
@ -168,6 +175,7 @@ impl<T: EthSpec> MockServer<T> {
*self.ctx.engine_capabilities.write() = engine_capabilities; *self.ctx.engine_capabilities.write() = engine_capabilities;
} }
#[allow(clippy::too_many_arguments)]
pub fn new( pub fn new(
handle: &runtime::Handle, handle: &runtime::Handle,
jwt_key: JwtKey, jwt_key: JwtKey,
@ -176,6 +184,7 @@ impl<T: EthSpec> MockServer<T> {
terminal_block_hash: ExecutionBlockHash, terminal_block_hash: ExecutionBlockHash,
shanghai_time: Option<u64>, shanghai_time: Option<u64>,
deneb_time: Option<u64>, deneb_time: Option<u64>,
kzg: Option<Kzg>,
) -> Self { ) -> Self {
Self::new_with_config( Self::new_with_config(
handle, handle,
@ -188,6 +197,7 @@ impl<T: EthSpec> MockServer<T> {
shanghai_time, shanghai_time,
deneb_time, deneb_time,
}, },
kzg,
) )
} }


@ -480,7 +480,7 @@ pub async fn proposer_boost_re_org_test(
// Produce block B and process it halfway through the slot. // Produce block B and process it halfway through the slot.
let (block_b, mut state_b) = harness.make_block(state_a.clone(), slot_b).await; let (block_b, mut state_b) = harness.make_block(state_a.clone(), slot_b).await;
let block_b_root = block_b.canonical_root(); let block_b_root = block_b.0.canonical_root();
let obs_time = slot_clock.start_of(slot_b).unwrap() + slot_clock.slot_duration() / 2; let obs_time = slot_clock.start_of(slot_b).unwrap() + slot_clock.slot_duration() / 2;
slot_clock.set_current_time(obs_time); slot_clock.set_current_time(obs_time);
@ -573,8 +573,18 @@ pub async fn proposer_boost_re_org_test(
// Check the fork choice updates that were sent. // Check the fork choice updates that were sent.
let forkchoice_updates = forkchoice_updates.lock(); let forkchoice_updates = forkchoice_updates.lock();
let block_a_exec_hash = block_a.message().execution_payload().unwrap().block_hash(); let block_a_exec_hash = block_a
let block_b_exec_hash = block_b.message().execution_payload().unwrap().block_hash(); .0
.message()
.execution_payload()
.unwrap()
.block_hash();
let block_b_exec_hash = block_b
.0
.message()
.execution_payload()
.unwrap()
.block_hash();
let block_c_timestamp = block_c.message().execution_payload().unwrap().timestamp(); let block_c_timestamp = block_c.message().execution_payload().unwrap().timestamp();
@ -679,7 +689,7 @@ pub async fn fork_choice_before_proposal() {
let state_a = harness.get_current_state(); let state_a = harness.get_current_state();
let (block_b, state_b) = harness.make_block(state_a.clone(), slot_b).await; let (block_b, state_b) = harness.make_block(state_a.clone(), slot_b).await;
let block_root_b = harness let block_root_b = harness
.process_block(slot_b, block_b.canonical_root(), block_b) .process_block(slot_b, block_b.0.canonical_root(), block_b)
.await .await
.unwrap(); .unwrap();
@ -694,7 +704,7 @@ pub async fn fork_choice_before_proposal() {
let (block_c, state_c) = harness.make_block(state_a, slot_c).await; let (block_c, state_c) = harness.make_block(state_a, slot_c).await;
let block_root_c = harness let block_root_c = harness
.process_block(slot_c, block_c.canonical_root(), block_c.clone()) .process_block(slot_c, block_c.0.canonical_root(), block_c.clone())
.await .await
.unwrap(); .unwrap();


@ -107,7 +107,7 @@ impl TestRig {
"precondition: current slot is one after head" "precondition: current slot is one after head"
); );
let (next_block, next_state) = harness let (next_block_tuple, next_state) = harness
.make_block(head.beacon_state.clone(), harness.chain.slot().unwrap()) .make_block(head.beacon_state.clone(), harness.chain.slot().unwrap())
.await; .await;
@ -133,9 +133,9 @@ impl TestRig {
.get_unaggregated_attestations( .get_unaggregated_attestations(
&AttestationStrategy::AllValidators, &AttestationStrategy::AllValidators,
&next_state, &next_state,
next_block.state_root(), next_block_tuple.0.state_root(),
next_block.canonical_root(), next_block_tuple.0.canonical_root(),
next_block.slot(), next_block_tuple.0.slot(),
) )
.into_iter() .into_iter()
.flatten() .flatten()
@ -145,9 +145,9 @@ impl TestRig {
.make_attestations( .make_attestations(
&harness.get_all_validators(), &harness.get_all_validators(),
&next_state, &next_state,
next_block.state_root(), next_block_tuple.0.state_root(),
next_block.canonical_root().into(), next_block_tuple.0.canonical_root().into(),
next_block.slot(), next_block_tuple.0.slot(),
) )
.into_iter() .into_iter()
.filter_map(|(_, aggregate_opt)| aggregate_opt) .filter_map(|(_, aggregate_opt)| aggregate_opt)
@ -209,7 +209,7 @@ impl TestRig {
Self { Self {
chain, chain,
next_block: Arc::new(next_block), next_block: Arc::new(next_block_tuple.0),
attestations, attestations,
next_block_attestations, next_block_attestations,
next_block_aggregate_attestations, next_block_aggregate_attestations,


@ -179,15 +179,15 @@ impl ForkChoiceTest {
let slot = self.harness.get_current_slot(); let slot = self.harness.get_current_slot();
let (block, state_) = self.harness.make_block(state, slot).await; let (block, state_) = self.harness.make_block(state, slot).await;
state = state_; state = state_;
if !predicate(block.message(), &state) { if !predicate(block.0.message(), &state) {
break; break;
} }
if let Ok(block_hash) = self.harness.process_block_result(block.clone()).await { if let Ok(block_hash) = self.harness.process_block_result(block.clone()).await {
self.harness.attest_block( self.harness.attest_block(
&state, &state,
block.state_root(), block.0.state_root(),
block_hash, block_hash,
&block, &block.0,
&validators, &validators,
); );
self.harness.advance_slot(); self.harness.advance_slot();
@ -273,8 +273,8 @@ impl ForkChoiceTest {
) )
.unwrap(); .unwrap();
let slot = self.harness.get_current_slot(); let slot = self.harness.get_current_slot();
let (mut signed_block, mut state) = self.harness.make_block(state, slot).await; let (mut block_tuple, mut state) = self.harness.make_block(state, slot).await;
func(&mut signed_block, &mut state); func(&mut block_tuple.0, &mut state);
let current_slot = self.harness.get_current_slot(); let current_slot = self.harness.get_current_slot();
self.harness self.harness
.chain .chain
@ -282,8 +282,8 @@ impl ForkChoiceTest {
.fork_choice_write_lock() .fork_choice_write_lock()
.on_block( .on_block(
current_slot, current_slot,
signed_block.message(), block_tuple.0.message(),
signed_block.canonical_root(), block_tuple.0.canonical_root(),
Duration::from_secs(0), Duration::from_secs(0),
&state, &state,
PayloadVerificationStatus::Verified, PayloadVerificationStatus::Verified,
@ -315,8 +315,8 @@ impl ForkChoiceTest {
) )
.unwrap(); .unwrap();
let slot = self.harness.get_current_slot(); let slot = self.harness.get_current_slot();
let (mut signed_block, mut state) = self.harness.make_block(state, slot).await; let (mut block_tuple, mut state) = self.harness.make_block(state, slot).await;
mutation_func(&mut signed_block, &mut state); mutation_func(&mut block_tuple.0, &mut state);
let current_slot = self.harness.get_current_slot(); let current_slot = self.harness.get_current_slot();
let err = self let err = self
.harness .harness
@ -325,8 +325,8 @@ impl ForkChoiceTest {
.fork_choice_write_lock() .fork_choice_write_lock()
.on_block( .on_block(
current_slot, current_slot,
signed_block.message(), block_tuple.0.message(),
signed_block.canonical_root(), block_tuple.0.canonical_root(),
Duration::from_secs(0), Duration::from_secs(0),
&state, &state,
PayloadVerificationStatus::Verified, PayloadVerificationStatus::Verified,


@ -1,5 +1,6 @@
use crate::test_utils::TestRandom; use crate::test_utils::TestRandom;
use crate::{Blob, EthSpec, Hash256, SignedRoot, Slot}; use crate::{Blob, ChainSpec, Domain, EthSpec, Fork, Hash256, SignedBlobSidecar, SignedRoot, Slot};
use bls::SecretKey;
use derivative::Derivative; use derivative::Derivative;
use kzg::{KzgCommitment, KzgProof}; use kzg::{KzgCommitment, KzgProof};
use serde_derive::{Deserialize, Serialize}; use serde_derive::{Deserialize, Serialize};
@ -72,7 +73,7 @@ impl<T: EthSpec> Ord for BlobSidecar<T> {
} }
pub type BlobSidecarList<T> = VariableList<Arc<BlobSidecar<T>>, <T as EthSpec>::MaxBlobsPerBlock>; pub type BlobSidecarList<T> = VariableList<Arc<BlobSidecar<T>>, <T as EthSpec>::MaxBlobsPerBlock>;
pub type Blobs<T> = VariableList<Blob<T>, <T as EthSpec>::MaxExtraDataBytes>; pub type Blobs<T> = VariableList<Blob<T>, <T as EthSpec>::MaxBlobsPerBlock>;
impl<T: EthSpec> SignedRoot for BlobSidecar<T> {} impl<T: EthSpec> SignedRoot for BlobSidecar<T> {}
@ -93,4 +94,28 @@ impl<T: EthSpec> BlobSidecar<T> {
// Fixed part // Fixed part
Self::empty().as_ssz_bytes().len() Self::empty().as_ssz_bytes().len()
} }
// this is mostly not used except for in testing
pub fn sign(
self: Arc<Self>,
secret_key: &SecretKey,
fork: &Fork,
genesis_validators_root: Hash256,
spec: &ChainSpec,
) -> SignedBlobSidecar<T> {
let signing_epoch = self.slot.epoch(T::slots_per_epoch());
let domain = spec.get_domain(
signing_epoch,
Domain::BlobSidecar,
fork,
genesis_validators_root,
);
let message = self.signing_root(domain);
let signature = secret_key.sign(message);
SignedBlobSidecar {
message: self,
signature,
}
}
} }
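
As the comment says, BlobSidecar::sign is mostly for testing; a hypothetical fragment matching the harness usage above (blob_sidecar: Arc<BlobSidecar<E>> and the remaining values assumed in scope):

let signed: SignedBlobSidecar<E> = blob_sidecar.sign(
    &validator_secret_key,
    &state.fork(),
    state.genesis_validators_root(),
    &spec,
);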


@ -204,6 +204,7 @@ pub type Address = H160;
pub type ForkVersion = [u8; 4]; pub type ForkVersion = [u8; 4];
pub type BLSFieldElement = Uint256; pub type BLSFieldElement = Uint256;
pub type Blob<T> = FixedVector<u8, <T as EthSpec>::BytesPerBlob>; pub type Blob<T> = FixedVector<u8, <T as EthSpec>::BytesPerBlob>;
pub type KzgProofs<T> = VariableList<KzgProof, <T as EthSpec>::MaxBlobsPerBlock>;
pub type VersionedHash = Hash256; pub type VersionedHash = Hash256;
pub type Hash64 = ethereum_types::H64; pub type Hash64 = ethereum_types::H64;


@ -1,18 +1,29 @@
use c_kzg::{Bytes48, BYTES_PER_COMMITMENT}; use c_kzg::{Bytes48, BYTES_PER_COMMITMENT};
use derivative::Derivative; use derivative::Derivative;
use eth2_hashing::hash_fixed;
use serde::de::{Deserialize, Deserializer}; use serde::de::{Deserialize, Deserializer};
use serde::ser::{Serialize, Serializer}; use serde::ser::{Serialize, Serializer};
use ssz_derive::{Decode, Encode}; use ssz_derive::{Decode, Encode};
use std::fmt; use std::fmt;
use std::fmt::{Debug, Display, Formatter}; use std::fmt::{Debug, Display, Formatter};
use std::str::FromStr; use std::str::FromStr;
use tree_hash::{PackedEncoding, TreeHash}; use tree_hash::{Hash256, PackedEncoding, TreeHash};
pub const BLOB_COMMITMENT_VERSION_KZG: u8 = 0x01;
#[derive(Derivative, Clone, Copy, Encode, Decode)] #[derive(Derivative, Clone, Copy, Encode, Decode)]
#[derivative(PartialEq, Eq, Hash)] #[derivative(PartialEq, Eq, Hash)]
#[ssz(struct_behaviour = "transparent")] #[ssz(struct_behaviour = "transparent")]
pub struct KzgCommitment(pub [u8; BYTES_PER_COMMITMENT]); pub struct KzgCommitment(pub [u8; BYTES_PER_COMMITMENT]);
impl KzgCommitment {
pub fn calculate_versioned_hash(&self) -> Hash256 {
let mut versioned_hash = hash_fixed(&self.0);
versioned_hash[0] = BLOB_COMMITMENT_VERSION_KZG;
Hash256::from_slice(versioned_hash.as_slice())
}
}
impl From<KzgCommitment> for Bytes48 { impl From<KzgCommitment> for Bytes48 {
fn from(value: KzgCommitment) -> Self { fn from(value: KzgCommitment) -> Self {
value.0.into() value.0.into()
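
calculate_versioned_hash implements the EIP-4844 versioned-hash rule: the SHA-256 of the 48-byte commitment with its first byte overwritten by the 0x01 version tag. A standalone sketch of the same computation, using the sha2 crate in place of eth2_hashing and an illustrative function name:

use sha2::{Digest, Sha256};

const BLOB_COMMITMENT_VERSION_KZG: u8 = 0x01;

// Same computation as KzgCommitment::calculate_versioned_hash.
fn versioned_hash(commitment: &[u8; 48]) -> [u8; 32] {
    let mut hash: [u8; 32] = Sha256::digest(commitment).into();
    hash[0] = BLOB_COMMITMENT_VERSION_KZG;
    hash
}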


@ -20,6 +20,7 @@ pub enum Error {
} }
/// A wrapper over a kzg library that holds the trusted setup parameters. /// A wrapper over a kzg library that holds the trusted setup parameters.
#[derive(Debug)]
pub struct Kzg { pub struct Kzg {
trusted_setup: KzgSettings, trusted_setup: KzgSettings,
} }


@ -236,7 +236,7 @@ impl<E: EthSpec> LocalExecutionNode<E> {
panic!("Failed to write jwt file {}", e); panic!("Failed to write jwt file {}", e);
} }
Self { Self {
server: MockServer::new_with_config(&context.executor.handle().unwrap(), config), server: MockServer::new_with_config(&context.executor.handle().unwrap(), config, None),
datadir, datadir,
} }
} }