add network config

Daniel Knopik 2022-09-17 20:55:21 +02:00
parent f9209e2d08
commit 76572db9d5
14 changed files with 159 additions and 7 deletions

View File

@@ -100,6 +100,7 @@ use types::*;
 pub use crate::canonical_head::{CanonicalHead, CanonicalHeadRwLock};
 pub use fork_choice::CountUnrealized;
 use types::kzg_commitment::KzgCommitment;
+use types::signed_blobs_sidecar::SignedBlobsSidecar;
 
 pub type ForkChoiceError = fork_choice::Error<crate::ForkChoiceStoreError>;
@@ -374,6 +375,8 @@ pub struct BeaconChain<T: BeaconChainTypes> {
     /// Sender given to tasks, so that if they encounter a state in which execution cannot
     /// continue they can request that everything shuts down.
     pub shutdown_sender: Sender<ShutdownReason>,
+    pub block_waiting_for_sidecar: Mutex<Option<GossipVerifiedBlock<T>>>,
+    pub sidecar_waiting_for_block: Mutex<Option<SignedBlobsSidecar<T::EthSpec>>>,
     /// Logging to CLI, etc.
     pub(crate) log: Logger,
     /// Arbitrary bytes included in the blocks.
@@ -2342,7 +2345,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
         // Import the blocks into the chain.
         for signature_verified_block in signature_verified_blocks {
             match self
-                .process_block(signature_verified_block, count_unrealized)
+                .process_block(signature_verified_block, None, count_unrealized)
                 .await
             {
                 Ok(_) => imported_blocks += 1,
@@ -2428,6 +2431,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
     pub async fn process_block<B: IntoExecutionPendingBlock<T>>(
         self: &Arc<Self>,
         unverified_block: B,
+        sidecar: Option<SignedBlobsSidecar<T::EthSpec>>,
         count_unrealized: CountUnrealized,
     ) -> Result<Hash256, BlockError<T::EthSpec>> {
         // Start the Prometheus timer.
@@ -2444,7 +2448,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
         let import_block = async move {
             let execution_pending = unverified_block.into_execution_pending_block(&chain)?;
             chain
-                .import_execution_pending_block(execution_pending, count_unrealized)
+                .import_execution_pending_block(execution_pending, sidecar, count_unrealized)
                 .await
         };
@@ -2502,6 +2506,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
     async fn import_execution_pending_block(
         self: Arc<Self>,
         execution_pending_block: ExecutionPendingBlock<T>,
+        sidecar: Option<SignedBlobsSidecar<T::EthSpec>>,
         count_unrealized: CountUnrealized,
     ) -> Result<Hash256, BlockError<T::EthSpec>> {
         let ExecutionPendingBlock {
@@ -2557,6 +2562,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
             move || {
                 chain.import_block(
                     block,
+                    sidecar,
                     block_root,
                     state,
                     confirmed_state_roots,
@@ -2579,6 +2585,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
     fn import_block(
         &self,
         signed_block: Arc<SignedBeaconBlock<T::EthSpec>>,
+        sidecar: Option<SignedBlobsSidecar<T::EthSpec>>,
         block_root: Hash256,
         mut state: BeaconState<T::EthSpec>,
         confirmed_state_roots: Vec<Hash256>,
@@ -2917,6 +2924,9 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
             .collect();
         ops.push(StoreOp::PutBlock(block_root, signed_block.clone()));
         ops.push(StoreOp::PutState(block.state_root(), &state));
+        if let Some(sidecar) = sidecar {
+            ops.push(StoreOp::PutBlobs(block_root, sidecar));
+        }
         let txn_lock = self.store.hot_db.begin_rw_transaction();
 
         if let Err(e) = self.store.do_atomically(ops) {
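
The two new Mutex<Option<...>> fields exist because gossip delivers a block and its blobs sidecar as separate messages, so whichever half arrives first has to be parked until its counterpart shows up; the gossip worker later in this commit implements the block-side half of that hand-off. A minimal, self-contained sketch of the pairing idea, using placeholder types rather than the Lighthouse ones:

    use std::sync::Mutex;

    // Placeholder types; the real code holds GossipVerifiedBlock and SignedBlobsSidecar.
    struct Block { root: u64 }
    struct Sidecar { beacon_block_root: u64 }

    #[derive(Default)]
    struct PairingSlots {
        block_waiting_for_sidecar: Mutex<Option<Block>>,
        sidecar_waiting_for_block: Mutex<Option<Sidecar>>,
    }

    impl PairingSlots {
        /// Called when a block that needs blobs arrives; returns the pair once both halves are here.
        fn on_block(&self, block: Block) -> Option<(Block, Sidecar)> {
            let mut waiting = self.sidecar_waiting_for_block.lock().unwrap();
            match waiting.take() {
                Some(sidecar) if sidecar.beacon_block_root == block.root => Some((block, sidecar)),
                other => {
                    *waiting = other; // put back a non-matching sidecar
                    *self.block_waiting_for_sidecar.lock().unwrap() = Some(block); // park the block
                    None
                }
            }
        }
    }

    fn main() {
        let slots = PairingSlots::default();
        assert!(slots.on_block(Block { root: 1 }).is_none()); // parked: no sidecar yet
        *slots.sidecar_waiting_for_block.lock().unwrap() = Some(Sidecar { beacon_block_root: 1 });
        assert!(slots.on_block(Block { root: 1 }).is_some()); // paired
    }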

View File

@@ -144,6 +144,7 @@ pub enum BlockError<T: EthSpec> {
         present_slot: Slot,
         block_slot: Slot,
     },
+    MissingSidecar,
     /// The block state_root does not match the generated state.
     ///
     /// ## Peer scoring
@@ -277,6 +278,7 @@ pub enum BlockError<T: EthSpec> {
     /// The peer sent us an invalid block, but I'm not really sure how to score this in an
     /// "optimistic" sync world.
     ParentExecutionPayloadInvalid { parent_root: Hash256 },
 }
 
 /// Returned when block validation failed due to some issue verifying

View File

@@ -797,6 +797,8 @@ where
             validator_pubkey_cache: TimeoutRwLock::new(validator_pubkey_cache),
             attester_cache: <_>::default(),
             early_attester_cache: <_>::default(),
+            block_waiting_for_sidecar: <_>::default(),
+            sidecar_waiting_for_block: <_>::default(),
             shutdown_sender: self
                 .shutdown_sender
                 .ok_or("Cannot build without a shutdown sender.")?,

View File

@@ -1458,7 +1458,7 @@ where
         self.set_current_slot(slot);
         let block_hash: SignedBeaconBlockHash = self
             .chain
-            .process_block(Arc::new(block), CountUnrealized::True)
+            .process_block(Arc::new(block), todo!(), CountUnrealized::True)
             .await?
             .into();
         self.chain.recompute_head_at_current_slot().await;
@@ -1471,7 +1471,7 @@ where
     ) -> Result<SignedBeaconBlockHash, BlockError<E>> {
         let block_hash: SignedBeaconBlockHash = self
             .chain
-            .process_block(Arc::new(block), CountUnrealized::True)
+            .process_block(Arc::new(block), todo!(), CountUnrealized::True)
             .await?
             .into();
         self.chain.recompute_head_at_current_slot().await;
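
Note that todo!() in argument position is evaluated before the call, so any harness test that reaches these methods will panic until a real sidecar value (or None) is threaded through. A one-line illustration with a hypothetical function:

    fn takes(_sidecar: Option<u32>) {}

    fn main() {
        takes(todo!()); // panics at runtime with "not yet implemented"
    }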

View File

@@ -32,7 +32,7 @@ pub async fn publish_block<T: BeaconChainTypes>(
     metrics::observe_duration(&metrics::HTTP_API_BLOCK_BROADCAST_DELAY_TIMES, delay);
 
     match chain
-        .process_block(block.clone(), CountUnrealized::True)
+        .process_block(block.clone(), None, CountUnrealized::True)
         .await
     {
         Ok(root) => {

View File

@@ -673,6 +673,7 @@ impl<T: BeaconChainTypes> Worker<T> {
             .await
         {
             let block_root = gossip_verified_block.block_root;
+            if let Some(handle) = duplicate_cache.check_and_insert(block_root) {
                 self.process_gossip_verified_block(
                     peer_id,
@@ -759,6 +760,9 @@ impl<T: BeaconChainTypes> Worker<T> {
                 verified_block
             }
+            Err(BlockError::MissingSidecar) => {
+                todo!(); // is this relevant?
+            }
             Err(BlockError::ParentUnknown(block)) => {
                 debug!(
                     self.log,
@@ -920,9 +924,24 @@ impl<T: BeaconChainTypes> Worker<T> {
     ) {
         let block: Arc<_> = verified_block.block.clone();
+        let sidecar = if verified_block.block.message()
+            .body().blob_kzg_commitments().map(|commitments| commitments.is_empty()).unwrap_or(true) {
+            None
+        } else if let Some(sidecar) = self.chain.sidecar_waiting_for_block.lock().as_ref() {
+            if sidecar.message.beacon_block_root == verified_block.block_root {
+                Some(sidecar.clone())
+            } else {
+                *self.chain.block_waiting_for_sidecar.lock() = Some(verified_block);
+                return;
+            }
+        } else {
+            // we need the sidecar but don't have it yet
+            return;
+        };
 
         match self
             .chain
-            .process_block(verified_block, CountUnrealized::True)
+            .process_block(verified_block, sidecar, CountUnrealized::True)
             .await
         {
             Ok(block_root) => {
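
The chained blob_kzg_commitments().map(...).unwrap_or(true) expression above computes whether the block can be imported without a sidecar: pre-EIP-4844 blocks (where the accessor fails) and blocks with an empty commitment list need none. A self-contained restatement of that predicate, with a placeholder body type in place of the real BeaconBlockBody accessor:

    /// Placeholder: `None` stands in for a pre-EIP-4844 body, where the real
    /// blob_kzg_commitments() accessor does not return a list.
    struct Body {
        blob_kzg_commitments: Option<Vec<[u8; 48]>>,
    }

    /// A block needs a blobs sidecar only if it carries at least one KZG commitment.
    fn needs_sidecar(body: &Body) -> bool {
        body.blob_kzg_commitments
            .as_ref()
            .map(|commitments| !commitments.is_empty())
            .unwrap_or(false)
    }

    fn main() {
        assert!(!needs_sidecar(&Body { blob_kzg_commitments: None }));         // pre-4844
        assert!(!needs_sidecar(&Body { blob_kzg_commitments: Some(vec![]) })); // no blobs
        assert!(needs_sidecar(&Body { blob_kzg_commitments: Some(vec![[0; 48]]) }));
    }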

View File

@@ -80,7 +80,7 @@ impl<T: BeaconChainTypes> Worker<T> {
             }
         };
         let slot = block.slot();
-        let result = self.chain.process_block(block, CountUnrealized::True).await;
+        let result = self.chain.process_block(block, None, CountUnrealized::True).await;
 
         metrics::inc_counter(&metrics::BEACON_PROCESSOR_RPC_BLOCK_IMPORTED_TOTAL);

View File

@@ -483,6 +483,19 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
         }
     }
 
+    pub fn blobs_as_kv_store_ops(
+        &self,
+        key: &Hash256,
+        blobs: &SignedBlobsSidecar<E>,
+        ops: &mut Vec<KeyValueStoreOp>,
+    ) {
+        let db_key = get_key_for_col(DBColumn::BeaconBlob.into(), key.as_bytes());
+        ops.push(KeyValueStoreOp::PutKeyValue(
+            db_key,
+            blobs.as_ssz_bytes(),
+        ));
+    }
+
     pub fn put_state_summary(
         &self,
         state_root: &Hash256,
@@ -710,6 +723,14 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
                     self.store_hot_state(&state_root, state, &mut key_value_batch)?;
                 }
 
+                StoreOp::PutBlobs(block_root, blobs) => {
+                    self.blobs_as_kv_store_ops(
+                        &block_root,
+                        &blobs,
+                        &mut key_value_batch,
+                    );
+                }
+
                 StoreOp::PutStateSummary(state_root, summary) => {
                     key_value_batch.push(summary.as_kv_store_op(state_root));
                 }
@@ -754,6 +775,7 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
         // Update the block cache whilst holding a lock, to ensure that the cache updates atomically
         // with the database.
         let mut guard = self.block_cache.lock();
+        let mut guard_blob = self.blob_cache.lock();
 
         for op in &batch {
             match op {
@@ -761,6 +783,10 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> HotColdDB<E, Hot, Cold>
                     guard.put(*block_root, (**block).clone());
                 }
 
+                StoreOp::PutBlobs(block_root, blobs) => {
+                    guard_blob.put(*block_root, blobs.clone());
+                }
+
                 StoreOp::PutState(_, _) => (),
 
                 StoreOp::PutStateSummary(_, _) => (),
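
For orientation on the storage side: do_atomically stages every op into one key-value write batch (blocks, and now blob sidecars, as SSZ bytes under their block root) and refreshes the in-memory caches while holding their locks, so cache and database cannot drift apart. A simplified, self-contained sketch of that pattern, assuming a plain HashMap in place of the real key-value store and LRU caches:

    use std::collections::HashMap;
    use std::sync::Mutex;

    enum StoreOp {
        PutBlock([u8; 32], Vec<u8>),
        PutBlobs([u8; 32], Vec<u8>),
    }

    #[derive(Default)]
    struct Store {
        kv: Mutex<HashMap<(&'static str, [u8; 32]), Vec<u8>>>, // (column, key) -> SSZ bytes
        block_cache: Mutex<HashMap<[u8; 32], Vec<u8>>>,
        blob_cache: Mutex<HashMap<[u8; 32], Vec<u8>>>,
    }

    impl Store {
        fn do_atomically(&self, batch: Vec<StoreOp>) {
            // Take the store and cache locks together, so readers never observe a
            // cache entry that the backing store does not have (or vice versa).
            let mut kv = self.kv.lock().unwrap();
            let mut blocks = self.block_cache.lock().unwrap();
            let mut blobs = self.blob_cache.lock().unwrap();
            for op in batch {
                match op {
                    StoreOp::PutBlock(root, bytes) => {
                        kv.insert(("beacon_block", root), bytes.clone());
                        blocks.insert(root, bytes);
                    }
                    StoreOp::PutBlobs(root, bytes) => {
                        kv.insert(("beacon_blob", root), bytes.clone());
                        blobs.insert(root, bytes);
                    }
                }
            }
        }
    }

    fn main() {
        let store = Store::default();
        store.do_atomically(vec![StoreOp::PutBlobs([0; 32], vec![1, 2, 3])]);
        assert!(store.blob_cache.lock().unwrap().contains_key(&[0; 32]));
    }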

View File

@@ -42,6 +42,7 @@ use parking_lot::MutexGuard;
 use std::sync::Arc;
 use strum::{EnumString, IntoStaticStr};
 pub use types::*;
+use types::signed_blobs_sidecar::SignedBlobsSidecar;
 
 pub type ColumnIter<'a> = Box<dyn Iterator<Item = Result<(Hash256, Vec<u8>), Error>> + 'a>;
 pub type ColumnKeyIter<'a> = Box<dyn Iterator<Item = Result<Hash256, Error>> + 'a>;
@@ -155,6 +156,7 @@ pub trait ItemStore<E: EthSpec>: KeyValueStore<E> + Sync + Send + Sized + 'static
 pub enum StoreOp<'a, E: EthSpec> {
     PutBlock(Hash256, Arc<SignedBeaconBlock<E>>),
     PutState(Hash256, &'a BeaconState<E>),
+    PutBlobs(Hash256, SignedBlobsSidecar<E>),
     PutStateSummary(Hash256, HotStateSummary),
     PutStateTemporaryFlag(Hash256),
     DeleteStateTemporaryFlag(Hash256),
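
One dependency worth flagging: blobs_as_kv_store_ops above writes to DBColumn::BeaconBlob, whose definition must live in one of the other changed files not shown here. The general scheme in the store crate is that each column maps to a short string prefix prepended to the key. A self-contained sketch of that scheme; the prefix strings here are illustrative, not necessarily the ones Lighthouse uses:

    #[derive(Clone, Copy)]
    enum DbColumn {
        BeaconBlock,
        BeaconBlob,
    }

    impl DbColumn {
        fn as_str(self) -> &'static str {
            match self {
                DbColumn::BeaconBlock => "blk", // illustrative prefix
                DbColumn::BeaconBlob => "blb",  // illustrative prefix
            }
        }
    }

    /// Column-prefixed key, mirroring the idea behind get_key_for_col.
    fn get_key_for_col(column: DbColumn, key: &[u8]) -> Vec<u8> {
        let mut out = column.as_str().as_bytes().to_vec();
        out.extend_from_slice(key);
        out
    }

    fn main() {
        let key = get_key_for_col(DbColumn::BeaconBlob, &[0xaa; 32]);
        assert!(key.starts_with(b"blb"));
        assert_eq!(key.len(), 3 + 32);
    }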

View File

@@ -307,5 +307,10 @@ define_hardcoded_nets!(
         // Set to `true` if the genesis state can be found in the `built_in_network_configs`
         // directory.
         GENESIS_STATE_IS_KNOWN
+    ),
+    (
+        eip4844,
+        "eip4844",
+        GENESIS_STATE_IS_KNOWN
     )
 );
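
For context on what the new define_hardcoded_nets! entry buys (an assumption based on how the other hardcoded networks are wired up): the macro appears to turn each entry into a named network whose config files are embedded at compile time from the crate's built_in_network_configs directory, which is why the next file in this commit adds the eip4844 config itself. A rough, hypothetical sketch of the shape of such an entry:

    /// Hypothetical simplification of a hardcoded-network entry; the real macro
    /// expansion in eth2_network_config differs in detail.
    struct HardcodedNet {
        name: &'static str,
        genesis_state_is_known: bool,
        config_yaml: &'static str,
    }

    const EIP4844: HardcodedNet = HardcodedNet {
        name: "eip4844",
        genesis_state_is_known: true,
        // In the real crate the bytes would be embedded from the
        // built_in_network_configs/eip4844 directory; this string is illustrative.
        config_yaml: "CONFIG_NAME: 'eip4844'\nPRESET_BASE: 'mainnet'\n",
    };

    fn main() {
        assert_eq!(EIP4844.name, "eip4844");
        assert!(EIP4844.genesis_state_is_known);
        println!("{}", EIP4844.config_yaml);
    }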

View File

@@ -0,0 +1,85 @@
# EIP-4844 config
# Extends the mainnet preset
CONFIG_NAME: 'eip4844'
PRESET_BASE: 'mainnet'
# Transition
# ---------------------------------------------------------------
TERMINAL_TOTAL_DIFFICULTY: 40
# By default, don't use these params
TERMINAL_BLOCK_HASH: 0x0000000000000000000000000000000000000000000000000000000000000000
TERMINAL_BLOCK_HASH_ACTIVATION_EPOCH: 18446744073709551615
# Genesis
# ---------------------------------------------------------------
# Reduced from mainnet's 2**14 (= 16,384) for a small test network
MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: 2
# May-23-2022 03:00:00 PM +UTC
MIN_GENESIS_TIME: 1653318000
GENESIS_FORK_VERSION: 0x00000ffd
# No genesis delay
GENESIS_DELAY: 0
# Forking
# ---------------------------------------------------------------
# All forks are scheduled within the first few epochs.
# Altair
ALTAIR_FORK_VERSION: 0x01000ffd
ALTAIR_FORK_EPOCH: 1
# Merge
BELLATRIX_FORK_VERSION: 0x02000ffd
BELLATRIX_FORK_EPOCH: 2
# Sharding
EIP4844_FORK_VERSION: 0x03000ffd
EIP4844_FORK_EPOCH: 3
# Matches TERMINAL_TOTAL_DIFFICULTY above
TRANSITION_TOTAL_DIFFICULTY: 40
# Time parameters
# ---------------------------------------------------------------
# 12 seconds
SECONDS_PER_SLOT: 12
# 14 (estimate from Eth1 mainnet)
SECONDS_PER_ETH1_BLOCK: 14
# 2**8 (= 256) epochs ~27 hours
MIN_VALIDATOR_WITHDRAWABILITY_DELAY: 256
# 2**8 (= 256) epochs ~27 hours
SHARD_COMMITTEE_PERIOD: 256
# 15 Eth1 blocks ~3.5 minutes
ETH1_FOLLOW_DISTANCE: 15
# Validator cycle
# ---------------------------------------------------------------
# 2**2 (= 4)
INACTIVITY_SCORE_BIAS: 4
# 2**4 (= 16)
INACTIVITY_SCORE_RECOVERY_RATE: 16
# 2**4 * 10**9 (= 16,000,000,000) Gwei
EJECTION_BALANCE: 16000000000
# 2**2 (= 4)
MIN_PER_EPOCH_CHURN_LIMIT: 4
# 2**16 (= 65,536)
CHURN_LIMIT_QUOTIENT: 65536
# Fork choice
# ---------------------------------------------------------------
# 40%
PROPOSER_SCORE_BOOST: 40
# Deposit contract
# ---------------------------------------------------------------
# Custom chain and network IDs (not Goerli)
DEPOSIT_CHAIN_ID: 1331
DEPOSIT_NETWORK_ID: 69
# Deposit contract for this test network
DEPOSIT_CONTRACT_ADDRESS: 0x8A04d14125D0FDCDc742F4A05C051De07232EDa4
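
As a quick sanity check of the schedule above, assuming the mainnet preset's 32 slots per epoch and that genesis lands exactly at MIN_GENESIS_TIME (GENESIS_DELAY is 0), each fork epoch converts to wall-clock time as follows:

    fn main() {
        let genesis_time = 1_653_318_000u64; // MIN_GENESIS_TIME (+ GENESIS_DELAY of 0)
        let seconds_per_slot = 12u64;        // SECONDS_PER_SLOT
        let slots_per_epoch = 32u64;         // mainnet preset
        for (fork, epoch) in [("altair", 1u64), ("bellatrix", 2), ("eip4844", 3)] {
            let activation = genesis_time + epoch * slots_per_epoch * seconds_per_slot;
            // epoch 1 -> +384 s, epoch 2 -> +768 s, epoch 3 -> +1152 s after genesis
            println!("{fork}: epoch {epoch}, unix time {activation}");
        }
    }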