Merge branch 'unstable' of https://github.com/sigp/lighthouse into eip4844

realbigsean 2022-11-04 13:23:55 -04:00
commit 1aec17b09c
GPG Key ID: B372B64D866BF8CC
26 changed files with 37 additions and 77 deletions

View File

@@ -349,7 +349,7 @@ fn load_voting_keypair(
     password_file_path: Option<&PathBuf>,
     stdin_inputs: bool,
 ) -> Result<Keypair, String> {
-    let keystore = Keystore::from_json_file(&voting_keystore_path).map_err(|e| {
+    let keystore = Keystore::from_json_file(voting_keystore_path).map_err(|e| {
         format!(
             "Unable to read keystore JSON {:?}: {:?}",
             voting_keystore_path, e
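Most of the hunks in this commit drop a redundant `&` of the kind flagged by Clippy's `needless_borrow` lint: when a value is already a reference, or the callee is generic over `AsRef`, borrowing it again adds nothing. A minimal, self-contained sketch with hypothetical names:

use std::path::{Path, PathBuf};

// Hypothetical helper, generic over anything that can be viewed as a Path.
fn file_name_of<P: AsRef<Path>>(path: P) -> String {
    path.as_ref()
        .file_name()
        .map(|n| n.to_string_lossy().into_owned())
        .unwrap_or_default()
}

fn main() {
    let keystore_path = PathBuf::from("validators/voting-keystore.json");
    let by_ref: &PathBuf = &keystore_path;

    // Both calls compile: `&by_ref` is a `&&PathBuf`, which still satisfies
    // `AsRef<Path>` via the blanket impl for references, so the extra borrow
    // adds nothing. The lint asks for the first form.
    assert_eq!(file_name_of(by_ref), "voting-keystore.json");
    assert_eq!(file_name_of(&by_ref), "voting-keystore.json");
}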

View File

@@ -176,7 +176,7 @@ pub fn cli_run(matches: &ArgMatches, validator_dir: PathBuf) -> Result<(), Strin
     let password = match keystore_password_path.as_ref() {
         Some(path) => {
-            let password_from_file: ZeroizeString = fs::read_to_string(&path)
+            let password_from_file: ZeroizeString = fs::read_to_string(path)
                 .map_err(|e| format!("Unable to read {:?}: {:?}", path, e))?
                 .into();
             password_from_file.without_newlines()
@@ -256,7 +256,7 @@ pub fn cli_run(matches: &ArgMatches, validator_dir: PathBuf) -> Result<(), Strin
             .ok_or_else(|| format!("Badly formatted file name: {:?}", src_keystore))?;
         // Copy the keystore to the new location.
-        fs::copy(&src_keystore, &dest_keystore)
+        fs::copy(src_keystore, &dest_keystore)
             .map_err(|e| format!("Unable to copy keystore: {:?}", e))?;
         // Register with slashing protection.

View File

@@ -159,7 +159,7 @@ pub fn create_wallet_from_mnemonic(
         unknown => return Err(format!("--{} {} is not supported", TYPE_FLAG, unknown)),
     };
-    let mgr = WalletManager::open(&wallet_base_dir)
+    let mgr = WalletManager::open(wallet_base_dir)
         .map_err(|e| format!("Unable to open --{}: {:?}", WALLETS_DIR_FLAG, e))?;
     let wallet_password: PlainText = match wallet_password_path {

View File

@@ -45,7 +45,7 @@ impl HeadTracker {
     /// Returns a `SszHeadTracker`, which contains all necessary information to restore the state
     /// of `Self` at some later point.
     pub fn to_ssz_container(&self) -> SszHeadTracker {
-        SszHeadTracker::from_map(&*self.0.read())
+        SszHeadTracker::from_map(&self.0.read())
     }
     /// Creates a new `Self` from the given `SszHeadTracker`, restoring `Self` to the same state of
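This hunk (and the `BackgroundMigrator` one below) removes an explicit `&*` around a lock guard: because a read guard implements `Deref` to its inner type, `&guard` coerces to `&T` wherever a `&T` is expected, so the manual reborrow is noise (the pattern Clippy's `explicit_auto_deref` lint targets). A small sketch under that assumption, using std types and hypothetical names:

use std::collections::HashMap;
use std::sync::RwLock;

// Hypothetical consumer that wants a plain shared reference to the map.
fn count_heads(map: &HashMap<u64, u64>) -> usize {
    map.len()
}

fn main() {
    let tracker: RwLock<HashMap<u64, u64>> = RwLock::new(HashMap::from([(1, 10), (2, 20)]));

    let guard = tracker.read().unwrap();
    // `&*guard` and `&guard` both yield a `&HashMap<u64, u64>`: the guard's
    // `Deref` impl lets the compiler insert the `*`, so the shorter form wins.
    assert_eq!(count_heads(&*guard), 2);
    assert_eq!(count_heads(&guard), 2);
}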

View File

@@ -588,7 +588,7 @@ impl<E: EthSpec, Hot: ItemStore<E>, Cold: ItemStore<E>> BackgroundMigrator<E, Ho
         let persisted_head = PersistedBeaconChain {
             _canonical_head_block_root: DUMMY_CANONICAL_HEAD_BLOCK_ROOT,
             genesis_block_root,
-            ssz_head_tracker: SszHeadTracker::from_map(&*head_tracker_lock),
+            ssz_head_tracker: SszHeadTracker::from_map(&head_tracker_lock),
         };
         drop(head_tracker_lock);
         kv_batch.push(persisted_head.as_kv_store_op(BEACON_CHAIN_DB_KEY));

View File

@@ -17,7 +17,6 @@ use crate::persisted_fork_choice::{
 };
 use crate::types::ChainSpec;
 use slog::{warn, Logger};
-use std::path::Path;
 use std::sync::Arc;
 use store::hot_cold_store::{HotColdDB, HotColdDBError};
 use store::metadata::{SchemaVersion, CURRENT_SCHEMA_VERSION};
@@ -27,7 +26,6 @@ use store::{Error as StoreError, StoreItem};
 pub fn migrate_schema<T: BeaconChainTypes>(
     db: Arc<HotColdDB<T::EthSpec, T::HotStore, T::ColdStore>>,
     deposit_contract_deploy_block: u64,
-    datadir: &Path,
     from: SchemaVersion,
     to: SchemaVersion,
     log: Logger,
@@ -42,21 +40,12 @@ pub fn migrate_schema<T: BeaconChainTypes>(
             migrate_schema::<T>(
                 db.clone(),
                 deposit_contract_deploy_block,
-                datadir,
                 from,
                 next,
                 log.clone(),
                 spec,
             )?;
-            migrate_schema::<T>(
-                db,
-                deposit_contract_deploy_block,
-                datadir,
-                next,
-                to,
-                log,
-                spec,
-            )
+            migrate_schema::<T>(db, deposit_contract_deploy_block, next, to, log, spec)
         }
         // Downgrade across multiple versions by recursively migrating one step at a time.
         (_, _) if to.as_u64() + 1 < from.as_u64() => {
@@ -64,21 +53,12 @@ pub fn migrate_schema<T: BeaconChainTypes>(
             migrate_schema::<T>(
                 db.clone(),
                 deposit_contract_deploy_block,
-                datadir,
                 from,
                 next,
                 log.clone(),
                 spec,
             )?;
-            migrate_schema::<T>(
-                db,
-                deposit_contract_deploy_block,
-                datadir,
-                next,
-                to,
-                log,
-                spec,
-            )
+            migrate_schema::<T>(db, deposit_contract_deploy_block, next, to, log, spec)
         }
         //
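The collapsed calls above keep the existing control flow: a multi-version migration recurses one schema step at a time, so each step only ever needs to know how to move between adjacent versions. A stripped-down, hypothetical sketch of that shape (the real function also threads the database handle, logger and chain spec through each call):

// Hypothetical stand-in for `SchemaVersion`; the real type wraps a u64.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct SchemaVersion(u64);

fn migrate_schema(from: SchemaVersion, to: SchemaVersion) -> Result<(), String> {
    match (from, to) {
        // Nothing to do.
        (f, t) if f == t => Ok(()),
        // Upgrade across multiple versions by recursing one step at a time.
        (f, t) if f.0 + 1 < t.0 => {
            let next = SchemaVersion(f.0 + 1);
            migrate_schema(f, next)?;
            migrate_schema(next, t)
        }
        // Downgrade across multiple versions by recursing one step at a time.
        (f, t) if t.0 + 1 < f.0 => {
            let next = SchemaVersion(f.0 - 1);
            migrate_schema(f, next)?;
            migrate_schema(next, t)
        }
        // Single-step migrations would be handled explicitly here.
        (f, t) => {
            println!("migrating schema {:?} -> {:?}", f, t);
            Ok(())
        }
    }
}

fn main() {
    migrate_schema(SchemaVersion(9), SchemaVersion(12)).unwrap();
}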

View File

@@ -356,7 +356,7 @@ where
         let urls: Vec<SensitiveUrl> = urls
             .iter()
-            .map(|s| SensitiveUrl::parse(*s))
+            .map(|s| SensitiveUrl::parse(s))
             .collect::<Result<_, _>>()
             .unwrap();

View File

@@ -332,34 +332,22 @@ impl<T: EthSpec> ValidatorMonitor<T> {
             metrics::set_int_gauge(
                 &metrics::VALIDATOR_MONITOR_SLASHED,
                 &[id],
-                if validator.slashed { 1 } else { 0 },
+                i64::from(validator.slashed),
             );
             metrics::set_int_gauge(
                 &metrics::VALIDATOR_MONITOR_ACTIVE,
                 &[id],
-                if validator.is_active_at(current_epoch) {
-                    1
-                } else {
-                    0
-                },
+                i64::from(validator.is_active_at(current_epoch)),
             );
             metrics::set_int_gauge(
                 &metrics::VALIDATOR_MONITOR_EXITED,
                 &[id],
-                if validator.is_exited_at(current_epoch) {
-                    1
-                } else {
-                    0
-                },
+                i64::from(validator.is_exited_at(current_epoch)),
             );
             metrics::set_int_gauge(
                 &metrics::VALIDATOR_MONITOR_WITHDRAWABLE,
                 &[id],
-                if validator.is_withdrawable_at(current_epoch) {
-                    1
-                } else {
-                    0
-                },
+                i64::from(validator.is_withdrawable_at(current_epoch)),
             );
             metrics::set_int_gauge(
                 &metrics::VALIDATOR_ACTIVATION_ELIGIBILITY_EPOCH,
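The `i64::from(...)` rewrites above rely on the standard library's `From<bool>` impl for integers, which maps `false`/`true` to `0`/`1` exactly like the removed `if`/`else` blocks (this is what Clippy's `bool_to_int_with_if` lint suggests). A minimal illustration:

fn main() {
    let slashed = true;
    let active = false;

    // `From<bool>` is implemented for the integer types: false -> 0, true -> 1.
    assert_eq!(i64::from(slashed), 1);
    assert_eq!(i64::from(active), 0);

    // Equivalent to the longhand form that the diff removes.
    assert_eq!(i64::from(slashed), if slashed { 1 } else { 0 });
}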

View File

@@ -858,7 +858,6 @@ where
     /// Specifies that the `Client` should use a `HotColdDB` database.
     pub fn disk_store(
         mut self,
-        datadir: &Path,
         hot_path: &Path,
         cold_path: &Path,
         config: StoreConfig,
@@ -888,7 +887,6 @@
             migrate_schema::<Witness<TSlotClock, TEth1Backend, _, _, _>>(
                 db,
                 deposit_contract_deploy_block,
-                datadir,
                 from,
                 to,
                 log,

View File

@@ -1,3 +1,4 @@
+#![recursion_limit = "256"]
 //! This crate contains a HTTP server which serves the endpoints listed here:
 //!
 //! https://github.com/ethereum/beacon-APIs

View File

@@ -269,7 +269,7 @@ pub(crate) fn save_metadata_to_disk<E: EthSpec>(
     metadata: MetaData<E>,
     log: &slog::Logger,
 ) {
-    let _ = std::fs::create_dir_all(&dir);
+    let _ = std::fs::create_dir_all(dir);
     match File::create(dir.join(METADATA_FILENAME))
         .and_then(|mut f| f.write_all(&metadata.as_ssz_bytes()))
     {

View File

@@ -267,7 +267,7 @@ impl<T: EthSpec> OperationPool<T> {
             &prev_epoch_key,
             &*all_attestations,
             state,
-            &*reward_cache,
+            &reward_cache,
             total_active_balance,
             prev_epoch_validity_filter,
             spec,
@@ -278,7 +278,7 @@ impl<T: EthSpec> OperationPool<T> {
             &curr_epoch_key,
             &*all_attestations,
             state,
-            &*reward_cache,
+            &reward_cache,
             total_active_balance,
             curr_epoch_validity_filter,
             spec,

View File

@@ -61,7 +61,7 @@ impl<E: EthSpec> ProductionBeaconNode<E> {
         let client_genesis = client_config.genesis.clone();
         let store_config = client_config.store.clone();
         let log = context.log().clone();
-        let datadir = client_config.create_data_dir()?;
+        let _datadir = client_config.create_data_dir()?;
         let db_path = client_config.create_db_path()?;
         let freezer_db_path = client_config.create_freezer_db_path()?;
         let executor = context.executor.clone();
@@ -84,13 +84,7 @@ impl<E: EthSpec> ProductionBeaconNode<E> {
             .runtime_context(context)
             .chain_spec(spec)
             .http_api_config(client_config.http_api.clone())
-            .disk_store(
-                &datadir,
-                &db_path,
-                &freezer_db_path,
-                store_config,
-                log.clone(),
-            )?;
+            .disk_store(&db_path, &freezer_db_path, store_config, log.clone())?;
         let builder = if let Some(slasher_config) = client_config.slasher.clone() {
             let slasher = Arc::new(

View File

@@ -55,7 +55,7 @@ pub enum Error {
 /// Creates a file with `600 (-rw-------)` permissions and writes the specified bytes to file.
 pub fn create_with_600_perms<P: AsRef<Path>>(path: P, bytes: &[u8]) -> Result<(), Error> {
     let path = path.as_ref();
-    let mut file = File::create(&path).map_err(Error::UnableToCreateFile)?;
+    let mut file = File::create(path).map_err(Error::UnableToCreateFile)?;
     #[cfg(unix)]
     {
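For readers unfamiliar with the helper touched here: the usual way to get a `600 (-rw-------)` file on Unix is to create it, restrict its permissions via `PermissionsExt::from_mode`, and only then write the secret bytes. A rough, hypothetical sketch of that shape (the real helper returns its own `Error` type rather than `io::Error`):

use std::fs::File;
use std::io::{self, Write};
use std::path::Path;

fn write_secret_600(path: &Path, bytes: &[u8]) -> io::Result<()> {
    let mut file = File::create(path)?;

    // On Unix, restrict the file to owner read/write before writing the secret.
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        file.set_permissions(std::fs::Permissions::from_mode(0o600))?;
    }

    file.write_all(bytes)
}

fn main() -> io::Result<()> {
    write_secret_600(Path::new("/tmp/example-api-secret"), b"0xdeadbeef")
}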

View File

@@ -10,7 +10,7 @@ where
     S: Serializer,
 {
     let mut hex_string: String = "0x".to_string();
-    hex_string.push_str(&hex::encode(&bytes));
+    hex_string.push_str(&hex::encode(bytes));
     serializer.serialize_str(&hex_string)
 }

View File

@@ -39,7 +39,7 @@ impl<'de> Visitor<'de> for QuantityVisitor {
             hex::decode(&format!("0{}", stripped))
                 .map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
         } else {
-            hex::decode(&stripped).map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
+            hex::decode(stripped).map_err(|e| de::Error::custom(format!("invalid hex ({:?})", e)))
         }
     }
 }

View File

@@ -368,7 +368,7 @@ mod test {
     fn context_size() {
         assert_eq!(
             mem::size_of::<HalfNode>(),
-            232,
+            224,
             "Halfnode size should be as expected"
         );
     }

View File

@@ -40,7 +40,7 @@ impl ConfigAndPreset {
         let extra_fields = get_extra_fields(spec);
         if spec.bellatrix_fork_epoch.is_some()
-            || fork_name == None
+            || fork_name.is_none()
             || fork_name == Some(ForkName::Merge)
         {
             let bellatrix_preset = BellatrixPreset::from_chain_spec::<T>(spec);
@@ -65,7 +65,7 @@ impl ConfigAndPreset {
 /// Get a hashmap of constants to add to the `PresetAndConfig`
 pub fn get_extra_fields(spec: &ChainSpec) -> HashMap<String, Value> {
-    let hex_string = |value: &[u8]| format!("0x{}", hex::encode(&value)).into();
+    let hex_string = |value: &[u8]| format!("0x{}", hex::encode(value)).into();
     let u32_hex = |v: u32| hex_string(&v.to_le_bytes());
     let u8_hex = |v: u8| hex_string(&v.to_le_bytes());
     hashmap! {

View File

@@ -27,7 +27,7 @@ impl Graffiti {
 impl fmt::Display for Graffiti {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "{}", eth2_serde_utils::hex::encode(&self.0))
+        write!(f, "{}", eth2_serde_utils::hex::encode(self.0))
     }
 }

View File

@@ -257,7 +257,6 @@ pub fn migrate_db<E: EthSpec>(
     migrate_schema::<Witness<SystemTimeSlotClock, CachingEth1Backend<E>, _, _, _>>(
         db,
         client_config.eth1.deposit_contract_deploy_block,
-        &client_config.get_data_dir(),
        from,
        to,
        log,

View File

@@ -188,7 +188,7 @@ pub trait TargetArrayChunk: Sized + serde::Serialize + serde::de::DeserializeOwn
         txn.put(
             Self::select_db(db),
-            &disk_key.to_be_bytes(),
+            disk_key.to_be_bytes(),
             &compressed_value,
         )?;
         Ok(())

View File

@@ -301,7 +301,7 @@ impl<E: EthSpec> SlasherDB<E> {
     pub fn store_schema_version(&self, txn: &mut RwTransaction<'_>) -> Result<(), Error> {
         txn.put(
             &self.databases.metadata_db,
-            &METADATA_VERSION_KEY,
+            METADATA_VERSION_KEY,
             &bincode::serialize(&CURRENT_SCHEMA_VERSION)?,
         )?;
         Ok(())
@@ -323,7 +323,7 @@ impl<E: EthSpec> SlasherDB<E> {
     pub fn store_config(&self, config: &Config, txn: &mut RwTransaction<'_>) -> Result<(), Error> {
         txn.put(
             &self.databases.metadata_db,
-            &METADATA_CONFIG_KEY,
+            METADATA_CONFIG_KEY,
             &bincode::serialize(config)?,
         )?;
         Ok(())
@@ -367,7 +367,7 @@ impl<E: EthSpec> SlasherDB<E> {
             txn.put(
                 &self.databases.attesters_db,
                 &AttesterKey::new(validator_index, target_epoch, &self.config),
-                &CompactAttesterRecord::null().as_bytes(),
+                CompactAttesterRecord::null().as_bytes(),
             )?;
         }
     }
@@ -423,7 +423,7 @@ impl<E: EthSpec> SlasherDB<E> {
         key: &IndexedAttestationIdKey,
         value: IndexedAttestationId,
     ) -> Result<(), Error> {
-        txn.put(&self.databases.indexed_attestation_id_db, key, &value)?;
+        txn.put(&self.databases.indexed_attestation_id_db, key, value)?;
         Ok(())
     }
@@ -579,7 +579,7 @@ impl<E: EthSpec> SlasherDB<E> {
         txn.put(
             &self.databases.attesters_db,
             &AttesterKey::new(validator_index, target_epoch, &self.config),
-            &indexed_attestation_id,
+            indexed_attestation_id,
         )?;
         Ok(AttesterSlashingStatus::NotSlashable)
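The dropped `&`s in these slasher hunks all feed values into a `put` that is generic over byte-slice-like inputs: when a parameter is bounded by `AsRef<[u8]>` (as LMDB-style key/value arguments typically are), fixed-size arrays such as the output of `to_be_bytes()` can be passed directly, and borrowing them first adds nothing. A small, hypothetical sketch of that bound:

use std::collections::BTreeMap;

// Hypothetical key/value store whose `put` accepts anything byte-slice-like,
// mirroring the shape of the slasher's transaction API.
#[derive(Default)]
struct Store {
    inner: BTreeMap<Vec<u8>, Vec<u8>>,
}

impl Store {
    fn put(&mut self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
        self.inner
            .insert(key.as_ref().to_vec(), value.as_ref().to_vec());
    }
}

fn main() {
    let mut store = Store::default();
    let disk_key: u64 = 42;

    // `[u8; 8]` implements `AsRef<[u8]>`, so the array returned by
    // `to_be_bytes()` can be passed by value; `&disk_key.to_be_bytes()`
    // also works, but the extra borrow is redundant.
    store.put(disk_key.to_be_bytes(), b"compressed-value");
    assert_eq!(store.inner.len(), 1);
}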

View File

@@ -13,7 +13,7 @@ const GETH_REPO_URL: &str = "https://github.com/ethereum/go-ethereum";
 pub fn build_result(repo_dir: &Path) -> Output {
     Command::new("make")
         .arg("geth")
-        .current_dir(&repo_dir)
+        .current_dir(repo_dir)
         .output()
         .expect("failed to make geth")
 }

View File

@@ -203,8 +203,8 @@ impl<E: GenericExecutionEngine> TestRig<E> {
             .await;
         // We hardcode the accounts here since some EEs start with a default unlocked account
-        let account1 = ethers_core::types::Address::from_slice(&hex::decode(&ACCOUNT1).unwrap());
-        let account2 = ethers_core::types::Address::from_slice(&hex::decode(&ACCOUNT2).unwrap());
+        let account1 = ethers_core::types::Address::from_slice(&hex::decode(ACCOUNT1).unwrap());
+        let account2 = ethers_core::types::Address::from_slice(&hex::decode(ACCOUNT2).unwrap());
         /*
          * Check the transition config endpoint.

View File

@@ -30,7 +30,7 @@ pub fn transactions<E: EthSpec>(account1: Address, account2: Address) -> Vec<Typ
         Transaction::DepositDepositContract {
             sender: account1,
             deposit_contract_address: ethers_core::types::Address::from_slice(
-                &hex::decode(&DEPOSIT_CONTRACT_ADDRESS).unwrap(),
+                &hex::decode(DEPOSIT_CONTRACT_ADDRESS).unwrap(),
             ),
         }
         .transaction::<E>(),

View File

@@ -60,7 +60,7 @@ impl ApiSecret {
             // Create and write the secret key to file with appropriate permissions
             create_with_600_perms(
                 &sk_path,
-                eth2_serde_utils::hex::encode(&sk.serialize()).as_bytes(),
+                eth2_serde_utils::hex::encode(sk.serialize()).as_bytes(),
             )
             .map_err(|e| {
                 format!(