Interop chain start strategies (#479)
* Implement more flexible beacon chain genesis
* Fix compile issues from rebase on master
* Rename CLI flag
* Adds initial documentation for TOML files
* Update docs readme
* Add first version of cli_util
* Dont write cache fields in serde
* Tidy cli_util
* Add code to load genesis YAML file
* Move serde_utils out of tests in `types`
* Update logging text
* Fix serde YAML for Fork
* Make yaml hex decoding more strict
* Update deterministic key generate for interop
* Set deposit count on testing genesis state
* Make some fixes for deposit count
* Remove code fragements
* Large restructure of docs
* Tidy docs
* Fix readme link
* Add interop docs
* Tidy README
This commit is contained in:
parent 0374e31907
commit 845f336a59
@@ -9,6 +9,7 @@ members = [
"eth2/utils/compare_fields",
"eth2/utils/compare_fields_derive",
"eth2/utils/eth2_config",
"eth2/utils/eth2_interop_keypairs",
"eth2/utils/hashing",
"eth2/utils/logging",
"eth2/utils/merkle_proof",
@@ -33,6 +34,7 @@ members = [
"beacon_node/version",
"beacon_node/beacon_chain",
"tests/ef_tests",
"tests/cli_util",
"protos",
"validator_client",
"account_manager",
@@ -94,29 +94,37 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
store: Arc<T::Store>,
slot_clock: T::SlotClock,
mut genesis_state: BeaconState<T::EthSpec>,
genesis_block: BeaconBlock<T::EthSpec>,
mut genesis_block: BeaconBlock<T::EthSpec>,
spec: ChainSpec,
log: Logger,
) -> Result<Self, Error> {
genesis_state.build_all_caches(&spec)?;

let state_root = genesis_state.canonical_root();
store.put(&state_root, &genesis_state)?;
let genesis_state_root = genesis_state.canonical_root();
store.put(&genesis_state_root, &genesis_state)?;

genesis_block.state_root = genesis_state_root;

let genesis_block_root = genesis_block.block_header().canonical_root();
store.put(&genesis_block_root, &genesis_block)?;

// Also store the genesis block under the `ZERO_HASH` key.
let genesis_block_root = genesis_block.block_header().canonical_root();
let genesis_block_root = genesis_block.canonical_root();
store.put(&Hash256::zero(), &genesis_block)?;

let canonical_head = RwLock::new(CheckPoint::new(
genesis_block.clone(),
genesis_block_root,
genesis_state.clone(),
state_root,
genesis_state_root,
));

info!(log, "BeaconChain init";
"genesis_validator_count" => genesis_state.validators.len(),
"genesis_state_root" => format!("{}", genesis_state_root),
"genesis_block_root" => format!("{}", genesis_block_root),
);

Ok(Self {
spec,
slot_clock,
@@ -760,7 +768,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
randao_reveal,
// TODO: replace with real data.
eth1_data: Eth1Data {
deposit_count: 0,
deposit_count: state.eth1_data.deposit_count,
deposit_root: Hash256::zero(),
block_hash: Hash256::zero(),
},
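The reordering above (computing `genesis_state_root`, assigning it to `genesis_block.state_root`, and only then hashing the block) matters because the block root commits to every field. A toy sketch, not Lighthouse code, using the standard library's hasher to make the point:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for a beacon block: the root commits to all fields.
#[derive(Hash)]
struct Block {
    slot: u64,
    state_root: u64,
}

fn root(block: &Block) -> u64 {
    let mut hasher = DefaultHasher::new();
    block.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let mut genesis_block = Block { slot: 0, state_root: 0 };
    let premature_root = root(&genesis_block);

    // Equivalent of `genesis_block.state_root = genesis_state_root;` above.
    genesis_block.state_root = 42;
    let genesis_block_root = root(&genesis_block);

    // Hashing before the state root is set would have stored the block under
    // the wrong key.
    assert_ne!(premature_root, genesis_block_root);
}
```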
@@ -18,6 +18,7 @@ slot_clock = { path = "../../eth2/utils/slot_clock" }
serde = "1.0.93"
serde_derive = "1.0"
error-chain = "0.12.0"
serde_yaml = "0.8"
slog = { version = "^2.2.3" , features = ["max_level_trace"] }
slog-async = "^2.3.0"
slog-json = "^2.3"
@@ -1,27 +1,31 @@
use crate::error::Result;
use crate::{config::GenesisState, ClientConfig};
use beacon_chain::{
lmd_ghost::{LmdGhost, ThreadSafeReducedTree},
slot_clock::SystemTimeSlotClock,
store::Store,
BeaconChain, BeaconChainTypes,
};
use slog::{info, Logger};
use slog::{crit, info, Logger};
use slot_clock::SlotClock;
use std::fs::File;
use std::marker::PhantomData;
use std::sync::Arc;
use std::time::SystemTime;
use tree_hash::TreeHash;
use types::{test_utils::TestingBeaconStateBuilder, BeaconBlock, ChainSpec, EthSpec, Hash256};

/// The number initial validators when starting the `Minimal`.
const TESTNET_VALIDATOR_COUNT: usize = 16;
use types::{
test_utils::TestingBeaconStateBuilder, BeaconBlock, BeaconState, ChainSpec, EthSpec, Hash256,
};

/// Provides a new, initialized `BeaconChain`
pub trait InitialiseBeaconChain<T: BeaconChainTypes> {
fn initialise_beacon_chain(
store: Arc<T::Store>,
config: &ClientConfig,
spec: ChainSpec,
log: Logger,
) -> BeaconChain<T> {
maybe_load_from_store_for_testnet::<_, T::Store, T::EthSpec>(store, spec, log)
) -> Result<BeaconChain<T>> {
maybe_load_from_store_for_testnet::<_, T::Store, T::EthSpec>(store, config, spec, log)
}
}

@@ -42,45 +46,109 @@ impl<T: Store, E: EthSpec, X: BeaconChainTypes> InitialiseBeaconChain<X> for Cli
/// Loads a `BeaconChain` from `store`, if it exists. Otherwise, create a new chain from genesis.
fn maybe_load_from_store_for_testnet<T, U: Store, V: EthSpec>(
store: Arc<U>,
config: &ClientConfig,
spec: ChainSpec,
log: Logger,
) -> BeaconChain<T>
) -> Result<BeaconChain<T>>
where
T: BeaconChainTypes<Store = U, EthSpec = V>,
T::LmdGhost: LmdGhost<U, V>,
{
let genesis_state = match &config.genesis_state {
GenesisState::Mainnet => {
crit!(log, "This release does not support mainnet genesis state.");
return Err("Mainnet is unsupported".into());
}
GenesisState::RecentGenesis { validator_count } => {
generate_testnet_genesis_state(*validator_count, recent_genesis_time(), &spec)
}
GenesisState::Generated {
validator_count,
genesis_time,
} => generate_testnet_genesis_state(*validator_count, *genesis_time, &spec),
GenesisState::Yaml { file } => {
let file = File::open(file).map_err(|e| {
format!("Unable to open YAML genesis state file {:?}: {:?}", file, e)
})?;

serde_yaml::from_reader(file)
.map_err(|e| format!("Unable to parse YAML genesis state file: {:?}", e))?
}
};

let mut genesis_block = BeaconBlock::empty(&spec);
genesis_block.state_root = Hash256::from_slice(&genesis_state.tree_hash_root());
let genesis_block_root = genesis_block.canonical_root();

// Slot clock
let slot_clock = T::SlotClock::new(
spec.genesis_slot,
genesis_state.genesis_time,
spec.seconds_per_slot,
);

// Try load an existing `BeaconChain` from the store. If unable, create a new one.
if let Ok(Some(beacon_chain)) =
BeaconChain::from_store(store.clone(), spec.clone(), log.clone())
{
info!(
log,
"Loaded BeaconChain from store";
"slot" => beacon_chain.head().beacon_state.slot,
"best_slot" => beacon_chain.best_slot(),
);
// Here we check to ensure that the `BeaconChain` loaded from store has the expected
// genesis block.
//
// Without this check, it's possible that there will be an existing DB with a `BeaconChain`
// that has different parameters than provided to this executable.
if beacon_chain.genesis_block_root == genesis_block_root {
info!(
log,
"Loaded BeaconChain from store";
"slot" => beacon_chain.head().beacon_state.slot,
"best_slot" => beacon_chain.best_slot(),
);

beacon_chain
Ok(beacon_chain)
} else {
crit!(
log,
"The BeaconChain loaded from disk has an incorrect genesis root. \
This may be caused by an old database in located in datadir."
);
Err("Incorrect genesis root".into())
}
} else {
info!(log, "Initializing new BeaconChain from genesis");
let state_builder = TestingBeaconStateBuilder::from_default_keypairs_file_if_exists(
TESTNET_VALIDATOR_COUNT,
&spec,
);
let (genesis_state, _keypairs) = state_builder.build();

let mut genesis_block = BeaconBlock::empty(&spec);
genesis_block.state_root = Hash256::from_slice(&genesis_state.tree_hash_root());

// Slot clock
let slot_clock = T::SlotClock::new(
spec.genesis_slot,
genesis_state.genesis_time,
spec.seconds_per_slot,
);

// Genesis chain
//TODO: Handle error correctly
BeaconChain::from_genesis(store, slot_clock, genesis_state, genesis_block, spec, log)
.expect("Terminate if beacon chain generation fails")
BeaconChain::from_genesis(
store,
slot_clock,
genesis_state,
genesis_block,
spec,
log.clone(),
)
.map_err(|e| format!("Failed to initialize new beacon chain: {:?}", e).into())
}
}

fn generate_testnet_genesis_state<E: EthSpec>(
validator_count: usize,
genesis_time: u64,
spec: &ChainSpec,
) -> BeaconState<E> {
let (mut genesis_state, _keypairs) =
TestingBeaconStateBuilder::from_default_keypairs_file_if_exists(validator_count, spec)
.build();

genesis_state.genesis_time = genesis_time;

genesis_state
}

/// Returns the system time, mod 30 minutes.
///
/// Used for easily creating testnets.
fn recent_genesis_time() -> u64 {
let now = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let secs_after_last_period = now.checked_rem(30 * 60).unwrap_or(0);
// genesis is now the last 30 minute block.
now - secs_after_last_period
}
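A quick sanity check (not part of the diff) of the 30-minute rounding used by `recent_genesis_time` above, using the example timestamp that appears in the config docs later in this commit:

```rust
fn main() {
    let window: u64 = 30 * 60; // 30 minutes, as in `recent_genesis_time`
    let now: u64 = 1_564_620_118; // example UNIX timestamp from the docs below
    let secs_after_last_period = now.checked_rem(window).unwrap_or(0);
    let genesis_time = now - secs_after_last_period;

    // The result is aligned to a 30-minute boundary and is never in the future.
    assert_eq!(genesis_time % window, 0);
    assert!(genesis_time <= now && now - genesis_time < window);
    println!("genesis_time = {}", genesis_time);
}
```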
@@ -7,6 +7,12 @@ use std::fs::{self, OpenOptions};
use std::path::PathBuf;
use std::sync::Mutex;

/// The number initial validators when starting the `Minimal`.
const TESTNET_VALIDATOR_COUNT: usize = 16;

/// The number initial validators when starting the `Minimal`.
const TESTNET_SPEC_CONSTANTS: &str = "minimal";

/// The core configuration of a Lighthouse beacon node.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Config {
@@ -14,12 +20,35 @@ pub struct Config {
pub db_type: String,
db_name: String,
pub log_file: PathBuf,
pub spec_constants: String,
pub genesis_state: GenesisState,
pub network: network::NetworkConfig,
pub rpc: rpc::RPCConfig,
pub http: HttpServerConfig,
pub rest_api: rest_api::APIConfig,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum GenesisState {
/// Use the mainnet genesis state.
///
/// Mainnet genesis state is not presently known, so this is a place-holder.
Mainnet,
/// Generate a state with `validator_count` validators, all with well-known secret keys.
///
/// Set the genesis time to be the start of the previous 30-minute window.
RecentGenesis { validator_count: usize },
/// Generate a state with `genesis_time` and `validator_count` validators, all with well-known
/// secret keys.
Generated {
validator_count: usize,
genesis_time: u64,
},
/// Load a YAML-encoded genesis state from a file.
Yaml { file: PathBuf },
}

impl Default for Config {
fn default() -> Self {
Self {
@@ -33,6 +62,10 @@ impl Default for Config {
rpc: rpc::RPCConfig::default(),
http: HttpServerConfig::default(),
rest_api: rest_api::APIConfig::default(),
spec_constants: TESTNET_SPEC_CONSTANTS.into(),
genesis_state: GenesisState::RecentGenesis {
validator_count: TESTNET_VALIDATOR_COUNT,
},
}
}
}
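The `#[serde(tag = "type")]` attribute on `GenesisState` is what lets the `[genesis_state]` tables in `beacon-node.toml` (shown later in this commit) select an enum variant by their `type` field. A minimal, self-contained sketch of that mapping, assuming `serde_derive` and `toml` as dependencies (it mirrors, but is not, the crate's own code):

```rust
use serde_derive::Deserialize;
use std::path::PathBuf;

// Stripped-down mirror of the enum added above.
#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
enum GenesisState {
    Mainnet,
    RecentGenesis { validator_count: usize },
    Generated { validator_count: usize, genesis_time: u64 },
    Yaml { file: PathBuf },
}

fn main() {
    // Same shape as the `[genesis_state]` table in the example config file.
    let table = r#"
        type = "RecentGenesis"
        validator_count = 16
    "#;

    let genesis: GenesisState = toml::from_str(table).expect("valid genesis_state table");
    println!("{:?}", genesis); // RecentGenesis { validator_count: 16 }
}
```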
@ -67,9 +67,10 @@ where
|
||||
// Load a `BeaconChain` from the store, or create a new one if it does not exist.
|
||||
let beacon_chain = Arc::new(T::initialise_beacon_chain(
|
||||
store,
|
||||
&client_config,
|
||||
eth2_config.spec.clone(),
|
||||
log.clone(),
|
||||
));
|
||||
)?);
|
||||
// Registry all beacon chain metrics with the global registry.
|
||||
beacon_chain
|
||||
.metrics
|
||||
@ -90,7 +91,7 @@ where
|
||||
let slots_since_genesis = beacon_chain.slots_since_genesis().unwrap();
|
||||
info!(
|
||||
log,
|
||||
"Initializing state";
|
||||
"BeaconState cache init";
|
||||
"state_slot" => state_slot,
|
||||
"wall_clock_slot" => wall_clock_slot,
|
||||
"slots_since_genesis" => slots_since_genesis,
|
||||
@ -98,12 +99,6 @@ where
|
||||
);
|
||||
}
|
||||
do_state_catchup(&beacon_chain, &log);
|
||||
info!(
|
||||
log,
|
||||
"State initialized";
|
||||
"state_slot" => beacon_chain.head().beacon_state.slot,
|
||||
"wall_clock_slot" => beacon_chain.read_slot_clock().unwrap(),
|
||||
);
|
||||
|
||||
// Start the network service, libp2p and syncing threads
|
||||
// TODO: Add beacon_chain reference to network parameters
|
||||
|
@ -12,6 +12,7 @@ pub const DEFAULT_DATA_DIR: &str = ".lighthouse";
|
||||
|
||||
pub const CLIENT_CONFIG_FILENAME: &str = "beacon-node.toml";
|
||||
pub const ETH2_CONFIG_FILENAME: &str = "eth2-spec.toml";
|
||||
pub const TESTNET_CONFIG_FILENAME: &str = "testnet.toml";
|
||||
|
||||
fn main() {
|
||||
// debugging output for libp2p and external crates
|
||||
@ -21,7 +22,9 @@ fn main() {
|
||||
.version(version::version().as_str())
|
||||
.author("Sigma Prime <contact@sigmaprime.io>")
|
||||
.about("Eth 2.0 Client")
|
||||
// file system related arguments
|
||||
/*
|
||||
* Configuration directory locations.
|
||||
*/
|
||||
.arg(
|
||||
Arg::with_name("datadir")
|
||||
.long("datadir")
|
||||
@ -43,7 +46,9 @@ fn main() {
|
||||
.help("Data directory for network keys.")
|
||||
.takes_value(true)
|
||||
)
|
||||
// network related arguments
|
||||
/*
|
||||
* Network parameters.
|
||||
*/
|
||||
.arg(
|
||||
Arg::with_name("listen-address")
|
||||
.long("listen-address")
|
||||
@ -86,7 +91,9 @@ fn main() {
|
||||
.help("The IP address to broadcast to other peers on how to reach this node.")
|
||||
.takes_value(true),
|
||||
)
|
||||
// rpc related arguments
|
||||
/*
|
||||
* gRPC parameters.
|
||||
*/
|
||||
.arg(
|
||||
Arg::with_name("rpc")
|
||||
.long("rpc")
|
||||
@ -107,7 +114,9 @@ fn main() {
|
||||
.help("Listen port for RPC endpoint.")
|
||||
.takes_value(true),
|
||||
)
|
||||
// HTTP related arguments
|
||||
/*
|
||||
* HTTP server parameters.
|
||||
*/
|
||||
.arg(
|
||||
Arg::with_name("http")
|
||||
.long("http")
|
||||
@ -127,7 +136,6 @@ fn main() {
|
||||
.help("Listen port for the HTTP server.")
|
||||
.takes_value(true),
|
||||
)
|
||||
// REST API related arguments
|
||||
.arg(
|
||||
Arg::with_name("api")
|
||||
.long("api")
|
||||
@ -149,7 +157,10 @@ fn main() {
|
||||
.help("Set the listen TCP port for the RESTful HTTP API server.")
|
||||
.takes_value(true),
|
||||
)
|
||||
// General arguments
|
||||
|
||||
/*
|
||||
* Database parameters.
|
||||
*/
|
||||
.arg(
|
||||
Arg::with_name("db")
|
||||
.long("db")
|
||||
@ -159,12 +170,17 @@ fn main() {
|
||||
.possible_values(&["disk", "memory"])
|
||||
.default_value("memory"),
|
||||
)
|
||||
/*
|
||||
* Specification/testnet params.
|
||||
*/
|
||||
.arg(
|
||||
Arg::with_name("spec-constants")
|
||||
.long("spec-constants")
|
||||
Arg::with_name("default-spec")
|
||||
.long("default-spec")
|
||||
.value_name("TITLE")
|
||||
.short("s")
|
||||
.help("The title of the spec constants for chain config.")
|
||||
.short("default-spec")
|
||||
.help("Specifies the default eth2 spec to be used. Overridden by any spec loaded
|
||||
from disk. A spec will be written to disk after this flag is used, so it is
|
||||
primarily used for creating eth2 spec files.")
|
||||
.takes_value(true)
|
||||
.possible_values(&["mainnet", "minimal"])
|
||||
.default_value("minimal"),
|
||||
@ -175,6 +191,9 @@ fn main() {
|
||||
.short("r")
|
||||
.help("When present, genesis will be within 30 minutes prior. Only for testing"),
|
||||
)
|
||||
/*
|
||||
* Logging.
|
||||
*/
|
||||
.arg(
|
||||
Arg::with_name("debug-level")
|
||||
.long("debug-level")
|
||||
@ -288,7 +307,7 @@ fn main() {
|
||||
let mut eth2_config = match read_from_file::<Eth2Config>(eth2_config_path.clone()) {
|
||||
Ok(Some(c)) => c,
|
||||
Ok(None) => {
|
||||
let default = match matches.value_of("spec-constants") {
|
||||
let default = match matches.value_of("default-spec") {
|
||||
Some("mainnet") => Eth2Config::mainnet(),
|
||||
Some("minimal") => Eth2Config::minimal(),
|
||||
_ => unreachable!(), // Guarded by slog.
|
||||
|
@ -49,7 +49,7 @@ pub fn run_beacon_node(
|
||||
|
||||
info!(
|
||||
log,
|
||||
"Starting beacon node";
|
||||
"BeaconNode init";
|
||||
"p2p_listen_address" => format!("{:?}", &other_client_config.network.listen_address),
|
||||
"data_dir" => format!("{:?}", other_client_config.data_dir()),
|
||||
"network_dir" => format!("{:?}", other_client_config.network.network_dir),
|
||||
|
docs/README.md (new file, 69 lines)
@@ -0,0 +1,69 @@
# Lighthouse Documentation

_Lighthouse is a work-in-progress. Instructions are provided for running the
client; however, these instructions are designed for developers and researchers
working on the project. We do not (yet) provide user-facing functionality._

## Introduction

- [Overview of Ethereum 2.0](serenity.md)
- [Development Environment Setup](env.md)

For client implementers looking to inter-op, see the [Inter-Op
Docs](interop.md).

## Command-line Interface

With the [development environment](env.md) configured, run `cargo build --all
--release` (this can take several minutes on the first build). Then,
navigate to the `target/release/` directory and read the CLI documentation
using:

```
$ ./beacon_node -h
```

The main [`README.md`](../README.md#simple-local-testnet) provides instructions
for running a small, local testnet.

## REST API

The beacon node provides a RESTful HTTP API which serves information about the
Beacon Chain, the P2P network and more.

This API is documented in the [`rest_oapi.yaml`](rest_oapi.yaml) Swagger YAML
file. There's an interactive version hosted on
[SwaggerHub](https://app.swaggerhub.com/apis/spble/lighthouse_rest_api/0.1.0).

The implementation of the Swagger API in Lighthouse is incomplete; we do not
(yet) guarantee that all routes are implemented.

## Configuration Files

Lighthouse uses [TOML](https://github.com/toml-lang/toml) files for
configuration. The binaries use the following config files (they are
generated from defaults if they don't already exist):

- [Beacon Node](/beacon_node)
  - [`~/.lighthouse/beacon-node.toml`](#beacon-nodetoml): the primary
    configuration file for a beacon node.
  - `~/.lighthouse/eth2-spec.toml`: defines chain-specific "constants" that
    define an Ethereum 2.0 network.
- [Validator Client](/validator_client)
  - `~/.lighthouse/validator_client.toml`: the primary configuration file for
    a validator client.
  - `~/.lighthouse/eth2-spec.toml`: defines chain-specific "constants" that
    define an Ethereum 2.0 network.

_Note: default directories are shown; CLI flags can be used to override these
defaults._

#### `beacon-node.toml`

A TOML configuration file that defines the behaviour of the beacon node
runtime.

- Located in the `datadir` (default `~/.lighthouse`) as `beacon-node.toml`.
- Created from defaults if not present.

See the [example](config_examples/beacon-node.toml) for more information.
docs/config_examples/beacon-node.toml (new file, 98 lines)
@@ -0,0 +1,98 @@
#
# Beacon Node TOML configuration file.
#
# Defines the runtime configuration of a Lighthouse Beacon Node.
#

# The directory where beacon-node specific files will be placed. Includes the
# database and configuration files.
data_dir = ".lighthouse"
# The type of database used. Can be either:
#
# - "disk": LevelDB (almost always desired).
# - "memory": an in-memory hashmap (only used for testing).
db_type = "disk"
# The name of the LevelDB database directory, if any.
db_name = "chain_db"
# If specified, all logs will be written to this file.
log_file = ""
# Defines the Ethereum 2.0 specification set to be used:
#
# - "mainnet": parameters expected to be used for Eth2 mainnet.
# - "minimal": smaller, more efficient parameters used for testing.
spec_constants = "minimal"

#
# The "genesis_state" object defines how the genesis state should be created.
#

# The "RecentGenesis" type assumes that genesis started at the beginning of the
# most-recent 30 minute window (e.g., 08:00, 08:30, 09:00, ...).
[genesis_state]
type = "RecentGenesis"
validator_count = 16

# "Generated" is the same as "RecentGenesis", however it allows for manual
# specification of the genesis_time.
#
# [genesis_state]
# type = "Generated"
# validator_count = 16
# genesis_time = 1564620118

# "Yaml" loads a full genesis state from a YAML file.
#
# [genesis_state]
# type = "Yaml"
# file = "~/genesis_state.yaml"

#
# P2P networking configuration.
#
[network]
# The directory for storing p2p network related files. E.g., p2p keys, peer
# lists, etc.
network_dir = "/home/paul/.lighthouse/network"
# The address that libp2p should use for incoming connections.
listen_address = "127.0.0.1"
# The port that libp2p should use for incoming connections.
libp2p_port = 9000
# The address that should listen for UDP peer-discovery.
discovery_address = "127.0.0.1"
# The port that should listen for UDP peer-discovery.
discovery_port = 9000
# Maximum number of libp2p peers.
max_peers = 10
# Boot nodes for initial peer discovery.
boot_nodes = []
# The client version; may be customized.
client_version = "Lighthouse/v0.1.0-unstable/x86_64-linux"
# A list of libp2p topics. Purpose unknown.
topics = []

#
# gRPC configuration. To be removed.
#
[rpc]
enabled = false
listen_address = "127.0.0.1"
port = 5051

#
# Legacy HTTP server configuration. To be removed.
#
[http]
enabled = false
listen_address = "127.0.0.1"
listen_port = "5052"

#
# RESTful HTTP API server configuration.
#
[rest_api]
# Set to `true` to enable the RESTful HTTP API server.
enabled = true
# The listen address for the RESTful HTTP API server.
listen_address = "127.0.0.1"
# The listen port for the RESTful HTTP API server.
port = 1248
docs/env.md (new file, 52 lines)
@@ -0,0 +1,52 @@
# Development Environment Setup

_This document describes how to set up a development environment. It is intended
for software developers and researchers who wish to contribute to development._

Lighthouse is a Rust project and [`cargo`](https://doc.rust-lang.org/cargo/) is
used extensively. As such, you'll need to install Rust in order to build the
project. Generally, Rust is installed using the
[rustup](https://www.rust-lang.org/tools/install) tool-chain manager.

## Steps

A fully-featured development environment can be achieved with the following
steps:

1. Install [rustup](https://rustup.rs/).
1. Use the command `rustup show` to get information about the Rust
   installation. You should see that the active tool-chain is the stable
   version.
    - Updates can be performed using `rustup update`; Lighthouse generally
      requires a recent version of Rust.
1. Install build dependencies (Arch packages are listed here; your
   distribution will likely be similar):
    - `clang`: required by RocksDB.
    - `protobuf`: required for protobuf serialization (gRPC).
    - `cmake`: required for building protobuf.
    - `git-lfs`: the Git extension for [Large File
      Support](https://git-lfs.github.com/) (required for Ethereum Foundation
      test vectors).
1. Clone the repository with submodules: `git clone --recursive
   https://github.com/sigp/lighthouse`. If you've already cloned the repo,
   ensure the testing submodules are present: `$ git submodule init; git
   submodule update`
1. Change directory to the root of the repository.
1. Run the test suite with `cargo test --all --release`. The build and test
   process can take several minutes. If you experience any failures on
   `master`, please raise an
   [issue](https://github.com/sigp/lighthouse/issues).

## Notes:

Lighthouse targets Rust `stable` but generally runs on `nightly` too.

### Note for Windows users:

Perl may also be required to build Lighthouse. You can install [Strawberry
Perl](http://strawberryperl.com/), or alternatively use a choco install command:
`choco install strawberryperl`.

Additionally, the dependency `protoc-grpcio v0.3.1` is reported to have issues
compiling in Windows. You can specify a known working version by editing the
version in the `[build-dependencies]` section of `protos/Cargo.toml` to
`protoc-grpcio = "<=0.3.0"`.
@ -1,40 +0,0 @@
|
||||
# Development Environment Setup
|
||||
|
||||
A few basic steps are needed to get set up (skip to #5 if you already have Rust
|
||||
installed):
|
||||
|
||||
1. Install [rustup](https://rustup.rs/). It's a toolchain manager for Rust (Linux | macOS | Windows). For installation, download the script with `$ curl -f https://sh.rustup.rs > rustup.sh`, review its content (e.g. `$ less ./rustup.sh`) and run the script `$ ./rustup.sh` (you may need to change the permissions to allow execution, i.e. `$ chmod +x rustup.sh`)
|
||||
2. (Linux & MacOS) To configure your current shell run: `$ source $HOME/.cargo/env`
|
||||
3. Use the command `rustup show` to get information about the Rust installation. You should see that the
|
||||
active toolchain is the stable version.
|
||||
4. Run `rustc --version` to check the installation and version of rust.
|
||||
- Updates can be performed using` rustup update` .
|
||||
5. Install build dependencies (Arch packages are listed here, your distribution will likely be similar):
|
||||
- `clang`: required by RocksDB.
|
||||
- `protobuf`: required for protobuf serialization (gRPC).
|
||||
- `cmake`: required for building protobuf
|
||||
- `git-lfs`: The Git extension for [Large File Support](https://git-lfs.github.com/) (required for EF tests submodule).
|
||||
6. If you haven't already, clone the repository with submodules: `git clone --recursive https://github.com/sigp/lighthouse`.
|
||||
Alternatively, run `git submodule init; git submodule update` in a repository which was cloned without submodules.
|
||||
7. Change directory to the root of the repository.
|
||||
8. Run the test by using command `cargo test --all --release`. By running, it will pass all the required test cases.
|
||||
If you are doing it for the first time, then you can grab a coffee in the meantime. Usually, it takes time
|
||||
to build, compile and pass all test cases. If there is no error then it means everything is working properly
|
||||
and it's time to get your hands dirty.
|
||||
In case, if there is an error, then please raise the [issue](https://github.com/sigp/lighthouse/issues).
|
||||
We will help you.
|
||||
9. As an alternative to, or instead of the above step, you may also run benchmarks by using
|
||||
the command `cargo bench --all`
|
||||
|
||||
## Notes:
|
||||
|
||||
Lighthouse targets Rust `stable` but _should_ run on `nightly`.
|
||||
|
||||
### Note for Windows users:
|
||||
|
||||
Perl may also be required to build lighthouse. You can install [Strawberry Perl](http://strawberryperl.com/),
|
||||
or alternatively use a choco install command `choco install strawberryperl`.
|
||||
|
||||
Additionally, the dependency `protoc-grpcio v0.3.1` is reported to have issues compiling in Windows. You can specify
|
||||
a known working version by editing version in protos/Cargo.toml's "build-dependencies" section to
|
||||
`protoc-grpcio = "<=0.3.0"`.
|
docs/interop.md (new file, 109 lines)
@@ -0,0 +1,109 @@
# Lighthouse Inter-Op Docs

_These documents are intended for a highly technical audience, specifically
Ethereum 2.0 implementers._

This document provides details on how to use Lighthouse for inter-op testing.

## Steps

_Note: binaries are compiled into the `target/release` directory of the
repository. In this example, we run binaries assuming the user is in this
directory. E.g., running the beacon node binary can be achieved with
`$ ./target/release/beacon_node`. Those familiar with `cargo` may use the
equivalent (and more-convenient) `cargo run --release --` commands._

1. Set up a Lighthouse [development environment](env.md).
1. Build all the binaries using `cargo build --all --release`.
1. Create default configuration files by running `$ ./beacon_node` and pressing
   Ctrl+C after the node has started.
1. Follow the steps in [Genesis](#genesis) to configure the genesis state.
1. Follow the steps in [Networking](#networking) to launch a node with
   appropriate networking parameters.

## Genesis

Lighthouse supports the following methods for generating a genesis state:

- [`Yaml`](#yaml): loads the genesis state from some YAML file (recommended
  method).
- [`Generated`](#generated): generates a state given a `(validator_count,
  genesis_time)` tuple. _Note: this method is not yet fully specified and the
  state generated is almost certainly not identical to other implementations._
- [`RecentGenesis`](#recentgenesis): identical to `Generated`, however the
  `genesis_time` is set to the start of the previous 30-minute window. For
  example, if a state is generated at `0845`, the genesis time will be `0830`.

You may configure a `beacon_node` to use one of these methods using the
[`beacon-node.toml`](README.md#beacon-nodetoml). There is a [documented
example](config_examples/) configuration file which includes an example for
each of these methods (see the `genesis_state` object).

### Yaml

This method involves loading a `BeaconState` from a YAML file. We provide
instructions for generating that YAML file and starting from it. If starting
from a pre-existing YAML file, simply skip the generation steps.

#### Generating a YAML file

The [cli_util](/tests/cli_util) generates YAML genesis state files. You can run
`$ ./cli_util genesis_yaml -h` to see documentation. We provide an example to
generate a YAML file with the following properties:

- 10 initial validators, each with [deterministic
  keypairs](https://github.com/ethereum/eth2.0-pm/issues/60#issuecomment-512157915).
- The genesis file is stored in `~/.lighthouse/`, the default data directory
  (an absolute path must be supplied).
- Genesis time is set to the time when the command is run (it can be customized
  with the `-g` flag).

```
$ ./cli_util genesis_yaml -n 10 -f /home/user/.lighthouse/genesis_state.yaml
```

#### Configuring the Beacon Node

Modify the [`beacon-node.toml`](README.md#beacon-nodetoml) file to have the
following `genesis_state` object (choosing the `file`):

```
[genesis_state]
type = "Yaml"
file = "/home/user/.lighthouse/genesis_state.yaml"
```

### Generated

Modify the [`beacon-node.toml`](README.md#beacon-nodetoml) file to have the
following `genesis_state` object (choosing the `validator_count` and
`genesis_time`):

```
[genesis_state]
type = "Generated"
validator_count = 16
genesis_time = 1564620118
```

### RecentGenesis

Modify the [`beacon-node.toml`](README.md#beacon-nodetoml) file to have the
following `genesis_state` object (choosing the `validator_count`):

```
[genesis_state]
type = "RecentGenesis"
validator_count = 16
```

## Networking

_TODO: provide details on config required to connect to some IP address._

## References

The BLS key generation method used should be identical to [this
implementation](https://github.com/ethereum/eth2.0-pm/issues/60#issuecomment-512157915).
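For a quick local check of those keys, the `eth2_interop_keypairs` crate added
in this commit can be called directly. A minimal sketch (the crate and its
`be_private_key` function come from this commit; the hex formatting is
illustrative only):

```rust
use eth2_interop_keypairs::be_private_key;

fn main() {
    // Big-endian BLS12-381 secret-key bytes for validator 0 (insecure, interop-only).
    let sk = be_private_key(0);
    let hex: String = sk.iter().map(|byte| format!("{:02x}", byte)).collect();
    println!("validator 0 secret key: 0x{}", hex);
}
```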
@ -1,83 +0,0 @@
|
||||
# About Lighthouse
|
||||
|
||||
## Goals
|
||||
|
||||
The purpose of this project is to work alongside the Ethereum community to
|
||||
implement a secure, trustworthy, open-source Ethereum Serenity client in Rust.
|
||||
|
||||
* **Security**: Lighthouse's main goal is to implement everything with a
|
||||
security-first mindset. The goal is to ensure that all components of lighthouse
|
||||
are thoroughly tested, checked and secure.
|
||||
|
||||
* **Trust** : As Ethereum Serenity is a Proof-of-Stake system, which
|
||||
involves the interaction of the Ethereum protocol and user funds. Thus, a goal
|
||||
of Lighthouse is to provide a client that is trustworthy.
|
||||
|
||||
All code can be tested and verified the goal of Lighthouse is to provide code
|
||||
that is trusted.
|
||||
|
||||
* **Transparency**: Lighthouse aims at being as transparent as possible. This
|
||||
goal is for Lighthouse to embrace the open-source community and allow for all
|
||||
to understand the decisions, direction and changes in all aspects.
|
||||
|
||||
* **Error Resilience**: As Lighthouse embraces the "never `panic`" mindset, the
|
||||
goal is to be resilient to errors that may occur. Providing a client that has
|
||||
tolerance against errors provides further properties for a secure, trustworthy
|
||||
client that Lighthouse aims to provide.
|
||||
|
||||
In addition to implementing a new client, the project seeks to maintain and
|
||||
improve the Ethereum protocol wherever possible.
|
||||
|
||||
## Ideology
|
||||
|
||||
### Never Panic
|
||||
|
||||
Lighthouse will be the gateway interacting with the Proof-of-Stake system
|
||||
employed by Ethereum. This requires the validation and proposal of blocks
|
||||
and extremely timely responses. As part of this, Lighthouse aims to ensure
|
||||
the most uptime as possible, meaning minimising the amount of
|
||||
exceptions and gracefully handling any issues.
|
||||
|
||||
Rust's `panic` provides the ability to throw an exception and exit, this
|
||||
will terminate the running processes. Thus, Lighthouse aims to use `panic`
|
||||
as little as possible to minimise the possible termination cases.
|
||||
|
||||
### Security First Mindset
|
||||
|
||||
Lighthouse aims to provide a safe, secure Serenity client for the Ethereum
|
||||
ecosystem. At each step of development, the aim is to have a security-first
|
||||
mindset and always ensure you are following the safe, secure mindset. When
|
||||
contributing to any part of the Lighthouse client, through any development,
|
||||
always ensure you understand each aspect thoroughly and cover all potential
|
||||
security considerations of your code.
|
||||
|
||||
### Functions aren't completed until they are tested
|
||||
|
||||
As part of the Security First mindset, we want to aim to cover as many distinct
|
||||
cases. A function being developed is not considered "completed" until tests
|
||||
exist for that function. The tests not only help show the correctness of the
|
||||
function, but also provide a way for new developers to understand how the
|
||||
function is to be called and how it works.
|
||||
|
||||
|
||||
## Engineering Ethos
|
||||
|
||||
Lighthouse aims to produce many small easily-tested components, each separated
|
||||
into individual crates wherever possible.
|
||||
|
||||
Generally, tests can be kept in the same file, as is typical in Rust.
|
||||
Integration tests should be placed in the `tests` directory in the crate's
|
||||
root. Particularly large (line-count) tests should be placed into a separate
|
||||
file.
|
||||
|
||||
A function is not considered complete until a test exists for it. We produce
|
||||
tests to protect against regression (accidentally breaking things) and to
|
||||
provide examples that help readers of the code base understand how functions
|
||||
should (or should not) be used.
|
||||
|
||||
Each pull request is to be reviewed by at least one "core developer" (i.e.,
|
||||
someone with write-access to the repository). This helps to ensure bugs are
|
||||
detected, consistency is maintained, and responsibility of errors is dispersed.
|
||||
|
||||
Discussion must be respectful and intellectual. Have fun and make jokes, but
|
||||
always respect the limits of other people.
|
@ -1,233 +0,0 @@
|
||||
# Contributing to Lighthouse
|
||||
|
||||
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/sigp/lighthouse?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge)
|
||||
|
||||
Lighthouse is an open-source Ethereum Serenity client built in
|
||||
[Rust](https://www.rust-lang.org/).
|
||||
|
||||
Lighthouse welcomes all contributions with open arms. If you are interested in
|
||||
contributing to the Ethereum ecosystem, and you want to learn Rust, Lighthouse
|
||||
is a great project to work on.
|
||||
|
||||
This documentation aims to provide a smooth on-boarding for all who wish to
|
||||
help contribute to Lighthouse. Whether it is helping with the mountain of
|
||||
documentation, writing extra tests or developing components, all help is
|
||||
appreciated and your contributions will help not only the community but all
|
||||
the contributors.
|
||||
|
||||
We've bundled up our Goals, Ethos and Ideology into one document for you to
|
||||
read through, please read our [About Lighthouse](lighthouse.md) docs. :smile:
|
||||
|
||||
Layer-1 infrastructure is a critical component for the ecosystem and relies
|
||||
heavily on contributions from the community. Building Ethereum Serenity is a
|
||||
huge task and we refuse to conduct an inappropriate ICO or charge licensing
|
||||
fees. Instead, we fund development through grants and support from Sigma
|
||||
Prime.
|
||||
|
||||
If you have any additional questions, please feel free to jump on the
|
||||
[gitter](https://gitter.im/sigp/lighthouse) and have a chat with all of us.
|
||||
|
||||
**Pre-reading Materials:**
|
||||
|
||||
* [About Lighthouse](lighthouse.md)
|
||||
* [Ethereum Serenity](serenity.md)
|
||||
|
||||
**Repository**
|
||||
|
||||
If you'd like to contribute, try having a look through the [open
|
||||
issues](https://github.com/sigp/lighthouse/issues) (tip: look for the [good
|
||||
first
|
||||
issue](https://github.com/sigp/lighthouse/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22)
|
||||
tag) and ping us on the [gitter](https://gitter.im/sigp/lighthouse) channel. We need
|
||||
your support!
|
||||
|
||||
## Understanding Serenity
|
||||
|
||||
Ethereum's Serenity is based on a Proof-of-Stake based sharded beacon chain.
|
||||
|
||||
(*If you don't know what that is, don't `panic`, that's what this documentation
|
||||
is for!* :smile:)
|
||||
|
||||
Read through our [Understanding
|
||||
Serenity](https://github.com/sigp/lighthouse/blob/master/docs/serenity.md) docs
|
||||
to learn more! :smile: (*unless you've already read it.*)
|
||||
|
||||
The document explains the necessary fundamentals for understanding Ethereum,
|
||||
Proof-of-Stake and the Serenity we are working towards.
|
||||
|
||||
## Development Onboarding
|
||||
|
||||
If you would like to contribute and develop Lighthouse, there are only a few
|
||||
things to go through (and then you're on your way!).
|
||||
|
||||
### Understanding Rust
|
||||
|
||||
Rust is an extremely powerful, low-level programming language that provides
|
||||
freedom and performance to create powerful projects. The [Rust
|
||||
Book](https://doc.rust-lang.org/stable/book/) provides insight into the Rust
|
||||
language and some of the coding style to follow (As well as acting as a great
|
||||
introduction and tutorial for the language.)
|
||||
|
||||
Rust has a steep learning curve, but there are many resources to help you!
|
||||
|
||||
* [Rust Book](https://doc.rust-lang.org/stable/book/)
|
||||
* [Rust by example](https://doc.rust-lang.org/stable/rust-by-example/)
|
||||
* [Learning Rust With Entirely Too Many Linked Lists](http://cglab.ca/~abeinges/blah/too-many-lists/book/)
|
||||
* [Rustlings](https://github.com/rustlings/rustlings)
|
||||
* [Rust Exercism](https://exercism.io/tracks/rust)
|
||||
* [Learn X in Y minutes - Rust](https://learnxinyminutes.com/docs/rust/)
|
||||
|
||||
|
||||
#### Getting Started and installing Rust
|
||||
|
||||
We recommend installing Rust using [**rustup**](https://rustup.rs/). Rustup
|
||||
allows you to easily install versions of rust.
|
||||
|
||||
**Linux/Unix/Mac:**
|
||||
|
||||
```
|
||||
$ curl https://sh.rustup.rs -sSf | sh
|
||||
```
|
||||
|
||||
**Windows (You need a bit more):**
|
||||
* Install the Visual Studio 2015 with C++ support
|
||||
* Install Rustup using: https://static.rust-lang.org/rustup/dist/x86_64-pc-windows-msvc/rustup-init.exe
|
||||
* You can then use the ``VS2015 x64 Native Tools Command Prompt`` and run:
|
||||
|
||||
```
|
||||
rustup default stable-x86-64-pc-windows-msvc
|
||||
```
|
||||
|
||||
#### Getting ready with Cargo
|
||||
|
||||
[Cargo](https://doc.rust-lang.org/cargo/) is the package manager for Rust, and
|
||||
allows to extend to a number of packages and external libraries. It's also extremely
|
||||
handy for handling dependencies and helping to modularise your project better.
|
||||
|
||||
*Note: If you've installed rust through rustup, you should have ``cargo``
|
||||
installed.*
|
||||
|
||||
#### Rust Terminology
|
||||
|
||||
When developing rust, you'll come across some terminology that differs to
|
||||
other programming languages you may have used.
|
||||
|
||||
* **Trait**: A trait is a collection of methods defined for a type, they can be
|
||||
implemented for any data type.
|
||||
* **Struct**: A custom data type that lets us name and package together
|
||||
multiple related values that make a meaningful group.
|
||||
* **Crate**: A crate is synonymous with a *library* or *package* in other
|
||||
languages. They can produce an executable or library depending on the
|
||||
project.
|
||||
* **Module**: A collection of items: functions, structs, traits, and even other
|
||||
modules. Modules allow you to hierarchically split code into logical units
|
||||
and manage visibility.
|
||||
* **Attribute**: Metadata applied to some module, crate or item.
|
||||
* **Macros**: Macros are powerful meta-programming statements that get expanded
|
||||
into source code that gets compiled with the rest of the code (Unlike `C`
|
||||
macros that are pre-processed, Rust macros form an Abstract Syntax Tree).
|
||||
|
||||
|
||||
Other good appendix resources:
|
||||
|
||||
* [Keywords](https://doc.rust-lang.org/book/appendix-01-keywords.html)
|
||||
* [Operators/Symbols](https://doc.rust-lang.org/book/appendix-02-operators.html)
|
||||
* [Traits](https://doc.rust-lang.org/book/appendix-03-derivable-traits.html)
|
||||
|
||||
|
||||
### Understanding the Git Workflow
|
||||
|
||||
Lighthouse utilises git as the primary open-source development tool. To help
|
||||
with your contributions, it is great to understand the processes used to ensure
|
||||
everything remains in sync and there's as little conflict as possible when
|
||||
working on similar files.
|
||||
|
||||
Lighthouse uses the **feature branch** workflow, where each issue, or each
|
||||
feature, is developed on its own branch and then merged in via a pull-request.
|
||||
|
||||
* [Feature Branch Tutorial](https://www.atlassian.com/git/tutorials/comparing-workflows/feature-branch-workflow)
|
||||
|
||||
## Code Conventions/Styleguide and Ethos
|
||||
|
||||
### Ethos
|
||||
|
||||
**Pull Requests**
|
||||
|
||||
Pull requests should be reviewed by **at least** one "*core developer*"
|
||||
(someone with write-access to the repo). This should ensure bugs are caught and
|
||||
the code is kept in a consistent state that follows all conventions and style.
|
||||
|
||||
All discussion (whether in PRs or Issues or in the Gitter) should be respectful
|
||||
and intellectual. Have fun, but always respect the limits of other people.
|
||||
|
||||
**Testing**
|
||||
|
||||
*"A function is not considered complete until tests exist for it."*
|
||||
|
||||
Generally, tests can be self-contained in the same file. Integration tests
|
||||
should be added into the ``tests/`` directory in the crate's **root**.
|
||||
|
||||
Large line-count tests should be in a separate file.
|
||||
|
||||
### Rust StyleGuide
|
||||
|
||||
Lighthouse adheres to Rust code conventions as outlined in the [**Rust
|
||||
Styleguide**](https://github.com/rust-dev-tools/fmt-rfcs/blob/master/guide/guide.md).
|
||||
|
||||
Ensure you use [Clippy](https://github.com/rust-lang/rust-clippy) to lint and
|
||||
check your code.
|
||||
|
||||
| Code Aspect | Guideline Format |
|
||||
|:--------------------|:-------------------------------|
|
||||
| Types | ``UpperCamelCase`` |
|
||||
| Enums/Enum Variants | ``UpperCamelCase`` |
|
||||
| Struct Fields | ``snake_case`` |
|
||||
| Function / Method | ``snake_case`` |
|
||||
| Macro Names | ``snake_case`` |
|
||||
| Constants | ``SCREAMING_SNAKE_CASE`` |
|
||||
| Forbidden name | Trailing Underscore: ``name_`` |
|
||||
|
||||
Other general rust docs:
|
||||
|
||||
* [Rust Other Style Advice](https://github.com/rust-dev-tools/fmt-rfcs/blob/master/guide/advice.md)
|
||||
* [Cargo.toml Conventions](https://github.com/rust-dev-tools/fmt-rfcs/blob/master/guide/cargo.md)
|
||||
|
||||
### TODOs
|
||||
|
||||
All `TODO` statements should be accompanied by a GitHub issue.
|
||||
|
||||
```rust
|
||||
pub fn my_function(&mut self, _something &[u8]) -> Result<String, Error> {
|
||||
// TODO: something_here
|
||||
// https://github.com/sigp/lighthouse/issues/XX
|
||||
}
|
||||
```
|
||||
|
||||
### Comments
|
||||
|
||||
**General Comments**
|
||||
|
||||
* Prefer line (``//``) comments to block comments (``/* ... */``)
|
||||
* Comments can appear on the line prior to the item or after a trailing space.
|
||||
```rust
|
||||
// Comment for this struct
|
||||
struct Lighthouse {}
|
||||
|
||||
fn make_blockchain() {} // A comment on the same line after a space
|
||||
```
|
||||
|
||||
**Doc Comments**
|
||||
|
||||
* The ``///`` is used to generate comments for Docs.
|
||||
* The comments should come before attributes.
|
||||
|
||||
```rust
|
||||
/// Stores the core configuration for this Lighthouse instance.
|
||||
/// This struct is general, other components may implement more
|
||||
/// specialized config structs.
|
||||
#[derive(Clone)]
|
||||
pub struct LighthouseConfig {
|
||||
pub data_dir: PathBuf,
|
||||
pub p2p_listen_port: u16,
|
||||
}
|
||||
```
|
@ -11,6 +11,7 @@ compare_fields = { path = "../utils/compare_fields" }
|
||||
compare_fields_derive = { path = "../utils/compare_fields_derive" }
|
||||
dirs = "1.0"
|
||||
derivative = "1.0"
|
||||
eth2_interop_keypairs = { path = "../utils/eth2_interop_keypairs" }
|
||||
ethereum-types = "0.5"
|
||||
hashing = { path = "../utils/hashing" }
|
||||
hex = "0.3"
|
||||
|
@ -1,4 +1,5 @@
|
||||
use crate::test_utils::{graffiti_from_hex_str, TestRandom};
|
||||
use crate::test_utils::TestRandom;
|
||||
use crate::utils::graffiti_from_hex_str;
|
||||
use crate::*;
|
||||
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
|
@ -134,13 +134,13 @@ where
|
||||
pub finalized_checkpoint: Checkpoint,
|
||||
|
||||
// Caching (not in the spec)
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing, skip_deserializing)]
|
||||
#[ssz(skip_serializing)]
|
||||
#[ssz(skip_deserializing)]
|
||||
#[tree_hash(skip_hashing)]
|
||||
#[test_random(default)]
|
||||
pub committee_caches: [CommitteeCache; CACHED_EPOCHS],
|
||||
#[serde(default)]
|
||||
#[serde(skip_serializing, skip_deserializing)]
|
||||
#[ssz(skip_serializing)]
|
||||
#[ssz(skip_deserializing)]
|
||||
#[tree_hash(skip_hashing)]
|
||||
|
@ -1,7 +1,7 @@
|
||||
use crate::*;
|
||||
use int_to_bytes::int_to_bytes4;
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use test_utils::{u8_from_hex_str, u8_to_hex_str};
|
||||
use utils::{u8_from_hex_str, u8_to_hex_str};
|
||||
|
||||
/// Each of the BLS signature domains.
|
||||
///
|
||||
|
@ -1,7 +1,6 @@
|
||||
use crate::{
|
||||
test_utils::{fork_from_hex_str, TestRandom},
|
||||
Epoch,
|
||||
};
|
||||
use crate::test_utils::TestRandom;
|
||||
use crate::utils::{fork_from_hex_str, fork_to_hex_str};
|
||||
use crate::Epoch;
|
||||
|
||||
use serde_derive::{Deserialize, Serialize};
|
||||
use ssz_derive::{Decode, Encode};
|
||||
@ -25,9 +24,15 @@ use tree_hash_derive::{CachedTreeHash, TreeHash};
|
||||
TestRandom,
|
||||
)]
|
||||
pub struct Fork {
|
||||
#[serde(deserialize_with = "fork_from_hex_str")]
|
||||
#[serde(
|
||||
serialize_with = "fork_to_hex_str",
|
||||
deserialize_with = "fork_from_hex_str"
|
||||
)]
|
||||
pub previous_version: [u8; 4],
|
||||
#[serde(deserialize_with = "fork_from_hex_str")]
|
||||
#[serde(
|
||||
serialize_with = "fork_to_hex_str",
|
||||
deserialize_with = "fork_from_hex_str"
|
||||
)]
|
||||
pub current_version: [u8; 4],
|
||||
pub epoch: Epoch,
|
||||
}
|
||||
|
@ -30,6 +30,7 @@ pub mod indexed_attestation;
|
||||
pub mod pending_attestation;
|
||||
pub mod proposer_slashing;
|
||||
pub mod transfer;
|
||||
pub mod utils;
|
||||
pub mod voluntary_exit;
|
||||
#[macro_use]
|
||||
pub mod slot_epoch_macros;
|
||||
|
@ -133,6 +133,9 @@ impl<T: EthSpec> TestingBeaconStateBuilder<T> {
|
||||
spec,
|
||||
);
|
||||
|
||||
state.eth1_data.deposit_count = validator_count as u64;
|
||||
state.eth1_deposit_index = validator_count as u64;
|
||||
|
||||
let balances = vec![starting_balance; validator_count].into();
|
||||
|
||||
debug!("Importing {} existing validators...", validator_count);
|
||||
|
@ -1,14 +1,12 @@
|
||||
use crate::*;
|
||||
use int_to_bytes::int_to_bytes48;
|
||||
use eth2_interop_keypairs::be_private_key;
|
||||
use log::debug;
|
||||
use rayon::prelude::*;
|
||||
|
||||
/// Generates `validator_count` keypairs where the secret key is the index of the
|
||||
/// validator.
|
||||
/// Generates `validator_count` keypairs where the secret key is derived solely from the index of
|
||||
/// the validator.
|
||||
///
|
||||
/// For example, the first validator has a secret key of `int_to_bytes48(1)`, the second has
|
||||
/// `int_to_bytes48(2)` and so on. (We skip `0` as it generates a weird looking public key and is
|
||||
/// probably invalid).
|
||||
/// Uses the `eth2_interop_keypairs` crate to generate keys.
|
||||
pub fn generate_deterministic_keypairs(validator_count: usize) -> Vec<Keypair> {
|
||||
debug!(
|
||||
"Generating {} deterministic validator keypairs...",
|
||||
@ -20,6 +18,7 @@ pub fn generate_deterministic_keypairs(validator_count: usize) -> Vec<Keypair> {
|
||||
.par_iter()
|
||||
.map(|&i| generate_deterministic_keypair(i))
|
||||
.collect();
|
||||
|
||||
keypairs
|
||||
}
|
||||
|
||||
@ -27,8 +26,8 @@ pub fn generate_deterministic_keypairs(validator_count: usize) -> Vec<Keypair> {
|
||||
///
|
||||
/// This is used for testing only, and not to be used in production!
|
||||
pub fn generate_deterministic_keypair(validator_index: usize) -> Keypair {
|
||||
let secret = int_to_bytes48(validator_index as u64 + 1000);
|
||||
let sk = SecretKey::from_bytes(&secret).unwrap();
|
||||
let sk = SecretKey::from_bytes(&be_private_key(validator_index))
|
||||
.expect("be_private_key always returns valid keys");
|
||||
let pk = PublicKey::from_secret_key(&sk);
|
||||
Keypair { sk, pk }
|
||||
}
|
||||
|
@ -3,7 +3,6 @@ mod macros;
|
||||
mod builders;
|
||||
mod generate_deterministic_keypairs;
|
||||
mod keypairs_file;
|
||||
mod serde_utils;
|
||||
mod test_random;
|
||||
|
||||
pub use builders::*;
|
||||
@ -14,5 +13,4 @@ pub use rand::{
|
||||
RngCore,
|
||||
{prng::XorShiftRng, SeedableRng},
|
||||
};
|
||||
pub use serde_utils::{fork_from_hex_str, graffiti_from_hex_str, u8_from_hex_str, u8_to_hex_str};
|
||||
pub use test_random::TestRandom;
|
||||
|
eth2/types/src/utils.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
mod serde_utils;

pub use serde_utils::*;
@ -1,3 +1,4 @@
|
||||
use hex;
|
||||
use serde::de::Error;
|
||||
use serde::{Deserialize, Deserializer, Serializer};
|
||||
|
||||
@ -32,7 +33,7 @@ where
|
||||
let mut array = [0 as u8; FORK_BYTES_LEN];
|
||||
let decoded: Vec<u8> = hex::decode(&s.as_str()[2..]).map_err(D::Error::custom)?;
|
||||
|
||||
if decoded.len() > FORK_BYTES_LEN {
|
||||
if decoded.len() != FORK_BYTES_LEN {
|
||||
return Err(D::Error::custom("Fork length too long"));
|
||||
}
|
||||
|
||||
@ -45,6 +46,17 @@ where
|
||||
Ok(array)
|
||||
}
|
||||
|
||||
// #[allow(clippy::trivially_copy_pass_by_ref)] // Serde requires the `byte` to be a ref.
|
||||
pub fn fork_to_hex_str<S>(bytes: &[u8; 4], serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut hex_string: String = "0x".to_string();
|
||||
hex_string.push_str(&hex::encode(&bytes));
|
||||
|
||||
serializer.serialize_str(&hex_string)
|
||||
}
|
||||
|
||||
pub fn graffiti_from_hex_str<'de, D>(deserializer: D) -> Result<[u8; GRAFFITI_BYTES_LEN], D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
eth2/utils/eth2_interop_keypairs/Cargo.toml (new file, 11 lines)
@@ -0,0 +1,11 @@
[package]
name = "eth2_interop_keypairs"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
num-bigint = "0.2"
eth2_hashing = "0.1"
eth2/utils/eth2_interop_keypairs/src/lib.rs (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
//! Produces the "deterministic" validator private keys used for inter-operability testing for
|
||||
//! Ethereum 2.0 clients.
|
||||
//!
|
||||
//! Each private key is the first hash in the sha2 hash-chain that is less than 2^255. As such,
|
||||
//! keys generated here are **not secret** and are **not for production use**.
|
||||
//!
|
||||
//! Note: these keys have not been tested against a reference implementation, yet.
|
||||
|
||||
use eth2_hashing::hash;
|
||||
use num_bigint::BigUint;
|
||||
|
||||
pub const CURVE_ORDER_BITS: usize = 255;
|
||||
pub const PRIVATE_KEY_BYTES: usize = 48;
|
||||
pub const HASH_BYTES: usize = 32;
|
||||
|
||||
fn hash_big_int_le(uint: BigUint) -> BigUint {
|
||||
let mut preimage = uint.to_bytes_le();
|
||||
preimage.resize(32, 0_u8);
|
||||
BigUint::from_bytes_le(&hash(&preimage))
|
||||
}
|
||||
|
||||
fn private_key(validator_index: usize) -> BigUint {
|
||||
let mut key = BigUint::from(validator_index);
|
||||
loop {
|
||||
key = hash_big_int_le(key);
|
||||
if key.bits() <= CURVE_ORDER_BITS {
|
||||
break key;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates an **unsafe** BLS12-381 private key for the given validator index, where that private
|
||||
/// key is represented in big-endian bytes.
|
||||
pub fn be_private_key(validator_index: usize) -> [u8; PRIVATE_KEY_BYTES] {
|
||||
let vec = private_key(validator_index).to_bytes_be();
|
||||
|
||||
let mut out = [0; PRIVATE_KEY_BYTES];
|
||||
out[PRIVATE_KEY_BYTES - vec.len()..PRIVATE_KEY_BYTES].copy_from_slice(&vec);
|
||||
out
|
||||
}
|
||||
|
||||
/// Generates an **unsafe** BLS12-381 private key for the given validator index, where that private
|
||||
/// key is represented in little-endian bytes.
|
||||
pub fn le_private_key(validator_index: usize) -> [u8; PRIVATE_KEY_BYTES] {
|
||||
let vec = private_key(validator_index).to_bytes_le();
|
||||
|
||||
let mut out = [0; PRIVATE_KEY_BYTES];
|
||||
out[0..vec.len()].copy_from_slice(&vec);
|
||||
out
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    fn flip(vec: &[u8]) -> Vec<u8> {
        let len = vec.len();
        let mut out = vec![0; len];
        for i in 0..len {
            out[len - 1 - i] = vec[i];
        }
        out
    }

    fn pad_le_bls(mut vec: Vec<u8>) -> Vec<u8> {
        vec.resize(PRIVATE_KEY_BYTES, 0_u8);
        vec
    }

    fn pad_be_bls(mut vec: Vec<u8>) -> Vec<u8> {
        let mut out = vec![0; PRIVATE_KEY_BYTES - vec.len()];
        out.append(&mut vec);
        out
    }

    fn pad_le_hash(index: usize) -> Vec<u8> {
        let mut vec = index.to_le_bytes().to_vec();
        vec.resize(HASH_BYTES, 0_u8);
        vec
    }

    fn multihash(index: usize, rounds: usize) -> Vec<u8> {
        let mut vec = pad_le_hash(index);
        for _ in 0..rounds {
            vec = hash(&vec);
        }
        vec
    }

    fn compare(validator_index: usize, preimage: &[u8]) {
        assert_eq!(
            &le_private_key(validator_index)[..],
            &pad_le_bls(hash(preimage))[..]
        );
        assert_eq!(
            &be_private_key(validator_index)[..],
            &pad_be_bls(flip(&hash(preimage)))[..]
        );
    }

    #[test]
    fn consistency() {
        for i in 0..256 {
            let le = BigUint::from_bytes_le(&le_private_key(i));
            let be = BigUint::from_bytes_be(&be_private_key(i));
            assert_eq!(le, be);
        }
    }

    #[test]
    fn non_repeats() {
        // These indices only need one hash to be in the curve order.
        compare(0, &pad_le_hash(0));
        compare(3, &pad_le_hash(3));
    }

    #[test]
    fn repeats() {
        // Index 5 needs 5x hashes to get into the curve order.
        compare(5, &multihash(5, 5));
    }

    #[test]
    fn doesnt_panic() {
        for i in 0..256 {
            be_private_key(i);
            le_private_key(i);
        }
    }
}
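As the module docs above describe, each key is produced by repeatedly hashing the little-endian validator index until the result fits under the curve order. A minimal sketch of consuming the crate, assuming `eth2_interop_keypairs` is added as a dependency; the index range and output formatting are illustrative only:

use eth2_interop_keypairs::be_private_key;

fn main() {
    // Derive and print the first few deterministic interop private keys.
    // These keys are public by construction -- never use them outside testing.
    for validator_index in 0..4 {
        let key = be_private_key(validator_index);
        // Render the 48-byte big-endian key as lowercase hex.
        let hex: String = key.iter().map(|byte| format!("{:02x}", byte)).collect();
        println!("validator {}: 0x{}", validator_index, hex);
    }
}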
1 tests/cli_util/.gitignore vendored Normal file
@ -0,0 +1 @@
genesis_state.yaml
15 tests/cli_util/Cargo.toml Normal file
@ -0,0 +1,15 @@
[package]
name = "cli_util"
version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
clap = "2.33"
log = "0.4"
serde = "1.0"
serde_yaml = "0.8"
simple_logger = "1.0"
types = { path = "../../eth2/types" }
118 tests/cli_util/src/main.rs Normal file
@ -0,0 +1,118 @@
#[macro_use]
extern crate log;

use clap::{App, Arg, SubCommand};
use std::fs::File;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use types::{test_utils::TestingBeaconStateBuilder, EthSpec, MainnetEthSpec, MinimalEthSpec};

fn main() {
    simple_logger::init().expect("logger should initialize");

    let matches = App::new("Lighthouse Testing CLI Tool")
        .version("0.1.0")
        .author("Paul Hauner <paul@sigmaprime.io>")
        .about("Performs various testing-related tasks.")
        .subcommand(
            SubCommand::with_name("genesis_yaml")
                .about("Generates a genesis YAML file")
                .version("0.1.0")
                .author("Paul Hauner <paul@sigmaprime.io>")
                .arg(
                    Arg::with_name("num_validators")
                        .short("n")
                        .value_name("INTEGER")
                        .takes_value(true)
                        .required(true)
                        .help("Number of initial validators."),
                )
                .arg(
                    Arg::with_name("genesis_time")
                        .short("g")
                        .value_name("INTEGER")
                        .takes_value(true)
                        .required(false)
                        .help("Eth2 genesis time (seconds since UNIX epoch)."),
                )
                .arg(
                    Arg::with_name("spec")
                        .short("s")
                        .value_name("STRING")
                        .takes_value(true)
                        .required(true)
                        .possible_values(&["minimal", "mainnet"])
                        .default_value("minimal")
.help("Eth2 genesis time (seconds since UNIX epoch)."),
|
||||
                )
                .arg(
                    Arg::with_name("output_file")
                        .short("f")
                        .value_name("PATH")
                        .takes_value(true)
                        .default_value("./genesis_state.yaml")
                        .help("Output file for generated state."),
                ),
        )
        .get_matches();

    if let Some(matches) = matches.subcommand_matches("genesis_yaml") {
        let num_validators = matches
            .value_of("num_validators")
.expect("slog requires num_validators")
|
||||
            .parse::<usize>()
            .expect("num_validators must be a valid integer");

        let genesis_time = if let Some(string) = matches.value_of("genesis_time") {
            string
                .parse::<u64>()
                .expect("genesis_time must be a valid integer")
        } else {
            warn!("No genesis time supplied via CLI, using the current time.");
            SystemTime::now()
                .duration_since(UNIX_EPOCH)
                .expect("should obtain time since unix epoch")
                .as_secs()
        };

        let file = matches
            .value_of("output_file")
.expect("slog requires output file")
|
||||
            .parse::<PathBuf>()
            .expect("output_file must be a valid path");

        info!(
            "Creating genesis state with {} validators and genesis time {}.",
            num_validators, genesis_time
        );

        match matches.value_of("spec").expect("spec is required by clap") {
            "minimal" => genesis_yaml::<MinimalEthSpec>(num_validators, genesis_time, file),
            "mainnet" => genesis_yaml::<MainnetEthSpec>(num_validators, genesis_time, file),
            _ => unreachable!("guarded by clap possible_values"),
        };

        info!("Genesis state YAML file created. Exiting successfully.");
    } else {
        error!("No subcommand supplied.")
    }
}

/// Creates a genesis state and writes it to a YAML file.
fn genesis_yaml<T: EthSpec>(validator_count: usize, genesis_time: u64, output: PathBuf) {
    let spec = &T::default_spec();

    let builder: TestingBeaconStateBuilder<T> =
        TestingBeaconStateBuilder::from_default_keypairs_file_if_exists(validator_count, spec);

    let (mut state, _keypairs) = builder.build();
    state.genesis_time = genesis_time;

    info!("Generated state root: {:?}", state.canonical_root());

    info!("Writing genesis state to {:?}", output);

    let file = File::create(output.clone())
        .unwrap_or_else(|e| panic!("unable to create file: {:?}. Error: {:?}", output, e));
    serde_yaml::to_writer(file, &state).expect("should be able to serialize BeaconState");
}
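The YAML written by this tool is intended to be read back at beacon node startup. A rough sketch of loading it, assuming `types` and `serde_yaml` as dependencies, the default ./genesis_state.yaml output path, and the minimal spec; this loader is illustrative only, not the beacon node's actual genesis-loading code:

use std::fs::File;
use types::{BeaconState, MinimalEthSpec};

fn main() {
    // Open the file written by `cli_util genesis_yaml` and decode it into a BeaconState.
    // The spec type parameter must match the spec used when the file was generated.
    let file = File::open("genesis_state.yaml").expect("genesis_state.yaml should exist");
    let state: BeaconState<MinimalEthSpec> =
        serde_yaml::from_reader(file).expect("file should contain a valid BeaconState");
    println!(
        "Loaded genesis state: {} validators, genesis_time {}",
        state.validators.len(),
        state.genesis_time
    );
}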