
//! # Transition Blocks
//!
//! Use this tool to apply a `SignedBeaconBlock` to a `BeaconState`. Useful for benchmarking or
//! troubleshooting consensus failures.
//!
//! It can load states and blocks from file or pull them from a beaconAPI. Objects pulled from a
//! beaconAPI can be saved to disk to reduce future calls to that server.
//!
//! Logging output is controlled via the `RUST_LOG` environment variable. For example, `export
//! RUST_LOG=debug`.
//!
//! ## Examples
//!
//! ### Run using a block from a beaconAPI
//!
//! Download the 0x6c69 block and its pre-state (the state from its parent block) from the
//! beaconAPI. Advance the pre-state to the slot of the 0x6c69 block and apply that block to the
//! pre-state.
//!
//! ```ignore
//! lcli transition-blocks \
//!     --beacon-url http://localhost:5052 \
//!     --block-id 0x6c69cf50a451f1ec905e954bf1fa22970f371a72a5aa9f8e3a43a18fdd980bec \
//!     --runs 10
//! ```
//!
//! ### Download a block and pre-state from a beaconAPI to the filesystem
//!
//! Download a block and pre-state to the filesystem, without performing any transitions:
//!
//! ```ignore
//! lcli transition-blocks \
//!     --beacon-url http://localhost:5052 \
//!     --block-id 0x6c69cf50a451f1ec905e954bf1fa22970f371a72a5aa9f8e3a43a18fdd980bec \
//!     --runs 0 \
//!     --block-output-path /tmp/block-0x6c69.ssz \
//!     --pre-state-output-path /tmp/pre-state-0x6c69.ssz
//! ```
//!
//! ### Use a block and pre-state from the filesystem
//!
//! Perform a single run over the block and pre-state downloaded in the previous example and save
//! the post-state to a file:
//!
//! ```ignore
//! lcli transition-blocks \
//!     --block-path /tmp/block-0x6c69.ssz \
//!     --pre-state-path /tmp/pre-state-0x6c69.ssz \
//!     --post-state-output-path /tmp/post-state-0x6c69.ssz
//! ```
//!
//! ### Isolate block processing for benchmarking
//!
//! Try to isolate block processing as much as possible for benchmarking:
//!
//! ```ignore
//! lcli transition-blocks \
//!     --block-path /tmp/block-0x6c69.ssz \
//!     --pre-state-path /tmp/pre-state-0x6c69.ssz \
//!     --runs 10 \
//!     --exclude-cache-builds \
//!     --exclude-post-block-thc
//! ```
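//!
//! ### Additionally skip signature verification
//!
//! A further benchmarking sketch (reusing the block and pre-state downloaded above; the long flag
//! is assumed to match the `no-signature-verification` option parsed in `run` below) that also
//! skips batch signature verification:
//!
//! ```ignore
//! lcli transition-blocks \
//!     --block-path /tmp/block-0x6c69.ssz \
//!     --pre-state-path /tmp/pre-state-0x6c69.ssz \
//!     --runs 10 \
//!     --exclude-cache-builds \
//!     --exclude-post-block-thc \
//!     --no-signature-verification
//! ```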
use beacon_chain::{
    test_utils::EphemeralHarnessType, validator_pubkey_cache::ValidatorPubkeyCache,
};
use clap::ArgMatches;
use clap_utils::{parse_optional, parse_required};
use environment::{null_logger, Environment};
use eth2::{
    types::{BlockId, StateId},
    BeaconNodeHttpClient, SensitiveUrl, Timeouts,
};
use log::{debug, info};
use ssz::Encode;
use state_processing::{
    block_signature_verifier::BlockSignatureVerifier, per_block_processing, per_slot_processing,
    BlockSignatureStrategy, ConsensusContext, VerifyBlockRoot,
};
use std::borrow::Cow;
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::time::{Duration, Instant};
use store::HotColdDB;
use types::{BeaconState, ChainSpec, CloneConfig, EthSpec, Hash256, SignedBeaconBlock};
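
/// Timeout applied to all requests made to the beacon node HTTP API.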
const HTTP_TIMEOUT: Duration = Duration::from_secs(10);
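
/// Flags controlling which steps are performed and timed during each run.
///
/// - `no_signature_verification`: skip the batch verification of the block's signatures.
/// - `exclude_cache_builds`: build the state caches and tree hash cache once, up-front, so that
///   cache construction is excluded from the per-run timings.
/// - `exclude_post_block_thc`: skip the post-block tree hash cache (THC) update.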
#[derive(Debug)]
struct Config {
    no_signature_verification: bool,
    exclude_cache_builds: bool,
    exclude_post_block_thc: bool,
}
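
/// Entry point for the `transition-blocks` subcommand.
///
/// Loads a `SignedBeaconBlock` and its pre-`BeaconState` (from the filesystem or from a beaconAPI),
/// performs `--runs` transitions via `do_transition`, and optionally writes the block, pre-state
/// and post-state to disk.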
pub fn run<T: EthSpec>(mut env: Environment<T>, matches: &ArgMatches) -> Result<(), String> {
    let spec = &T::default_spec();
    let executor = env.core_context().executor;

    /*
     * Parse (most) CLI arguments.
     */
    let pre_state_path: Option<PathBuf> = parse_optional(matches, "pre-state-path")?;
    let block_path: Option<PathBuf> = parse_optional(matches, "block-path")?;
    let post_state_output_path: Option<PathBuf> =
        parse_optional(matches, "post-state-output-path")?;
    let pre_state_output_path: Option<PathBuf> = parse_optional(matches, "pre-state-output-path")?;
    let block_output_path: Option<PathBuf> = parse_optional(matches, "block-output-path")?;
    let beacon_url: Option<SensitiveUrl> = parse_optional(matches, "beacon-url")?;
    let runs: usize = parse_required(matches, "runs")?;
    let config = Config {
        no_signature_verification: matches.is_present("no-signature-verification"),
        exclude_cache_builds: matches.is_present("exclude-cache-builds"),
        exclude_post_block_thc: matches.is_present("exclude-post-block-thc"),
    };

    info!("Using {} spec", T::spec_name());
    info!("Doing {} runs", runs);
    info!("{:?}", &config);
    /*
     * Load the block and pre-state from disk or beaconAPI URL.
     */
    let (mut pre_state, mut state_root_opt, block) = match (pre_state_path, block_path, beacon_url)
    {
        (Some(pre_state_path), Some(block_path), None) => {
            info!("Block path: {:?}", block_path);
            info!("Pre-state path: {:?}", pre_state_path);
            let pre_state = load_from_ssz_with(&pre_state_path, spec, BeaconState::from_ssz_bytes)?;
            let block = load_from_ssz_with(&block_path, spec, SignedBeaconBlock::from_ssz_bytes)?;
            (pre_state, None, block)
        }
        (None, None, Some(beacon_url)) => {
            let block_id: BlockId = parse_required(matches, "block-id")?;
            let client = BeaconNodeHttpClient::new(beacon_url, Timeouts::set_all(HTTP_TIMEOUT));
            executor
                .handle()
                .ok_or("shutdown in progress")?
                .block_on(async move {
                    let block = client
                        .get_beacon_blocks(block_id)
                        .await
                        .map_err(|e| format!("Failed to download block: {:?}", e))?
                        .ok_or_else(|| format!("Unable to locate block at {:?}", block_id))?
                        .data;

                    if block.slot() == spec.genesis_slot {
                        return Err("Cannot run on the genesis block".to_string());
                    }

                    let parent_block: SignedBeaconBlock<T> = client
                        .get_beacon_blocks(BlockId::Root(block.parent_root()))
                        .await
                        .map_err(|e| format!("Failed to download parent block: {:?}", e))?
                        .ok_or_else(|| format!("Unable to locate parent block at {:?}", block_id))?
                        .data;

                    let state_root = parent_block.state_root();
                    let state_id = StateId::Root(state_root);
                    let pre_state = client
                        .get_debug_beacon_states::<T>(state_id)
                        .await
                        .map_err(|e| format!("Failed to download state: {:?}", e))?
                        .ok_or_else(|| format!("Unable to locate state at {:?}", state_id))?
                        .data;

                    Ok((pre_state, Some(state_root), block))
                })
                .map_err(|e| format!("Failed to complete task: {:?}", e))?
        }
        _ => {
            return Err(
                "must supply *both* --pre-state-path and --block-path *or* only --beacon-url"
                    .into(),
            )
        }
    };

    // Compute the block root.
    let block_root = block.canonical_root();

    /*
     * Create a `BeaconStore` and `ValidatorPubkeyCache` for block signature verification.
     */
    let store = HotColdDB::open_ephemeral(
        <_>::default(),
        spec.clone(),
        null_logger().map_err(|e| format!("Failed to create null_logger: {:?}", e))?,
    )
    .map_err(|e| format!("Failed to create ephemeral store: {:?}", e))?;
    let store = Arc::new(store);

    debug!("Building pubkey cache (might take some time)");
    let validator_pubkey_cache = ValidatorPubkeyCache::new(&pre_state, store)
        .map_err(|e| format!("Failed to create pubkey cache: {:?}", e))?;

    /*
     * If cache builds are excluded from the timings, build them early so they are available for
     * each run.
     */
    if config.exclude_cache_builds {
        pre_state
            .build_all_caches(spec)
            .map_err(|e| format!("Unable to build caches: {:?}", e))?;
        let state_root = pre_state
            .update_tree_hash_cache()
            .map_err(|e| format!("Unable to build THC: {:?}", e))?;

        if state_root_opt.map_or(false, |expected| expected != state_root) {
            return Err(format!(
                "State root mismatch! Expected {}, computed {}",
                state_root_opt.unwrap(),
                state_root
            ));
        }

        state_root_opt = Some(state_root);
    }

    /*
     * Perform the core "runs".
     */
    let mut output_post_state = None;
    for i in 0..runs {
        let pre_state = pre_state.clone_with(CloneConfig::all());
        let block = block.clone();

        let start = Instant::now();
        let post_state = do_transition(
            pre_state,
            block_root,
            block,
            state_root_opt,
            &config,
            &validator_pubkey_cache,
            spec,
        )?;
        let duration = Instant::now().duration_since(start);
        info!("Run {}: {:?}", i, duration);

        if output_post_state.is_none() {
            output_post_state = Some(post_state)
        }
    }
    /*
     * Write artifacts to disk, if required.
     */
    if let Some(path) = post_state_output_path {
        let output_post_state = output_post_state.ok_or_else(|| {
            format!(
                "Post state was not computed, cannot save to disk (runs = {})",
                runs
            )
        })?;
        let mut output_file =
            File::create(path).map_err(|e| format!("Unable to create output file: {:?}", e))?;
        output_file
            .write_all(&output_post_state.as_ssz_bytes())
            .map_err(|e| format!("Unable to write to output file: {:?}", e))?;
    }

    if let Some(path) = pre_state_output_path {
        let mut output_file =
            File::create(path).map_err(|e| format!("Unable to create output file: {:?}", e))?;
        output_file
            .write_all(&pre_state.as_ssz_bytes())
            .map_err(|e| format!("Unable to write to output file: {:?}", e))?;
    }

    if let Some(path) = block_output_path {
        let mut output_file =
            File::create(path).map_err(|e| format!("Unable to create output file: {:?}", e))?;
        output_file
            .write_all(&block.as_ssz_bytes())
            .map_err(|e| format!("Unable to write to output file: {:?}", e))?;
    }
    Ok(())
}
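
/// Applies `block` to `pre_state`, mirroring the main stages of block verification:
///
/// 1. Build the state caches and tree hash cache (unless already built up-front via
///    `exclude_cache_builds`).
/// 2. Advance `pre_state` to the block's slot with `per_slot_processing`.
/// 3. Batch-verify the block's signatures (unless `no_signature_verification` is set).
/// 4. Apply the block with `per_block_processing`, with signature verification disabled there
///    since it is handled separately above.
/// 5. Update the tree hash cache for the post-state (unless `exclude_post_block_thc` is set).
///
/// The duration of each stage is logged at `debug` level.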
fn do_transition<T: EthSpec>(
    mut pre_state: BeaconState<T>,
    block_root: Hash256,
    block: SignedBeaconBlock<T>,
    mut state_root_opt: Option<Hash256>,
    config: &Config,
    validator_pubkey_cache: &ValidatorPubkeyCache<EphemeralHarnessType<T>>,
    spec: &ChainSpec,
) -> Result<BeaconState<T>, String> {
    if !config.exclude_cache_builds {
        let t = Instant::now();
        pre_state
            .build_all_caches(spec)
            .map_err(|e| format!("Unable to build caches: {:?}", e))?;
        debug!("Build caches: {:?}", t.elapsed());

        let t = Instant::now();
        let state_root = pre_state
            .update_tree_hash_cache()
            .map_err(|e| format!("Unable to build tree hash cache: {:?}", e))?;
        debug!("Initial tree hash: {:?}", t.elapsed());

        if state_root_opt.map_or(false, |expected| expected != state_root) {
            return Err(format!(
                "State root mismatch! Expected {}, computed {}",
                state_root_opt.unwrap(),
                state_root
            ));
        }

        state_root_opt = Some(state_root);
    }

    let state_root = state_root_opt.ok_or("Failed to compute state root, internal error")?;
    // Transition the parent state to the block slot.
    let t = Instant::now();
    for i in pre_state.slot().as_u64()..block.slot().as_u64() {
        per_slot_processing(&mut pre_state, Some(state_root), spec)
            .map_err(|e| format!("Failed to advance slot on iteration {}: {:?}", i, e))?;
    }
    debug!("Slot processing: {:?}", t.elapsed());

    let t = Instant::now();
    pre_state
        .build_all_caches(spec)
        .map_err(|e| format!("Unable to build caches: {:?}", e))?;
    debug!("Build all caches (again): {:?}", t.elapsed());

    if !config.no_signature_verification {
        let get_pubkey = move |validator_index| {
            validator_pubkey_cache
                .get(validator_index)
                .map(Cow::Borrowed)
        };

        let decompressor = move |pk_bytes| {
            // Map compressed pubkey to validator index.
            let validator_index = validator_pubkey_cache.get_index(pk_bytes)?;
            // Map validator index to pubkey (respecting guard on unknown validators).
            get_pubkey(validator_index)
        };

        let t = Instant::now();
        BlockSignatureVerifier::verify_entire_block(
            &pre_state,
            get_pubkey,
            decompressor,
            &block,
            Some(block_root),
            Some(block.message().proposer_index()),
            spec,
        )
        .map_err(|e| format!("Invalid block signature: {:?}", e))?;
        debug!("Batch verify block signatures: {:?}", t.elapsed());
    }
    let t = Instant::now();
    let mut ctxt = ConsensusContext::new(pre_state.slot())
        .set_current_block_root(block_root)
        .set_proposer_index(block.message().proposer_index());
    per_block_processing(
        &mut pre_state,
        &block,
        BlockSignatureStrategy::NoVerification,
        VerifyBlockRoot::True,
        &mut ctxt,
        spec,
    )
    .map_err(|e| format!("State transition failed: {:?}", e))?;
    debug!("Process block: {:?}", t.elapsed());

    if !config.exclude_post_block_thc {
        let t = Instant::now();
        pre_state
            .update_tree_hash_cache()
            .map_err(|e| format!("Unable to build tree hash cache: {:?}", e))?;
        debug!("Post-block tree hash: {:?}", t.elapsed());
    }
    Ok(pre_state)
}
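
/// Reads the file at `path` and decodes its contents with `decoder`, logging how long the SSZ
/// decode took.
///
/// A minimal usage sketch (mirroring how `run` loads a pre-state; the path and the choice of
/// `MainnetEthSpec` are illustrative):
///
/// ```ignore
/// use types::MainnetEthSpec;
///
/// let spec = &MainnetEthSpec::default_spec();
/// let state: BeaconState<MainnetEthSpec> = load_from_ssz_with(
///     Path::new("/tmp/pre-state-0x6c69.ssz"),
///     spec,
///     BeaconState::from_ssz_bytes,
/// )?;
/// ```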
pub fn load_from_ssz_with<T>(
    path: &Path,
    spec: &ChainSpec,
    decoder: impl FnOnce(&[u8], &ChainSpec) -> Result<T, ssz::DecodeError>,
) -> Result<T, String> {
    let mut file =
        File::open(path).map_err(|e| format!("Unable to open file {:?}: {:?}", path, e))?;
    let mut bytes = vec![];
    file.read_to_end(&mut bytes)
        .map_err(|e| format!("Unable to read from file {:?}: {:?}", path, e))?;

    let t = Instant::now();
    let result = decoder(&bytes, spec).map_err(|e| format!("Ssz decode failed: {:?}", e));
    debug!("SSZ decoding {}: {:?}", path.display(), t.elapsed());
    result
}