Added Merkle Proof Generation for Beacon State (#3674)
## Issue Addressed This PR partially addresses #3651. ## Proposed Changes This PR adds the following methods: * a new method on the `TreeHash` trait, `hash_tree_leaves`, which returns all the Merkle leaves of the SSZ object; * a new method on `BeaconState`, `compute_merkle_proof`, which generates a specific Merkle proof for a given depth and index, using `hash_tree_leaves` as the leaves function. ## Additional Info Here is some rationale on why I decided to go down this route: adding a new function to a commonly used trait is a pain, but it was necessary to make sure we have all the Merkle leaves for every object — that is why I added only `hash_tree_leaves` to the trait and not `compute_merkle_proof` as well. Although adding `compute_merkle_proof` to the trait would also make sense, it would introduce code duplication and a harder review, and we currently need it for only one specific object in one specific use case — so, in my humble opinion, it is not worth the effort yet. Co-authored-by: Michael Sproul <micsproul@gmail.com>
This commit is contained in:
parent
84c7d8cc70
commit
9d6209725f
1
Cargo.lock
generated
1
Cargo.lock
generated
@ -6908,6 +6908,7 @@ dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"maplit",
|
||||
"merkle_proof",
|
||||
"parking_lot 0.12.1",
|
||||
"rand 0.8.5",
|
||||
"rand_xorshift",
|
||||
|
@ -9,6 +9,7 @@ name = "benches"
|
||||
harness = false
|
||||
|
||||
[dependencies]
|
||||
merkle_proof = { path = "../../consensus/merkle_proof" }
|
||||
bls = { path = "../../crypto/bls" }
|
||||
compare_fields = { path = "../../common/compare_fields" }
|
||||
compare_fields_derive = { path = "../../common/compare_fields_derive" }
|
||||
|
@ -124,6 +124,8 @@ pub enum Error {
|
||||
current_epoch: Epoch,
|
||||
epoch: Epoch,
|
||||
},
|
||||
IndexNotSupported(usize),
|
||||
MerkleTreeError(merkle_proof::MerkleTreeError),
|
||||
}
|
||||
|
||||
/// Control whether an epoch-indexed field can be indexed at the next epoch or not.
|
||||
@ -1669,6 +1671,57 @@ impl<T: EthSpec> BeaconState<T> {
|
||||
};
|
||||
Ok(sync_committee)
|
||||
}
|
||||
|
||||
pub fn compute_merkle_proof(
|
||||
&mut self,
|
||||
generalized_index: usize,
|
||||
) -> Result<Vec<Hash256>, Error> {
|
||||
// 1. Convert generalized index to field index.
|
||||
let field_index = match generalized_index {
|
||||
light_client_update::CURRENT_SYNC_COMMITTEE_INDEX
|
||||
| light_client_update::NEXT_SYNC_COMMITTEE_INDEX => {
|
||||
// Sync committees are top-level fields, subtract off the generalized indices
|
||||
// for the internal nodes. Result should be 22 or 23, the field offset of the committee
|
||||
// in the `BeaconState`:
|
||||
// https://github.com/ethereum/consensus-specs/blob/dev/specs/altair/beacon-chain.md#beaconstate
|
||||
generalized_index
|
||||
.checked_sub(tree_hash_cache::NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES)
|
||||
.ok_or(Error::IndexNotSupported(generalized_index))?
|
||||
}
|
||||
light_client_update::FINALIZED_ROOT_INDEX => {
|
||||
// Finalized root is the right child of `finalized_checkpoint`, divide by two to get
|
||||
// the generalized index of `state.finalized_checkpoint`.
|
||||
let finalized_checkpoint_generalized_index = generalized_index / 2;
|
||||
// Subtract off the internal nodes. Result should be 105/2 - 32 = 20 which matches
|
||||
// position of `finalized_checkpoint` in `BeaconState`.
|
||||
finalized_checkpoint_generalized_index
|
||||
.checked_sub(tree_hash_cache::NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES)
|
||||
.ok_or(Error::IndexNotSupported(generalized_index))?
|
||||
}
|
||||
_ => return Err(Error::IndexNotSupported(generalized_index)),
|
||||
};
|
||||
|
||||
// 2. Get all `BeaconState` leaves.
|
||||
let cache = self.tree_hash_cache_mut().take();
|
||||
let leaves = if let Some(mut cache) = cache {
|
||||
cache.recalculate_tree_hash_leaves(self)?
|
||||
} else {
|
||||
return Err(Error::TreeHashCacheNotInitialized);
|
||||
};
|
||||
|
||||
// 3. Make deposit tree.
|
||||
// Use the depth of the `BeaconState` fields (i.e. `log2(32) = 5`).
|
||||
let depth = light_client_update::CURRENT_SYNC_COMMITTEE_PROOF_LEN;
|
||||
let tree = merkle_proof::MerkleTree::create(&leaves, depth);
|
||||
let (_, mut proof) = tree.generate_proof(field_index, depth)?;
|
||||
|
||||
// 4. If we're proving the finalized root, patch in the finalized epoch to complete the proof.
|
||||
if generalized_index == light_client_update::FINALIZED_ROOT_INDEX {
|
||||
proof.insert(0, self.finalized_checkpoint().epoch.tree_hash_root());
|
||||
}
|
||||
|
||||
Ok(proof)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<RelativeEpochError> for Error {
|
||||
@ -1701,6 +1754,12 @@ impl From<tree_hash::Error> for Error {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<merkle_proof::MerkleTreeError> for Error {
|
||||
fn from(e: merkle_proof::MerkleTreeError) -> Error {
|
||||
Error::MerkleTreeError(e)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ArithError> for Error {
|
||||
fn from(e: ArithError) -> Error {
|
||||
Error::ArithError(e)
|
||||
|
@ -18,7 +18,7 @@ use tree_hash::{mix_in_length, MerkleHasher, TreeHash};
|
||||
///
|
||||
/// This constant is set with the assumption that there are `> 16` and `<= 32` fields on the
|
||||
/// `BeaconState`. **Tree hashing will fail if this value is set incorrectly.**
|
||||
const NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES: usize = 32;
|
||||
pub const NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES: usize = 32;
|
||||
|
||||
/// The number of nodes in the Merkle tree of a validator record.
|
||||
const NODES_PER_VALIDATOR: usize = 15;
|
||||
@ -210,6 +210,90 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn recalculate_tree_hash_leaves(
|
||||
&mut self,
|
||||
state: &BeaconState<T>,
|
||||
) -> Result<Vec<Hash256>, Error> {
|
||||
let mut leaves = vec![
|
||||
// Genesis data leaves.
|
||||
state.genesis_time().tree_hash_root(),
|
||||
state.genesis_validators_root().tree_hash_root(),
|
||||
// Current fork data leaves.
|
||||
state.slot().tree_hash_root(),
|
||||
state.fork().tree_hash_root(),
|
||||
state.latest_block_header().tree_hash_root(),
|
||||
// Roots leaves.
|
||||
state
|
||||
.block_roots()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.block_roots)?,
|
||||
state
|
||||
.state_roots()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.state_roots)?,
|
||||
state
|
||||
.historical_roots()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.historical_roots)?,
|
||||
// Eth1 Data leaves.
|
||||
state.eth1_data().tree_hash_root(),
|
||||
self.eth1_data_votes.recalculate_tree_hash_root(state)?,
|
||||
state.eth1_deposit_index().tree_hash_root(),
|
||||
// Validator leaves.
|
||||
self.validators
|
||||
.recalculate_tree_hash_root(state.validators())?,
|
||||
state
|
||||
.balances()
|
||||
.recalculate_tree_hash_root(&mut self.balances_arena, &mut self.balances)?,
|
||||
state
|
||||
.randao_mixes()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.randao_mixes)?,
|
||||
state
|
||||
.slashings()
|
||||
.recalculate_tree_hash_root(&mut self.slashings_arena, &mut self.slashings)?,
|
||||
];
|
||||
// Participation
|
||||
if let BeaconState::Base(state) = state {
|
||||
leaves.push(state.previous_epoch_attestations.tree_hash_root());
|
||||
leaves.push(state.current_epoch_attestations.tree_hash_root());
|
||||
} else {
|
||||
leaves.push(
|
||||
self.previous_epoch_participation
|
||||
.recalculate_tree_hash_root(&ParticipationList::new(
|
||||
state.previous_epoch_participation()?,
|
||||
))?,
|
||||
);
|
||||
leaves.push(
|
||||
self.current_epoch_participation
|
||||
.recalculate_tree_hash_root(&ParticipationList::new(
|
||||
state.current_epoch_participation()?,
|
||||
))?,
|
||||
);
|
||||
}
|
||||
// Checkpoint leaves
|
||||
leaves.push(state.justification_bits().tree_hash_root());
|
||||
leaves.push(state.previous_justified_checkpoint().tree_hash_root());
|
||||
leaves.push(state.current_justified_checkpoint().tree_hash_root());
|
||||
leaves.push(state.finalized_checkpoint().tree_hash_root());
|
||||
// Inactivity & light-client sync committees (Altair and later).
|
||||
if let Ok(inactivity_scores) = state.inactivity_scores() {
|
||||
leaves.push(
|
||||
self.inactivity_scores
|
||||
.recalculate_tree_hash_root(inactivity_scores)?,
|
||||
);
|
||||
}
|
||||
if let Ok(current_sync_committee) = state.current_sync_committee() {
|
||||
leaves.push(current_sync_committee.tree_hash_root());
|
||||
}
|
||||
|
||||
if let Ok(next_sync_committee) = state.next_sync_committee() {
|
||||
leaves.push(next_sync_committee.tree_hash_root());
|
||||
}
|
||||
|
||||
// Execution payload (merge and later).
|
||||
if let Ok(payload_header) = state.latest_execution_payload_header() {
|
||||
leaves.push(payload_header.tree_hash_root());
|
||||
}
|
||||
Ok(leaves)
|
||||
}
|
||||
|
||||
/// Updates the cache and returns the tree hash root for the given `state`.
|
||||
///
|
||||
/// The provided `state` should be a descendant of the last `state` given to this function, or
|
||||
@ -246,121 +330,9 @@ impl<T: EthSpec> BeaconTreeHashCacheInner<T> {
|
||||
|
||||
let mut hasher = MerkleHasher::with_leaves(NUM_BEACON_STATE_HASH_TREE_ROOT_LEAVES);
|
||||
|
||||
hasher.write(state.genesis_time().tree_hash_root().as_bytes())?;
|
||||
hasher.write(state.genesis_validators_root().tree_hash_root().as_bytes())?;
|
||||
hasher.write(state.slot().tree_hash_root().as_bytes())?;
|
||||
hasher.write(state.fork().tree_hash_root().as_bytes())?;
|
||||
hasher.write(state.latest_block_header().tree_hash_root().as_bytes())?;
|
||||
hasher.write(
|
||||
state
|
||||
.block_roots()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.block_roots)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(
|
||||
state
|
||||
.state_roots()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.state_roots)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(
|
||||
state
|
||||
.historical_roots()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.historical_roots)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(state.eth1_data().tree_hash_root().as_bytes())?;
|
||||
hasher.write(
|
||||
self.eth1_data_votes
|
||||
.recalculate_tree_hash_root(state)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(state.eth1_deposit_index().tree_hash_root().as_bytes())?;
|
||||
hasher.write(
|
||||
self.validators
|
||||
.recalculate_tree_hash_root(state.validators())?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(
|
||||
state
|
||||
.balances()
|
||||
.recalculate_tree_hash_root(&mut self.balances_arena, &mut self.balances)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(
|
||||
state
|
||||
.randao_mixes()
|
||||
.recalculate_tree_hash_root(&mut self.fixed_arena, &mut self.randao_mixes)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(
|
||||
state
|
||||
.slashings()
|
||||
.recalculate_tree_hash_root(&mut self.slashings_arena, &mut self.slashings)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
|
||||
// Participation
|
||||
if let BeaconState::Base(state) = state {
|
||||
hasher.write(
|
||||
state
|
||||
.previous_epoch_attestations
|
||||
.tree_hash_root()
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(state.current_epoch_attestations.tree_hash_root().as_bytes())?;
|
||||
} else {
|
||||
hasher.write(
|
||||
self.previous_epoch_participation
|
||||
.recalculate_tree_hash_root(&ParticipationList::new(
|
||||
state.previous_epoch_participation()?,
|
||||
))?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(
|
||||
self.current_epoch_participation
|
||||
.recalculate_tree_hash_root(&ParticipationList::new(
|
||||
state.current_epoch_participation()?,
|
||||
))?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
}
|
||||
|
||||
hasher.write(state.justification_bits().tree_hash_root().as_bytes())?;
|
||||
hasher.write(
|
||||
state
|
||||
.previous_justified_checkpoint()
|
||||
.tree_hash_root()
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(
|
||||
state
|
||||
.current_justified_checkpoint()
|
||||
.tree_hash_root()
|
||||
.as_bytes(),
|
||||
)?;
|
||||
hasher.write(state.finalized_checkpoint().tree_hash_root().as_bytes())?;
|
||||
|
||||
// Inactivity & light-client sync committees (Altair and later).
|
||||
if let Ok(inactivity_scores) = state.inactivity_scores() {
|
||||
hasher.write(
|
||||
self.inactivity_scores
|
||||
.recalculate_tree_hash_root(inactivity_scores)?
|
||||
.as_bytes(),
|
||||
)?;
|
||||
}
|
||||
|
||||
if let Ok(current_sync_committee) = state.current_sync_committee() {
|
||||
hasher.write(current_sync_committee.tree_hash_root().as_bytes())?;
|
||||
}
|
||||
|
||||
if let Ok(next_sync_committee) = state.next_sync_committee() {
|
||||
hasher.write(next_sync_committee.tree_hash_root().as_bytes())?;
|
||||
}
|
||||
|
||||
// Execution payload (merge and later).
|
||||
if let Ok(payload_header) = state.latest_execution_payload_header() {
|
||||
hasher.write(payload_header.tree_hash_root().as_bytes())?;
|
||||
let leaves = self.recalculate_tree_hash_leaves(state)?;
|
||||
for leaf in leaves {
|
||||
hasher.write(leaf.as_bytes())?;
|
||||
}
|
||||
|
||||
let root = hasher.finish()?;
|
||||
|
@ -21,17 +21,15 @@ pub struct LightClientBootstrap<T: EthSpec> {
|
||||
}
|
||||
|
||||
impl<T: EthSpec> LightClientBootstrap<T> {
|
||||
pub fn from_beacon_state(beacon_state: BeaconState<T>) -> Result<Self, Error> {
|
||||
pub fn from_beacon_state(beacon_state: &mut BeaconState<T>) -> Result<Self, Error> {
|
||||
let mut header = beacon_state.latest_block_header().clone();
|
||||
header.state_root = beacon_state.tree_hash_root();
|
||||
let current_sync_committee_branch =
|
||||
beacon_state.compute_merkle_proof(CURRENT_SYNC_COMMITTEE_INDEX)?;
|
||||
Ok(LightClientBootstrap {
|
||||
header,
|
||||
current_sync_committee: beacon_state.current_sync_committee()?.clone(),
|
||||
/// TODO(Giulio2002): Generate Merkle Proof, this is just empty hashes
|
||||
current_sync_committee_branch: FixedVector::new(vec![
|
||||
Hash256::zero();
|
||||
CURRENT_SYNC_COMMITTEE_PROOF_LEN
|
||||
])?,
|
||||
current_sync_committee_branch: FixedVector::new(current_sync_committee_branch)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ impl<T: EthSpec> LightClientFinalityUpdate<T> {
|
||||
chain_spec: ChainSpec,
|
||||
beacon_state: BeaconState<T>,
|
||||
block: BeaconBlock<T>,
|
||||
attested_state: BeaconState<T>,
|
||||
attested_state: &mut BeaconState<T>,
|
||||
finalized_block: BeaconBlock<T>,
|
||||
) -> Result<Self, Error> {
|
||||
let altair_fork_epoch = chain_spec
|
||||
@ -60,11 +60,12 @@ impl<T: EthSpec> LightClientFinalityUpdate<T> {
|
||||
if finalized_header.tree_hash_root() != beacon_state.finalized_checkpoint().root {
|
||||
return Err(Error::InvalidFinalizedBlock);
|
||||
}
|
||||
// TODO(Giulio2002): compute proper merkle proofs.
|
||||
|
||||
let finality_branch = attested_state.compute_merkle_proof(FINALIZED_ROOT_INDEX)?;
|
||||
Ok(Self {
|
||||
attested_header: attested_header,
|
||||
finalized_header: finalized_header,
|
||||
finality_branch: FixedVector::new(vec![Hash256::zero(); FINALIZED_ROOT_PROOF_LEN])?,
|
||||
finality_branch: FixedVector::new(finality_branch)?,
|
||||
sync_aggregate: sync_aggregate.clone(),
|
||||
signature_slot: block.slot(),
|
||||
})
|
||||
|
@ -77,7 +77,7 @@ impl<T: EthSpec> LightClientUpdate<T> {
|
||||
chain_spec: ChainSpec,
|
||||
beacon_state: BeaconState<T>,
|
||||
block: BeaconBlock<T>,
|
||||
attested_state: BeaconState<T>,
|
||||
attested_state: &mut BeaconState<T>,
|
||||
finalized_block: BeaconBlock<T>,
|
||||
) -> Result<Self, Error> {
|
||||
let altair_fork_epoch = chain_spec
|
||||
@ -114,16 +114,15 @@ impl<T: EthSpec> LightClientUpdate<T> {
|
||||
if finalized_header.tree_hash_root() != beacon_state.finalized_checkpoint().root {
|
||||
return Err(Error::InvalidFinalizedBlock);
|
||||
}
|
||||
// TODO(Giulio2002): compute proper merkle proofs.
|
||||
let next_sync_committee_branch =
|
||||
attested_state.compute_merkle_proof(NEXT_SYNC_COMMITTEE_INDEX)?;
|
||||
let finality_branch = attested_state.compute_merkle_proof(FINALIZED_ROOT_INDEX)?;
|
||||
Ok(Self {
|
||||
attested_header,
|
||||
next_sync_committee: attested_state.next_sync_committee()?.clone(),
|
||||
next_sync_committee_branch: FixedVector::new(vec![
|
||||
Hash256::zero();
|
||||
NEXT_SYNC_COMMITTEE_PROOF_LEN
|
||||
])?,
|
||||
next_sync_committee_branch: FixedVector::new(next_sync_committee_branch)?,
|
||||
finalized_header,
|
||||
finality_branch: FixedVector::new(vec![Hash256::zero(); FINALIZED_ROOT_PROOF_LEN])?,
|
||||
finality_branch: FixedVector::new(finality_branch)?,
|
||||
sync_aggregate: sync_aggregate.clone(),
|
||||
signature_slot: block.slot(),
|
||||
})
|
||||
|
@ -18,6 +18,7 @@ mod fork;
|
||||
mod fork_choice;
|
||||
mod genesis_initialization;
|
||||
mod genesis_validity;
|
||||
mod merkle_proof_validity;
|
||||
mod operations;
|
||||
mod rewards;
|
||||
mod sanity_blocks;
|
||||
@ -41,6 +42,7 @@ pub use epoch_processing::*;
|
||||
pub use fork::ForkTest;
|
||||
pub use genesis_initialization::*;
|
||||
pub use genesis_validity::*;
|
||||
pub use merkle_proof_validity::*;
|
||||
pub use operations::*;
|
||||
pub use rewards::RewardsTest;
|
||||
pub use sanity_blocks::*;
|
||||
|
83
testing/ef_tests/src/cases/merkle_proof_validity.rs
Normal file
83
testing/ef_tests/src/cases/merkle_proof_validity.rs
Normal file
@ -0,0 +1,83 @@
|
||||
use super::*;
|
||||
use crate::decode::{ssz_decode_state, yaml_decode_file};
|
||||
use serde_derive::Deserialize;
|
||||
use std::path::Path;
|
||||
use tree_hash::Hash256;
|
||||
use types::{BeaconState, EthSpec, ForkName};
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct Metadata {
|
||||
#[serde(rename(deserialize = "description"))]
|
||||
_description: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct MerkleProof {
|
||||
pub leaf: Hash256,
|
||||
pub leaf_index: usize,
|
||||
pub branch: Vec<Hash256>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
#[serde(bound = "E: EthSpec")]
|
||||
pub struct MerkleProofValidity<E: EthSpec> {
|
||||
pub metadata: Option<Metadata>,
|
||||
pub state: BeaconState<E>,
|
||||
pub merkle_proof: MerkleProof,
|
||||
}
|
||||
|
||||
impl<E: EthSpec> LoadCase for MerkleProofValidity<E> {
|
||||
fn load_from_dir(path: &Path, fork_name: ForkName) -> Result<Self, Error> {
|
||||
let spec = &testing_spec::<E>(fork_name);
|
||||
let state = ssz_decode_state(&path.join("state.ssz_snappy"), spec)?;
|
||||
let merkle_proof = yaml_decode_file(&path.join("proof.yaml"))?;
|
||||
// Metadata does not exist in these tests but it is left like this just in case.
|
||||
let meta_path = path.join("meta.yaml");
|
||||
let metadata = if meta_path.exists() {
|
||||
Some(yaml_decode_file(&meta_path)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
metadata,
|
||||
state,
|
||||
merkle_proof,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: EthSpec> Case for MerkleProofValidity<E> {
|
||||
fn result(&self, _case_index: usize, _fork_name: ForkName) -> Result<(), Error> {
|
||||
let mut state = self.state.clone();
|
||||
state.initialize_tree_hash_cache();
|
||||
let proof = match state.compute_merkle_proof(self.merkle_proof.leaf_index) {
|
||||
Ok(proof) => proof,
|
||||
Err(_) => {
|
||||
return Err(Error::FailedToParseTest(
|
||||
"Could not retrieve merkle proof".to_string(),
|
||||
))
|
||||
}
|
||||
};
|
||||
let proof_len = proof.len();
|
||||
let branch_len = self.merkle_proof.branch.len();
|
||||
if proof_len != branch_len {
|
||||
return Err(Error::NotEqual(format!(
|
||||
"Branches not equal in length computed: {}, expected {}",
|
||||
proof_len, branch_len
|
||||
)));
|
||||
}
|
||||
|
||||
for (i, proof_leaf) in proof.iter().enumerate().take(proof_len) {
|
||||
let expected_leaf = self.merkle_proof.branch[i];
|
||||
if *proof_leaf != expected_leaf {
|
||||
return Err(Error::NotEqual(format!(
|
||||
"Leaves not equal in merke proof computed: {}, expected: {}",
|
||||
hex::encode(proof_leaf),
|
||||
hex::encode(expected_leaf)
|
||||
)));
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
@ -617,6 +617,30 @@ impl<E: EthSpec + TypeName> Handler for GenesisInitializationHandler<E> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Derivative)]
|
||||
#[derivative(Default(bound = ""))]
|
||||
pub struct MerkleProofValidityHandler<E>(PhantomData<E>);
|
||||
|
||||
impl<E: EthSpec + TypeName> Handler for MerkleProofValidityHandler<E> {
|
||||
type Case = cases::MerkleProofValidity<E>;
|
||||
|
||||
fn config_name() -> &'static str {
|
||||
E::name()
|
||||
}
|
||||
|
||||
fn runner_name() -> &'static str {
|
||||
"light_client"
|
||||
}
|
||||
|
||||
fn handler_name(&self) -> String {
|
||||
"single_merkle_proof".into()
|
||||
}
|
||||
|
||||
fn is_enabled_for_fork(&self, fork_name: ForkName) -> bool {
|
||||
fork_name != ForkName::Base
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Derivative)]
|
||||
#[derivative(Default(bound = ""))]
|
||||
pub struct OperationsHandler<E, O>(PhantomData<(E, O)>);
|
||||
|
@ -465,6 +465,11 @@ fn genesis_validity() {
|
||||
// Note: there are no genesis validity tests for mainnet
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn merkle_proof_validity() {
|
||||
MerkleProofValidityHandler::<MainnetEthSpec>::default().run();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn rewards() {
|
||||
for handler in &["basic", "leak", "random"] {
|
||||
|
Loading…
Reference in New Issue
Block a user