Clippy clean (#536)

* Change into_iter to iter

* Fix clippy 'easy' warnings

* Clippy eth2/utils

* Add struct NetworkInfo

* Clippy for types, utils, and beacon_node/store/src/iters.rs

* Cargo fmt

* Change foo to my_foo

* Remove complex signature

* suppress clippy warning for unit_value in benches

* Use enumerate instead of iterating over range

* Allow trivially_copy_pass_by_ref in serde_utils
pscott 2019-09-30 05:58:45 +02:00 committed by Paul Hauner
parent 682b11f248
commit 7eb82125ef
39 changed files with 118 additions and 121 deletions
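
For readers skimming the diff below: these are mechanical clippy fixes, not behavioural changes. The following is a minimal, self-contained Rust sketch (illustrative only, not taken from the diff; all names are made up) of the main lint patterns being applied:

// Illustrative sketch of the lint patterns fixed in this commit.
fn main() {
    let balances = vec![1u64, 2, 3];

    // Borrowing iteration: `iter()` instead of `into_iter()`, and `sum()`
    // instead of a manual `fold(0_u64, |acc, i| acc + i)`.
    let total: u64 = balances.iter().sum();

    // `enumerate()` instead of indexing over a range (clippy::needless_range_loop).
    for (i, balance) in balances.iter().enumerate() {
        assert_eq!(*balance, balances[i]);
    }

    // Field init shorthand: `total` rather than `total: total`
    // (clippy::redundant_field_names).
    struct Summary {
        total: u64,
    }
    let summary = Summary { total };

    // `is_err()` instead of `if let Err(_) = ...` (clippy::redundant_pattern_matching).
    let outcome: Result<(), ()> = Err(());
    if outcome.is_err() {
        println!("total balance: {}", summary.total);
    }

    // Digit separators in large literals: 1_567_552_690 instead of 1567552690
    // (clippy::unreadable_literal).
    let _genesis_time: u64 = 1_567_552_690;
}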

View File

@ -130,7 +130,6 @@ fn main() {
log,
"The account manager must be run with a subcommand. See help for more information."
);
return;
}
}
}

View File

@ -125,6 +125,8 @@ pub struct BeaconChain<T: BeaconChainTypes> {
log: Logger,
}
type BeaconInfo<T> = (BeaconBlock<T>, BeaconState<T>);
impl<T: BeaconChainTypes> BeaconChain<T> {
/// Instantiate a new Beacon Chain, from genesis.
pub fn from_genesis(
@ -1060,7 +1062,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
if block.slot <= finalized_slot {
return Ok(BlockProcessingOutcome::WouldRevertFinalizedSlot {
block_slot: block.slot,
finalized_slot: finalized_slot,
finalized_slot,
});
}
@ -1258,7 +1260,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
&self,
randao_reveal: Signature,
slot: Slot,
) -> Result<(BeaconBlock<T::EthSpec>, BeaconState<T::EthSpec>), BlockProductionError> {
) -> Result<BeaconInfo<T::EthSpec>, BlockProductionError> {
let state = self
.state_at_slot(slot - 1)
.map_err(|_| BlockProductionError::UnableToProduceAtSlot(slot))?;
@ -1279,7 +1281,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
mut state: BeaconState<T::EthSpec>,
produce_at_slot: Slot,
randao_reveal: Signature,
) -> Result<(BeaconBlock<T::EthSpec>, BeaconState<T::EthSpec>), BlockProductionError> {
) -> Result<BeaconInfo<T::EthSpec>, BlockProductionError> {
metrics::inc_counter(&metrics::BLOCK_PRODUCTION_REQUESTS);
let timer = metrics::start_timer(&metrics::BLOCK_PRODUCTION_TIMES);
@ -1457,7 +1459,7 @@ impl<T: BeaconChainTypes> BeaconChain<T> {
// End fork choice metrics timer.
metrics::stop_timer(timer);
if let Err(_) = result {
if result.is_err() {
metrics::inc_counter(&metrics::FORK_CHOICE_ERRORS);
}

View File

@ -137,7 +137,7 @@ impl<T: BeaconChainTypes> BeaconChainBuilder<T> {
BuildStrategy::LoadFromStore => {
BeaconChain::from_store(store, eth1_backend, event_handler, self.spec, self.log)
.map_err(|e| format!("Error loading BeaconChain from database: {:?}", e))?
.ok_or_else(|| format!("Unable to find exising BeaconChain in database."))?
.ok_or_else(|| "Unable to find exising BeaconChain in database.".to_string())?
}
BuildStrategy::FromGenesis {
genesis_block,

View File

@ -228,10 +228,7 @@ fn scrape_head_state<T: BeaconChainTypes>(state: &BeaconState<T::EthSpec>, state
);
set_gauge_by_usize(&HEAD_STATE_SHARDS, state.previous_crosslinks.len());
set_gauge_by_usize(&HEAD_STATE_TOTAL_VALIDATORS, state.validators.len());
set_gauge_by_u64(
&HEAD_STATE_VALIDATOR_BALANCES,
state.balances.iter().fold(0_u64, |acc, i| acc + i),
);
set_gauge_by_u64(&HEAD_STATE_VALIDATOR_BALANCES, state.balances.iter().sum());
set_gauge_by_usize(
&HEAD_STATE_ACTIVE_VALIDATORS,
state

View File

@ -21,7 +21,7 @@ pub use types::test_utils::generate_deterministic_keypairs;
pub use crate::persisted_beacon_chain::{PersistedBeaconChain, BEACON_CHAIN_DB_KEY};
pub const HARNESS_GENESIS_TIME: u64 = 1567552690; // 4th September 2019
pub const HARNESS_GENESIS_TIME: u64 = 1_567_552_690; // 4th September 2019
/// Indicates how the `BeaconChainHarness` should produce blocks.
#[derive(Clone, Copy, Debug)]

View File

@ -12,6 +12,7 @@ use beacon_chain::{
use exit_future::Signal;
use futures::{future::Future, Stream};
use network::Service as NetworkService;
use rest_api::NetworkInfo;
use slog::{crit, error, info, o};
use slot_clock::SlotClock;
use std::marker::PhantomData;
@ -231,12 +232,15 @@ where
// Start the `rest_api` service
let api_exit_signal = if client_config.rest_api.enabled {
let network_info = NetworkInfo {
network_service: network.clone(),
network_chan: network_send.clone(),
};
match rest_api::start_server(
&client_config.rest_api,
executor,
beacon_chain.clone(),
network.clone(),
network_send.clone(),
network_info,
client_config.db_path().expect("unable to read datadir"),
eth2_config.clone(),
&log,

View File

@ -68,11 +68,11 @@ pub fn parse_pubkey(string: &str) -> Result<PublicKey, ApiError> {
let pubkey = PublicKey::from_bytes(pubkey_bytes.as_slice()).map_err(|e| {
ApiError::BadRequest(format!("Unable to deserialize public key: {:?}.", e))
})?;
return Ok(pubkey);
Ok(pubkey)
} else {
return Err(ApiError::BadRequest(
Err(ApiError::BadRequest(
"Public key must have a '0x' prefix".to_string(),
));
))
}
}

View File

@ -48,6 +48,11 @@ pub struct ApiService<T: BeaconChainTypes + 'static> {
eth2_config: Arc<Eth2Config>,
}
pub struct NetworkInfo<T: BeaconChainTypes> {
pub network_service: Arc<NetworkService<T>>,
pub network_chan: mpsc::UnboundedSender<NetworkMessage>,
}
fn into_boxfut<F: IntoFuture + 'static>(item: F) -> BoxFut
where
F: IntoFuture<Item = Response<Body>, Error = ApiError>,
@ -194,8 +199,7 @@ pub fn start_server<T: BeaconChainTypes>(
config: &ApiConfig,
executor: &TaskExecutor,
beacon_chain: Arc<BeaconChain<T>>,
network_service: Arc<NetworkService<T>>,
network_chan: mpsc::UnboundedSender<NetworkMessage>,
network_info: NetworkInfo<T>,
db_path: PathBuf,
eth2_config: Eth2Config,
log: &slog::Logger,
@ -226,8 +230,8 @@ pub fn start_server<T: BeaconChainTypes>(
log: server_log.clone(),
beacon_chain: server_bc.clone(),
db_path: db_path.clone(),
network_service: network_service.clone(),
network_channel: Arc::new(RwLock::new(network_chan.clone())),
network_service: network_info.network_service.clone(),
network_channel: Arc::new(RwLock::new(network_info.network_chan.clone())),
eth2_config: eth2_config.clone(),
})
};

View File

@ -27,7 +27,7 @@ impl ResponseBuilder {
e
))
})
.map(|h| String::from(h))?;
.map(String::from)?;
// JSON is our default encoding, unless something else is requested.
let encoding = match content_header {
@ -85,7 +85,7 @@ impl ResponseBuilder {
Response::builder()
.status(StatusCode::OK)
.header("content-type", content_type)
.body(Body::from(body))
.body(body)
.map_err(|e| ApiError::ServerError(format!("Failed to build response: {:?}", e)))
}

View File

@ -190,7 +190,7 @@ pub fn get_new_beacon_block<T: BeaconChainTypes + 'static>(req: Request<Body>) -
/// HTTP Handler to publish a BeaconBlock, which has been signed by a validator.
pub fn publish_beacon_block<T: BeaconChainTypes + 'static>(req: Request<Body>) -> BoxFut {
let _ = try_future!(check_content_type_for_json(&req));
try_future!(check_content_type_for_json(&req));
let log = get_logger_from_request(&req);
let beacon_chain = try_future!(get_beacon_chain_from_request::<T>(&req));
// Get the network sending channel from the request, for later transmission
@ -268,9 +268,12 @@ pub fn get_new_attestation<T: BeaconChainTypes + 'static>(req: Request<Body>) ->
.map_err(|e| {
ApiError::ServerError(format!("Unable to read validator index cache. {:?}", e))
})?
.ok_or(ApiError::BadRequest(
"The provided validator public key does not correspond to a validator index.".into(),
))?;
.ok_or_else(|| {
ApiError::BadRequest(
"The provided validator public key does not correspond to a validator index."
.into(),
)
})?;
// Build cache for the requested epoch
head_state
@ -286,7 +289,7 @@ pub fn get_new_attestation<T: BeaconChainTypes + 'static>(req: Request<Body>) ->
e
))
})?
.ok_or(ApiError::BadRequest("No validator duties could be found for the requested validator. Cannot provide valid attestation.".into()))?;
.ok_or_else(|| ApiError::BadRequest("No validator duties could be found for the requested validator. Cannot provide valid attestation.".into()))?;
// Check that we are requesting an attestation during the slot where it is relevant.
let present_slot = beacon_chain.slot().map_err(|e| ApiError::ServerError(
@ -354,7 +357,7 @@ pub fn get_new_attestation<T: BeaconChainTypes + 'static>(req: Request<Body>) ->
/// HTTP Handler to publish an Attestation, which has been signed by a validator.
pub fn publish_attestation<T: BeaconChainTypes + 'static>(req: Request<Body>) -> BoxFut {
let _ = try_future!(check_content_type_for_json(&req));
try_future!(check_content_type_for_json(&req));
let log = get_logger_from_request(&req);
let beacon_chain = try_future!(get_beacon_chain_from_request::<T>(&req));
// Get the network sending channel from the request, for later transmission

View File

@ -118,7 +118,7 @@ impl<T: BeaconChainTypes> AttestationService for AttestationServiceInstance<T> {
self.network_chan
.try_send(NetworkMessage::Publish {
topics: vec![topic],
message: message,
message,
})
.unwrap_or_else(|e| {
error!(

View File

@ -121,7 +121,7 @@ impl<T: BeaconChainTypes> BeaconBlockService for BeaconBlockServiceInstance<T> {
self.network_chan
.try_send(NetworkMessage::Publish {
topics: vec![topic],
message: message,
message,
})
.unwrap_or_else(|e| {
error!(

View File

@ -233,7 +233,7 @@ impl ConfigBuilder {
// directory onto it.
let data_dir: PathBuf = cli_args
.value_of("datadir")
.map(|string| PathBuf::from(string))
.map(PathBuf::from)
.or_else(|| {
dirs::home_dir().map(|mut home| {
home.push(DEFAULT_DATA_DIR);
@ -528,9 +528,9 @@ impl ConfigBuilder {
.parse::<Ipv4Addr>()
.map_err(|e| format!("Unable to parse default listen address: {:?}", e))?;
self.client_config.network.listen_address = addr.clone().into();
self.client_config.rpc.listen_address = addr.clone();
self.client_config.rest_api.listen_address = addr.clone();
self.client_config.network.listen_address = addr.into();
self.client_config.rpc.listen_address = addr;
self.client_config.rest_api.listen_address = addr;
Ok(())
}
@ -557,8 +557,8 @@ impl ConfigBuilder {
if self.eth2_config.spec_constants != self.client_config.spec_constants {
crit!(self.log, "Specification constants do not match.";
"client_config" => format!("{}", self.client_config.spec_constants),
"eth2_config" => format!("{}", self.eth2_config.spec_constants)
"client_config" => self.client_config.spec_constants.to_string(),
"eth2_config" => self.eth2_config.spec_constants.to_string()
);
return Err("Specification constant mismatch".into());
}

View File

@ -214,7 +214,7 @@ mod test {
state_a.slot = Slot::from(slots_per_historical_root);
state_b.slot = Slot::from(slots_per_historical_root * 2);
let mut hashes = (0..).into_iter().map(|i| Hash256::from_low_u64_be(i));
let mut hashes = (0..).map(Hash256::from_low_u64_be);
for root in &mut state_a.block_roots[..] {
*root = hashes.next().unwrap()
@ -230,7 +230,7 @@ mod test {
let iter = BlockRootsIterator::new(store.clone(), &state_b);
assert!(
iter.clone().find(|(_root, slot)| *slot == 0).is_some(),
iter.clone().any(|(_root, slot)| slot == 0),
"iter should contain zero slot"
);
@ -241,8 +241,8 @@ mod test {
assert_eq!(collected.len(), expected_len);
for i in 0..expected_len {
assert_eq!(collected[i].0, Hash256::from_low_u64_be(i as u64));
for (i, item) in collected.iter().enumerate() {
assert_eq!(item.0, Hash256::from_low_u64_be(i as u64));
}
}
@ -257,17 +257,17 @@ mod test {
state_a.slot = Slot::from(slots_per_historical_root);
state_b.slot = Slot::from(slots_per_historical_root * 2);
let mut hashes = (0..).into_iter().map(|i| Hash256::from_low_u64_be(i));
let mut hashes = (0..).map(Hash256::from_low_u64_be);
for slot in 0..slots_per_historical_root {
state_a
.set_state_root(Slot::from(slot), hashes.next().unwrap())
.expect(&format!("should set state_a slot {}", slot));
.unwrap_or_else(|_| panic!("should set state_a slot {}", slot));
}
for slot in slots_per_historical_root..slots_per_historical_root * 2 {
state_b
.set_state_root(Slot::from(slot), hashes.next().unwrap())
.expect(&format!("should set state_b slot {}", slot));
.unwrap_or_else(|_| panic!("should set state_b slot {}", slot));
}
let state_a_root = Hash256::from_low_u64_be(slots_per_historical_root as u64);
@ -279,7 +279,7 @@ mod test {
let iter = StateRootsIterator::new(store.clone(), &state_b);
assert!(
iter.clone().find(|(_root, slot)| *slot == 0).is_some(),
iter.clone().any(|(_root, slot)| slot == 0),
"iter should contain zero slot"
);
@ -290,8 +290,8 @@ mod test {
assert_eq!(collected.len(), expected_len, "collection length incorrect");
for i in 0..expected_len {
let (hash, slot) = collected[i];
for (i, item) in collected.iter().enumerate() {
let (hash, slot) = *item;
assert_eq!(slot, i as u64, "slot mismatch at {}: {} vs {}", i, slot, i);

View File

@ -94,7 +94,7 @@ pub fn scrape_for_metrics(db_path: &PathBuf) {
let db_size = if let Ok(iter) = fs::read_dir(db_path) {
iter.filter_map(std::result::Result::ok)
.map(size_of_dir_entry)
.fold(0_u64, |sum, val| sum + val)
.sum()
} else {
0
};

View File

@ -246,7 +246,7 @@ where
pub fn latest_message(&self, validator_index: usize) -> Option<(Hash256, Slot)> {
match self.latest_votes.get_ref(validator_index) {
Some(Some(v)) => Some((v.hash.clone(), v.slot.clone())),
Some(Some(v)) => Some((v.hash, v.slot)),
_ => None,
}
}

View File

@ -71,6 +71,7 @@ fn get_worst_block<T: EthSpec>(
builder.build(&spec)
}
#[allow(clippy::unit_arg)]
fn bench_block<T: EthSpec>(
c: &mut Criterion,
block: BeaconBlock<T>,

View File

@ -59,8 +59,8 @@ pub enum VerifySignatures {
}
impl VerifySignatures {
pub fn is_true(&self) -> bool {
*self == VerifySignatures::True
pub fn is_true(self) -> bool {
self == VerifySignatures::True
}
}

View File

@ -45,7 +45,7 @@ pub fn is_valid_indexed_attestation<T: EthSpec>(
let check_sorted = |list: &[u64]| -> Result<()> {
list.windows(2).enumerate().try_for_each(|(i, pair)| {
if pair[0] >= pair[1] {
return Err(error(Invalid::BadValidatorIndicesOrdering(i)));
Err(error(Invalid::BadValidatorIndicesOrdering(i)))
} else {
Ok(())
}

View File

@ -31,8 +31,8 @@ fn invalid_block_header_state_slot() {
let builder = get_builder(&spec);
let (mut block, mut state) = builder.build(None, None, &spec);
state.slot = Slot::new(133713);
block.slot = Slot::new(424242);
state.slot = Slot::new(133_713);
block.slot = Slot::new(424_242);
let result = per_block_processing(
&mut state,

View File

@ -130,6 +130,6 @@ fn verify_casper_ffg_vote<'a, T: EthSpec>(
);
Ok(state.get_previous_crosslink(data.crosslink.shard)?)
} else {
return Err(error(Invalid::BadTargetEpoch));
Err(error(Invalid::BadTargetEpoch))
}
}

View File

@ -70,7 +70,7 @@ impl<T: EthSpec> BlockBuilder<T> {
builder.set_parent_root(parent_root);
// Used as a stream of validator indices for use in slashings, exits, etc.
let mut validators_iter = (0..keypairs.len() as u64).into_iter();
let mut validators_iter = 0..keypairs.len() as u64;
// Insert `ProposerSlashing` objects.
for _ in 0..self.num_proposer_slashings {
@ -171,7 +171,7 @@ impl<T: EthSpec> BlockBuilder<T> {
info!("Inserted {} transfers.", builder.block.body.transfers.len());
// Set the eth1 data to be different from the state.
self.block_builder.block.body.eth1_data.block_hash = Hash256::from_slice(&vec![42; 32]);
self.block_builder.block.body.eth1_data.block_hash = Hash256::from_slice(&[42; 32]);
let block = self
.block_builder

View File

@ -73,7 +73,6 @@ fn shuffles_for_the_right_epoch() {
let spec = &MinimalEthSpec::default_spec();
let distinct_hashes: Vec<Hash256> = (0..MinimalEthSpec::epochs_per_historical_vector())
.into_iter()
.map(|i| Hash256::from_low_u64_be(i as u64))
.collect();

View File

@ -192,7 +192,7 @@ fn tree_hash_cache() {
assert_eq!(root.as_bytes(), &state.tree_hash_root()[..]);
state.slot = state.slot + 1;
state.slot += 1;
let root = state.update_tree_hash_cache().unwrap();
assert_eq!(root.as_bytes(), &state.tree_hash_root()[..]);
@ -231,9 +231,8 @@ mod committees {
shuffle_list(active_indices, spec.shuffle_round_count, &seed[..], false).unwrap();
let mut expected_indices_iter = shuffling.iter();
let mut expected_shards_iter = (0..T::ShardCount::to_u64())
.into_iter()
.map(|i| (start_shard + i) % T::ShardCount::to_u64());
let mut expected_shards_iter =
(0..T::ShardCount::to_u64()).map(|i| (start_shard + i) % T::ShardCount::to_u64());
// Loop through all slots in the epoch being tested.
for slot in epoch.slot_iter(T::slots_per_epoch()) {
@ -306,7 +305,6 @@ mod committees {
let (mut state, _keypairs): (BeaconState<T>, _) = builder.build();
let distinct_hashes: Vec<Hash256> = (0..T::epochs_per_historical_vector())
.into_iter()
.map(|i| Hash256::from_low_u64_be(i as u64))
.collect();
state.randao_mixes = FixedVector::from(distinct_hashes);

View File

@ -123,7 +123,7 @@ impl<T: EthSpec> TestingBeaconStateBuilder<T> {
.collect::<Vec<_>>()
.into();
let genesis_time = 1567052589; // 29 August, 2019;
let genesis_time = 1_567_052_589; // 29 August, 2019;
let mut state = BeaconState::new(
genesis_time,

View File

@ -46,6 +46,7 @@ where
Ok(array)
}
#[allow(clippy::trivially_copy_pass_by_ref)]
pub fn fork_to_hex_str<S>(bytes: &[u8; FORK_BYTES_LEN], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,

View File

@ -80,8 +80,7 @@ mod tests {
#[test]
pub fn test_ssz_round_trip() {
let original =
SecretKey::from_bytes("jzjxxgjajfjrmgodszzsgqccmhnyvetcuxobhtynojtpdtbj".as_bytes())
.unwrap();
SecretKey::from_bytes(b"jzjxxgjajfjrmgodszzsgqccmhnyvetcuxobhtynojtpdtbj").unwrap();
let bytes = ssz_encode(&original);
let decoded = SecretKey::from_ssz_bytes(&bytes).unwrap();

View File

@ -58,10 +58,12 @@ pub fn be_private_key(validator_index: usize) -> [u8; PRIVATE_KEY_BYTES] {
/// Return a public and private keypair for a given `validator_index`.
pub fn keypair(validator_index: usize) -> Keypair {
let sk = SecretKey::from_bytes(&be_private_key(validator_index)).expect(&format!(
"Should build valid private key for validator index {}",
validator_index
));
let sk = SecretKey::from_bytes(&be_private_key(validator_index)).unwrap_or_else(|_| {
panic!(
"Should build valid private key for validator index {}",
validator_index
)
});
Keypair {
pk: PublicKey::from_secret_key(&sk),

View File

@ -19,14 +19,11 @@ fn reference_private_keys() {
"2908643403277969554503670470854573663206729491025062456164283925661321952518",
"19554639423851580804889717218680781396599791537051606512605582393920758869044",
];
reference
.into_iter()
.enumerate()
.for_each(|(i, reference)| {
let bytes = be_private_key(i);
let num = BigUint::from_bytes_be(&bytes);
assert_eq!(&num.to_str_radix(10), reference)
});
reference.iter().enumerate().for_each(|(i, reference)| {
let bytes = be_private_key(i);
let num = BigUint::from_bytes_be(&bytes);
assert_eq!(&num.to_str_radix(10), reference)
});
}
#[test]
@ -46,19 +43,16 @@ fn reference_public_keys() {
"ptMQ27+rmiJFD1mZP4ekzl22Ij87Xx8w0sTscYki1ADgs8d0HejlmWD3JBGg7hCn",
"mJNBPAAoOj+e2f2YRd2hzqOCKNIlZ/lUHczDV+VKLWpuIEEDySVky8BfSQWsfEk6",
];
reference
.into_iter()
.enumerate()
.for_each(|(i, reference)| {
let pair = keypair(i);
let reference = base64::decode(reference).expect("Reference should be valid base64");
reference.iter().enumerate().for_each(|(i, reference)| {
let pair = keypair(i);
let reference = base64::decode(reference).expect("Reference should be valid base64");
assert_eq!(
reference.len(),
48,
"Reference should be 48 bytes (public key size)"
);
assert_eq!(
reference.len(),
48,
"Reference should be 48 bytes (public key size)"
);
assert_eq!(pair.pk.as_bytes(), reference);
});
assert_eq!(pair.pk.as_bytes(), reference);
});
}

View File

@ -100,7 +100,9 @@ pub fn start_timer(histogram: &Result<Histogram>) -> Option<HistogramTimer> {
/// Stops a timer created with `start_timer(..)`.
pub fn stop_timer(timer: Option<HistogramTimer>) {
timer.map(|t| t.observe_duration());
if let Some(t) = timer {
t.observe_duration()
}
}
pub fn inc_counter(counter: &Result<IntCounter>) {

View File

@ -21,7 +21,7 @@ lazy_static! {
/// Zero nodes to act as "synthetic" left and right subtrees of other zero nodes.
static ref ZERO_NODES: Vec<MerkleTree> = {
(0..MAX_TREE_DEPTH + 1).map(MerkleTree::Zero).collect()
(0..=MAX_TREE_DEPTH).map(MerkleTree::Zero).collect()
};
}
@ -205,7 +205,7 @@ mod tests {
let merkle_tree = MerkleTree::create(&leaves, depth);
let merkle_root = merkle_tree.hash();
let proofs_ok = (0..leaves.len()).into_iter().all(|i| {
let proofs_ok = (0..leaves.len()).all(|i| {
let (leaf, branch) = merkle_tree.generate_proof(i, depth);
leaf == leaves[i] && verify_merkle_proof(leaf, &branch, depth, i, merkle_root)
});

View File

@ -8,7 +8,6 @@ fn main() {
let vec: Vec<u64> = vec![4242; 8196];
let output: Vec<Vec<u64>> = (0..40_000)
.into_iter()
.map(|_| Vec::from_ssz_bytes(&vec.as_ssz_bytes()).unwrap())
.collect();

View File

@ -24,7 +24,6 @@ fn main() {
let vec: Vec<FixedLen> = vec![fixed_len; 8196];
let output: Vec<Vec<u64>> = (0..40_000)
.into_iter()
.map(|_| Vec::from_ssz_bytes(&vec.as_ssz_bytes()).unwrap())
.collect();

View File

@ -57,7 +57,7 @@ impl Decode for Foo {
}
fn main() {
let foo = Foo {
let my_foo = Foo {
a: 42,
b: vec![0, 1, 2, 3],
c: 11,
@ -65,9 +65,9 @@ fn main() {
let bytes = vec![42, 0, 8, 0, 0, 0, 11, 0, 0, 1, 2, 3];
assert_eq!(foo.as_ssz_bytes(), bytes);
assert_eq!(my_foo.as_ssz_bytes(), bytes);
let decoded_foo = Foo::from_ssz_bytes(&bytes).unwrap();
assert_eq!(foo, decoded_foo);
assert_eq!(my_foo, decoded_foo);
}

View File

@ -239,7 +239,7 @@ impl<T: Encode> Encode for Vec<T> {
if <T as Encode>::is_ssz_fixed_len() {
<T as Encode>::ssz_fixed_len() * self.len()
} else {
let mut len = self.into_iter().map(|item| item.ssz_bytes_len()).sum();
let mut len = self.iter().map(|item| item.ssz_bytes_len()).sum();
len += BYTES_PER_LENGTH_OFFSET * self.len();
len
}

View File

@ -21,7 +21,7 @@ pub fn run_parse_hex(matches: &ArgMatches) -> Result<(), String> {
info!("Using minimal spec");
info!("Type: {:?}", type_str);
match type_str.as_ref() {
match type_str {
"block" => decode_and_print::<BeaconBlock<MinimalEthSpec>>(&hex)?,
"state" => decode_and_print::<BeaconState<MinimalEthSpec>>(&hex)?,
other => return Err(format!("Unknown type: {}", other)),

View File

@ -222,15 +222,12 @@ fn ssz_generic_test<T: SszStaticType>(path: &Path) -> Result<(), Error> {
}
}
// Invalid
else {
if let Ok(decoded) = T::from_ssz_bytes(&serialized) {
return Err(Error::DidntFail(format!(
"Decoded invalid bytes into: {:?}",
decoded
)));
}
else if let Ok(decoded) = T::from_ssz_bytes(&serialized) {
return Err(Error::DidntFail(format!(
"Decoded invalid bytes into: {:?}",
decoded
)));
}
Ok(())
}
@ -291,11 +288,11 @@ where
let decoded: Vec<u8> = hex::decode(&s.as_str()[2..]).map_err(D::Error::custom)?;
if decoded.len() > N::to_usize() {
return Err(D::Error::custom(format!(
Err(D::Error::custom(format!(
"Too many values for list, got: {}, limit: {}",
decoded.len(),
N::to_usize()
)));
)))
} else {
Ok(decoded.into())
}

View File

@ -171,17 +171,17 @@ impl Config {
.map_err(|e| format!("Unable to deserialize private key: {:?}", e))?;
let ki = key.identifier();
if &ki
!= &path
if ki
!= path
.file_name()
.ok_or_else(|| "Invalid path".to_string())?
.to_string_lossy()
{
return Err(format!(
Err(format!(
"The validator key ({:?}) did not match the directory filename {:?}.",
ki,
path.to_str()
));
))
} else {
Ok(key)
}
@ -219,10 +219,7 @@ impl Config {
&self,
range: std::ops::Range<usize>,
) -> Result<Vec<Keypair>, String> {
Ok(range
.into_iter()
.map(generate_deterministic_keypair)
.collect())
Ok(range.map(generate_deterministic_keypair).collect())
}
/// Loads the keypairs according to `self.key_source`. Will return one or more keypairs, or an

View File

@ -118,14 +118,14 @@ impl EpochDutiesMap {
if let Some(epoch_duty) = epoch_duties.get(signer) {
if let Some(duty) = epoch_duty {
// Retrieves the duty for a validator at a given slot
return Ok(duty.is_work_slot(slot));
Ok(duty.is_work_slot(slot))
} else {
// the validator isn't active
return Ok(None);
Ok(None)
}
} else {
// validator isn't known
return Err(EpochDutiesMapError::UnknownValidator);
Err(EpochDutiesMapError::UnknownValidator)
}
}
}