Minify slashing protection interchange data (#2380)

## Issue Addressed

Closes #2354

## Proposed Changes

Add a `minify` method to `slashing_protection::Interchange` that keeps only the maximum-epoch attestation and maximum-slot block for each validator. Specifically, `minify` constructs "synthetic" attestations (with no `signing_root`) containing the maximum source epoch _and_ the maximum target epoch from the input. This is equivalent to the `minify_synth` algorithm that I've formally verified in this repository:

https://github.com/michaelsproul/slashing-proofs
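
For illustration (the values below are invented and hex strings truncated, not taken from this PR),
minifying a validator's entry such as:

```
{
  "pubkey": "0xb845…",
  "signed_blocks": [
    { "slot": "100", "signing_root": "0x01…" },
    { "slot": "101", "signing_root": "0x02…" }
  ],
  "signed_attestations": [
    { "source_epoch": "10", "target_epoch": "13" },
    { "source_epoch": "12", "target_epoch": "11" }
  ]
}
```

produces:

```
{
  "pubkey": "0xb845…",
  "signed_blocks": [
    { "slot": "101" }
  ],
  "signed_attestations": [
    { "source_epoch": "12", "target_epoch": "13" }
  ]
}
```

The synthetic attestation `(source 12, target 13)` need not correspond to any attestation that was
actually signed, and the synthetic entries carry no `signing_root`.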

## Additional Info

Includes the JSON loading optimisation from #2347.
Commit 6583ce325b by Michael Sproul, 2021-06-21 05:46:36 +00:00 (parent b84ff9f793).
11 changed files with 441 additions and 95 deletions.

Cargo.lock (generated):

@ -5897,6 +5897,7 @@ name = "slashing_protection"
version = "0.1.0"
dependencies = [
"filesystem",
"lazy_static",
"parking_lot",
"r2d2",
"r2d2_sqlite",


@ -1,7 +1,7 @@
use clap::{App, Arg, ArgMatches};
use environment::Environment;
use slashing_protection::{
interchange::Interchange, InterchangeImportOutcome, SlashingDatabase,
interchange::Interchange, InterchangeError, InterchangeImportOutcome, SlashingDatabase,
SLASHING_PROTECTION_FILENAME,
};
use std::fs::File;
@ -15,6 +15,8 @@ pub const EXPORT_CMD: &str = "export";
pub const IMPORT_FILE_ARG: &str = "IMPORT-FILE";
pub const EXPORT_FILE_ARG: &str = "EXPORT-FILE";
pub const MINIFY_FLAG: &str = "minify";
pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
App::new(CMD)
.about("Import or export slashing protection data to or from another client")
@ -26,6 +28,17 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
.takes_value(true)
.value_name("FILE")
.help("The slashing protection interchange file to import (.json)"),
)
.arg(
Arg::with_name(MINIFY_FLAG)
.long(MINIFY_FLAG)
.takes_value(true)
.default_value("true")
.possible_values(&["false", "true"])
.help(
"Minify the input file before processing. This is *much* faster, \
but will not detect slashable data in the input.",
),
),
)
.subcommand(
@ -36,6 +49,17 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
.takes_value(true)
.value_name("FILE")
.help("The filename to export the interchange file to"),
)
.arg(
Arg::with_name(MINIFY_FLAG)
.long(MINIFY_FLAG)
.takes_value(true)
.default_value("false")
.possible_values(&["false", "true"])
.help(
"Minify the output file. This will make it smaller and faster to \
import, but not faster to generate.",
),
),
)
}
@ -64,6 +88,7 @@ pub fn cli_run<T: EthSpec>(
match matches.subcommand() {
(IMPORT_CMD, Some(matches)) => {
let import_filename: PathBuf = clap_utils::parse_required(&matches, IMPORT_FILE_ARG)?;
let minify: bool = clap_utils::parse_required(&matches, MINIFY_FLAG)?;
let import_file = File::open(&import_filename).map_err(|e| {
format!(
"Unable to open import file at {}: {:?}",
@ -72,8 +97,18 @@ pub fn cli_run<T: EthSpec>(
)
})?;
let interchange = Interchange::from_json_reader(&import_file)
eprint!("Loading JSON file into memory & deserializing");
let mut interchange = Interchange::from_json_reader(&import_file)
.map_err(|e| format!("Error parsing file for import: {:?}", e))?;
eprintln!(" [done].");
if minify {
eprint!("Minifying input file for faster loading");
interchange = interchange
.minify()
.map_err(|e| format!("Minification failed: {:?}", e))?;
eprintln!(" [done].");
}
let slashing_protection_database =
SlashingDatabase::open_or_create(&slashing_protection_db_path).map_err(|e| {
@ -84,16 +119,6 @@ pub fn cli_run<T: EthSpec>(
)
})?;
let outcomes = slashing_protection_database
.import_interchange_info(interchange, genesis_validators_root)
.map_err(|e| {
format!(
"Error during import: {:?}\n\
IT IS NOT SAFE TO START VALIDATING",
e
)
})?;
let display_slot = |slot: Option<Slot>| {
slot.map_or("none".to_string(), |slot| format!("{}", slot.as_u64()))
};
@ -105,48 +130,77 @@ pub fn cli_run<T: EthSpec>(
(source, target) => format!("{}=>{}", display_epoch(source), display_epoch(target)),
};
let mut num_failed = 0;
match slashing_protection_database
.import_interchange_info(interchange, genesis_validators_root)
{
Ok(outcomes) => {
eprintln!("All records imported successfully:");
for outcome in &outcomes {
match outcome {
InterchangeImportOutcome::Success { pubkey, summary } => {
eprintln!("- {:?} SUCCESS min block: {}, max block: {}, min attestation: {}, max attestation: {}",
pubkey,
display_slot(summary.min_block_slot),
display_slot(summary.max_block_slot),
display_attestation(summary.min_attestation_source, summary.min_attestation_target),
display_attestation(summary.max_attestation_source,
summary.max_attestation_target),
eprintln!("- {:?}", pubkey);
eprintln!(
" - min block: {}",
display_slot(summary.min_block_slot)
);
eprintln!(
" - min attestation: {}",
display_attestation(
summary.min_attestation_source,
summary.min_attestation_target
)
);
eprintln!(
" - max attestation: {}",
display_attestation(
summary.max_attestation_source,
summary.max_attestation_target
)
);
}
InterchangeImportOutcome::Failure { pubkey, error } => {
eprintln!("- {:?} ERROR: {:?}", pubkey, error);
num_failed += 1;
panic!(
"import should be atomic, but key {:?} was imported despite error: {:?}",
pubkey, error
);
}
}
}
}
Err(InterchangeError::AtomicBatchAborted(outcomes)) => {
eprintln!("ERROR, slashable data in input:");
for outcome in &outcomes {
if let InterchangeImportOutcome::Failure { pubkey, error } = outcome {
eprintln!("- {:?}", pubkey);
eprintln!(" - error: {:?}", error);
}
}
return Err(
"ERROR: import aborted due to slashable data, see above.\n\
Please see https://lighthouse-book.sigmaprime.io/slashing-protection.html#slashable-data-in-import\n\
IT IS NOT SAFE TO START VALIDATING".to_string()
);
}
Err(e) => {
return Err(format!(
"Fatal error during import: {:?}\n\
IT IS NOT SAFE TO START VALIDATING",
e
));
}
}
if num_failed == 0 {
eprintln!("Import completed successfully.");
eprintln!(
"Please double-check that the minimum and maximum blocks and slots above \
"Please double-check that the minimum and maximum blocks and attestations above \
match your expectations."
);
} else {
eprintln!(
"WARNING: history was NOT imported for {} of {} records",
num_failed,
outcomes.len()
);
eprintln!("IT IS NOT SAFE TO START VALIDATING");
eprintln!("Please see https://lighthouse-book.sigmaprime.io/slashing-protection.html#slashable-data-in-import");
return Err("Partial import".to_string());
}
Ok(())
}
(EXPORT_CMD, Some(matches)) => {
let export_filename: PathBuf = clap_utils::parse_required(&matches, EXPORT_FILE_ARG)?;
let minify: bool = clap_utils::parse_required(&matches, MINIFY_FLAG)?;
if !slashing_protection_db_path.exists() {
return Err(format!(
@ -164,10 +218,17 @@ pub fn cli_run<T: EthSpec>(
)
})?;
let interchange = slashing_protection_database
let mut interchange = slashing_protection_database
.export_interchange_info(genesis_validators_root)
.map_err(|e| format!("Error during export: {:?}", e))?;
if minify {
eprintln!("Minifying output file");
interchange = interchange
.minify()
.map_err(|e| format!("Unable to minify output: {:?}", e))?;
}
let output_file = File::create(export_filename)
.map_err(|e| format!("Error creating output file: {:?}", e))?;


@ -91,6 +91,32 @@ up to date.
[EIP-3076]: https://eips.ethereum.org/EIPS/eip-3076
### Minification
Since version 1.5.0, Lighthouse automatically _minifies_ slashing protection data upon import.
Minification safely shrinks the input file, making it faster to import.
If an import file contains slashable data, then its minification is still safe to import _even
though_ the non-minified file would fail to be imported. For this reason we recommend leaving
minification enabled if the input could contain slashable data. Conversely, if you would like to
double-check that the input file is not slashable with respect to itself, then you should disable
minification.
Minification can be disabled for imports by adding `--minify=false` to the command:
```
lighthouse account validator slashing-protection import --minify=false <my_interchange.json>
```
It can also be enabled for exports (disabled by default):
```
lighthouse account validator slashing-protection export --minify=true <lighthouse_interchange.json>
```
Minifying the export file should make it faster to import, and may allow it to be imported into an
implementation that rejects the non-minified equivalent due to slashable data.
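
As an illustration (with invented values and truncated hex strings), a minified file contains at
most one synthetic block and one synthetic attestation per validator, neither with a signing root:

```
{
  "pubkey": "0xb845…",
  "signed_blocks": [
    { "slot": "101" }
  ],
  "signed_attestations": [
    { "source_epoch": "12", "target_epoch": "13" }
  ]
}
```

The synthetic attestation combines the maximum source epoch and the maximum target epoch found in
the original file, and the synthetic block uses the maximum slot.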
## Troubleshooting
### Misplaced Slashing Database
@ -137,11 +163,11 @@ and _could_ indicate a serious error or misconfiguration (see [Avoiding Slashing
### Slashable Data in Import
If you receive a warning when trying to import an [interchange file](#import-and-export) about
During import of an [interchange file](#import-and-export), if you receive an error about
the file containing slashable data, then you must carefully consider whether you want to continue.
There are several potential causes for this warning, each of which require a different reaction. If
you have seen the warning for multiple validator keys, the cause could be different for each of them.
There are several potential causes for this error, each of which requires a different reaction. If
the error output lists multiple validator keys, the cause could be different for each of them.
1. Your validator has actually signed slashable data. If this is the case, you should assess
whether your validator has been slashed (or is likely to be slashed). It's up to you
@ -156,6 +182,9 @@ you have seen the warning for multiple validator keys, the cause could be differ
It might be safe to continue as-is, or you could consider a [Drop and
Re-import](#drop-and-re-import).
If you are running the import command with `--minify=false`, you should consider enabling
[minification](#minification).
#### Drop and Re-import
If you'd like to prioritize an interchange file over any existing database stored by Lighthouse


@ -19,4 +19,5 @@ serde_utils = { path = "../../consensus/serde_utils" }
filesystem = { path = "../../common/filesystem" }
[dev-dependencies]
lazy_static = "1.4.0"
rayon = "1.4.1"


@ -1,4 +1,4 @@
TESTS_TAG := f495032df9c26c678536cd2b7854e836ea94c217
TESTS_TAG := v5.1.0
GENERATE_DIR := generated-tests
OUTPUT_DIR := interchange-tests
TARBALL := $(OUTPUT_DIR)-$(TESTS_TAG).tar.gz


@ -220,13 +220,40 @@ fn main() {
vec![
TestCase::new(interchange(vec![(0, vec![40], vec![])])),
TestCase::new(interchange(vec![(0, vec![20], vec![])]))
.allow_partial_import()
.contains_slashable_data()
.with_blocks(vec![(0, 20, false)]),
],
),
MultiTestCase::new(
"multiple_interchanges_single_validator_fail_iff_imported",
vec![
TestCase::new(interchange(vec![(0, vec![40], vec![])])),
TestCase::new(interchange(vec![(0, vec![20, 50], vec![])]))
.contains_slashable_data()
.with_blocks(vec![(0, 20, false), (0, 50, false)]),
],
),
MultiTestCase::single(
"single_validator_source_greater_than_target",
TestCase::new(interchange(vec![(0, vec![], vec![(8, 7)])])).allow_partial_import(),
TestCase::new(interchange(vec![(0, vec![], vec![(8, 7)])])).contains_slashable_data(),
),
MultiTestCase::single(
"single_validator_source_greater_than_target_surrounding",
TestCase::new(interchange(vec![(0, vec![], vec![(5, 2)])]))
.contains_slashable_data()
.with_attestations(vec![(0, 3, 4, false)]),
),
MultiTestCase::single(
"single_validator_source_greater_than_target_surrounded",
TestCase::new(interchange(vec![(0, vec![], vec![(5, 2)])]))
.contains_slashable_data()
.with_attestations(vec![(0, 6, 1, false)]),
),
MultiTestCase::single(
"single_validator_source_greater_than_target_sensible_iff_minified",
TestCase::new(interchange(vec![(0, vec![], vec![(5, 2), (6, 7)])]))
.contains_slashable_data()
.with_attestations(vec![(0, 5, 8, false), (0, 6, 8, true)]),
),
MultiTestCase::single(
"single_validator_out_of_order_blocks",
@ -304,11 +331,11 @@ fn main() {
vec![(10, Some(0)), (10, Some(11))],
vec![],
)]))
.allow_partial_import(),
.contains_slashable_data(),
),
MultiTestCase::single(
"single_validator_slashable_blocks_no_root",
TestCase::new(interchange(vec![(0, vec![10, 10], vec![])])).allow_partial_import(),
TestCase::new(interchange(vec![(0, vec![10, 10], vec![])])).contains_slashable_data(),
),
MultiTestCase::single(
"single_validator_slashable_attestations_double_vote",
@ -317,17 +344,17 @@ fn main() {
vec![],
vec![(2, 3, Some(0)), (2, 3, Some(1))],
)]))
.allow_partial_import(),
.contains_slashable_data(),
),
MultiTestCase::single(
"single_validator_slashable_attestations_surrounds_existing",
TestCase::new(interchange(vec![(0, vec![], vec![(2, 3), (0, 4)])]))
.allow_partial_import(),
.contains_slashable_data(),
),
MultiTestCase::single(
"single_validator_slashable_attestations_surrounded_by_existing",
TestCase::new(interchange(vec![(0, vec![], vec![(0, 4), (2, 3)])]))
.allow_partial_import(),
.contains_slashable_data(),
),
MultiTestCase::single(
"duplicate_pubkey_not_slashable",
@ -338,6 +365,29 @@ fn main() {
.with_blocks(vec![(0, 10, false), (0, 13, false), (0, 14, true)])
.with_attestations(vec![(0, 0, 2, false), (0, 1, 3, false)]),
),
MultiTestCase::single(
"duplicate_pubkey_slashable_block",
TestCase::new(interchange(vec![
(0, vec![10], vec![(0, 2)]),
(0, vec![10], vec![(1, 3)]),
]))
.contains_slashable_data()
.with_blocks(vec![(0, 10, false), (0, 11, true)]),
),
MultiTestCase::single(
"duplicate_pubkey_slashable_attestation",
TestCase::new(interchange_with_signing_roots(vec![
(0, vec![], vec![(0, 3, Some(3))]),
(0, vec![], vec![(1, 2, None)]),
]))
.contains_slashable_data()
.with_attestations(vec![
(0, 0, 1, false),
(0, 0, 2, false),
(0, 0, 4, false),
(0, 1, 4, true),
]),
),
];
let args = std::env::args().collect::<Vec<_>>();
@ -345,7 +395,12 @@ fn main() {
fs::create_dir_all(output_dir).unwrap();
for test in tests {
test.run();
// Check that test case passes without minification
test.run(false);
// Check that test case passes with minification
test.run(true);
let f = File::create(output_dir.join(format!("{}.json", test.name))).unwrap();
serde_json::to_writer_pretty(&f, &test).unwrap();
writeln!(&f).unwrap();


@ -1,5 +1,8 @@
use crate::InterchangeError;
use serde_derive::{Deserialize, Serialize};
use std::collections::HashSet;
use std::cmp::max;
use std::collections::{HashMap, HashSet};
use std::io;
use types::{Epoch, Hash256, PublicKeyBytes, Slot};
#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
@ -49,8 +52,12 @@ impl Interchange {
serde_json::from_str(json)
}
pub fn from_json_reader(reader: impl std::io::Read) -> Result<Self, serde_json::Error> {
serde_json::from_reader(reader)
pub fn from_json_reader(mut reader: impl std::io::Read) -> Result<Self, io::Error> {
// We read the entire file into memory first, as this is *a lot* faster than using
// `serde_json::from_reader`. See https://github.com/serde-rs/json/issues/160
let mut json_str = String::new();
reader.read_to_string(&mut json_str)?;
Ok(Interchange::from_json_str(&json_str)?)
}
pub fn write_to(&self, writer: impl std::io::Write) -> Result<(), serde_json::Error> {
@ -73,4 +80,75 @@ impl Interchange {
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Minify an interchange by constructing a synthetic block & attestation for each validator.
pub fn minify(&self) -> Result<Self, InterchangeError> {
// Map from pubkey to optional max block and max attestation.
let mut validator_data =
HashMap::<PublicKeyBytes, (Option<SignedBlock>, Option<SignedAttestation>)>::new();
for data in self.data.iter() {
// Existing maximum attestation and maximum block.
let (max_block, max_attestation) = validator_data
.entry(data.pubkey)
.or_insert_with(|| (None, None));
// Find maximum source and target epochs.
let max_source_epoch = data
.signed_attestations
.iter()
.map(|attestation| attestation.source_epoch)
.max();
let max_target_epoch = data
.signed_attestations
.iter()
.map(|attestation| attestation.target_epoch)
.max();
match (max_source_epoch, max_target_epoch) {
(Some(source_epoch), Some(target_epoch)) => {
if let Some(prev_max) = max_attestation {
prev_max.source_epoch = max(prev_max.source_epoch, source_epoch);
prev_max.target_epoch = max(prev_max.target_epoch, target_epoch);
} else {
*max_attestation = Some(SignedAttestation {
source_epoch,
target_epoch,
signing_root: None,
});
}
}
(None, None) => {}
_ => return Err(InterchangeError::MinAndMaxInconsistent),
};
// Find maximum block slot.
let max_block_slot = data.signed_blocks.iter().map(|block| block.slot).max();
if let Some(max_slot) = max_block_slot {
if let Some(prev_max) = max_block {
prev_max.slot = max(prev_max.slot, max_slot);
} else {
*max_block = Some(SignedBlock {
slot: max_slot,
signing_root: None,
});
}
}
}
let data = validator_data
.into_iter()
.map(|(pubkey, (maybe_block, maybe_att))| InterchangeData {
pubkey,
signed_blocks: maybe_block.into_iter().collect(),
signed_attestations: maybe_att.into_iter().collect(),
})
.collect();
Ok(Self {
metadata: self.metadata.clone(),
data,
})
}
}


@ -1,9 +1,10 @@
use crate::{
interchange::Interchange,
interchange::{Interchange, SignedAttestation, SignedBlock},
test_utils::{pubkey, DEFAULT_GENESIS_VALIDATORS_ROOT},
SigningRoot, SlashingDatabase,
};
use serde_derive::{Deserialize, Serialize};
use std::collections::HashSet;
use tempfile::tempdir;
use types::{Epoch, Hash256, PublicKeyBytes, Slot};
@ -17,7 +18,7 @@ pub struct MultiTestCase {
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct TestCase {
pub should_succeed: bool,
pub allow_partial_import: bool,
pub contains_slashable_data: bool,
pub interchange: Interchange,
pub blocks: Vec<TestBlock>,
pub attestations: Vec<TestAttestation>,
@ -58,41 +59,53 @@ impl MultiTestCase {
self
}
pub fn run(&self) {
pub fn run(&self, minify: bool) {
let dir = tempdir().unwrap();
let slashing_db_file = dir.path().join("slashing_protection.sqlite");
let slashing_db = SlashingDatabase::create(&slashing_db_file).unwrap();
// If minification is used, false positives are allowed, i.e. there may be some situations
// in which signing is safe but the minified file prevents it.
let allow_false_positives = minify;
for test_case in &self.steps {
match slashing_db.import_interchange_info(
test_case.interchange.clone(),
self.genesis_validators_root,
) {
// If the test case is marked as containing slashable data, then it is permissible
// that we fail to import the file, in which case execution of the whole test should
// be aborted.
let allow_import_failure = test_case.contains_slashable_data;
let interchange = if minify {
let minified = test_case.interchange.minify().unwrap();
check_minification_invariants(&test_case.interchange, &minified);
minified
} else {
test_case.interchange.clone()
};
match slashing_db.import_interchange_info(interchange, self.genesis_validators_root) {
Ok(import_outcomes) => {
let failed_records = import_outcomes
.iter()
.filter(|o| o.failed())
.collect::<Vec<_>>();
let none_failed = import_outcomes.iter().all(|o| !o.failed());
assert!(
none_failed,
"test `{}` failed to import some records: {:#?}",
self.name, import_outcomes
);
if !test_case.should_succeed {
panic!(
"test `{}` succeeded on import when it should have failed",
self.name
);
}
if !failed_records.is_empty() && !test_case.allow_partial_import {
panic!(
"test `{}` failed to import some records but should have succeeded: {:#?}",
self.name, failed_records,
);
}
}
Err(e) if test_case.should_succeed => {
Err(e) => {
if test_case.should_succeed && !allow_import_failure {
panic!(
"test `{}` failed on import when it should have succeeded, error: {:?}",
self.name, e
);
}
_ => (),
break;
}
}
for (i, block) in test_case.blocks.iter().enumerate() {
@ -107,7 +120,7 @@ impl MultiTestCase {
i, self.name, safe
);
}
Err(e) if block.should_succeed => {
Err(e) if block.should_succeed && !allow_false_positives => {
panic!(
"block {} from `{}` failed when it should have succeeded: {:?}",
i, self.name, e
@ -130,7 +143,7 @@ impl MultiTestCase {
i, self.name, safe
);
}
Err(e) if att.should_succeed => {
Err(e) if att.should_succeed && !allow_false_positives => {
panic!(
"attestation {} from `{}` failed when it should have succeeded: {:?}",
i, self.name, e
@ -147,7 +160,7 @@ impl TestCase {
pub fn new(interchange: Interchange) -> Self {
TestCase {
should_succeed: true,
allow_partial_import: false,
contains_slashable_data: false,
interchange,
blocks: vec![],
attestations: vec![],
@ -159,8 +172,8 @@ impl TestCase {
self
}
pub fn allow_partial_import(mut self) -> Self {
self.allow_partial_import = true;
pub fn contains_slashable_data(mut self) -> Self {
self.contains_slashable_data = true;
self
}
@ -216,3 +229,81 @@ impl TestCase {
self
}
}
fn check_minification_invariants(interchange: &Interchange, minified: &Interchange) {
// Metadata should be unchanged.
assert_eq!(interchange.metadata, minified.metadata);
// Minified data should contain one entry per *unique* public key.
let uniq_pubkeys = get_uniq_pubkeys(interchange);
assert_eq!(uniq_pubkeys, get_uniq_pubkeys(minified));
assert_eq!(uniq_pubkeys.len(), minified.data.len());
for &pubkey in uniq_pubkeys.iter() {
// Minified data should contain 1 block per validator, unless the validator never signed any
// blocks. All of those blocks should have slots <= the slot of the minified block.
let original_blocks = get_blocks_of_validator(interchange, pubkey);
let minified_blocks = get_blocks_of_validator(minified, pubkey);
if original_blocks.is_empty() {
assert!(minified_blocks.is_empty());
} else {
// Should have exactly 1 block.
assert_eq!(minified_blocks.len(), 1);
// That block should have no signing root (it's synthetic).
let mini_block = minified_blocks.first().unwrap();
assert_eq!(mini_block.signing_root, None);
// All original blocks should have slots <= the mini block.
assert!(original_blocks
.iter()
.all(|block| block.slot <= mini_block.slot));
}
// Minified data should contain 1 attestation per validator, unless the validator never
// signed any attestations. All attestations should have source and target <= the source
// and target of the minified attestation.
let original_attestations = get_attestations_of_validator(interchange, pubkey);
let minified_attestations = get_attestations_of_validator(minified, pubkey);
if original_attestations.is_empty() {
assert!(minified_attestations.is_empty());
} else {
assert_eq!(minified_attestations.len(), 1);
let mini_attestation = minified_attestations.first().unwrap();
assert_eq!(mini_attestation.signing_root, None);
assert!(original_attestations
.iter()
.all(|att| att.source_epoch <= mini_attestation.source_epoch
&& att.target_epoch <= mini_attestation.target_epoch));
}
}
}
fn get_uniq_pubkeys(interchange: &Interchange) -> HashSet<PublicKeyBytes> {
interchange.data.iter().map(|data| data.pubkey).collect()
}
fn get_blocks_of_validator(interchange: &Interchange, pubkey: PublicKeyBytes) -> Vec<&SignedBlock> {
interchange
.data
.iter()
.filter(|data| data.pubkey == pubkey)
.flat_map(|data| data.signed_blocks.iter())
.collect()
}
fn get_attestations_of_validator(
interchange: &Interchange,
pubkey: PublicKeyBytes,
) -> Vec<&SignedAttestation> {
interchange
.data
.iter()
.filter(|data| data.pubkey == pubkey)
.flat_map(|data| data.signed_attestations.iter())
.collect()
}


@ -12,7 +12,8 @@ pub mod test_utils;
pub use crate::signed_attestation::{InvalidAttestation, SignedAttestation};
pub use crate::signed_block::{InvalidBlock, SignedBlock};
pub use crate::slashing_database::{
InterchangeImportOutcome, SlashingDatabase, SUPPORTED_INTERCHANGE_FORMAT_VERSION,
InterchangeError, InterchangeImportOutcome, SlashingDatabase,
SUPPORTED_INTERCHANGE_FORMAT_VERSION,
};
use rusqlite::Error as SQLError;
use std::io::{Error as IOError, ErrorKind};


@ -562,8 +562,8 @@ impl SlashingDatabase {
/// Import slashing protection from another client in the interchange format.
///
/// Return a vector of public keys and errors for any validators whose data could not be
/// imported.
/// This function will atomically import the entire interchange, failing if *any*
/// record cannot be imported.
pub fn import_interchange_info(
&self,
interchange: Interchange,
@ -581,25 +581,33 @@ impl SlashingDatabase {
});
}
// Create a single transaction for the entire batch, which will only be committed if
// all records are imported successfully.
let mut conn = self.conn_pool.get()?;
let txn = conn.transaction()?;
let mut import_outcomes = vec![];
let mut commit = true;
for record in interchange.data {
let pubkey = record.pubkey;
let txn = conn.transaction()?;
match self.import_interchange_record(record, &txn) {
Ok(summary) => {
import_outcomes.push(InterchangeImportOutcome::Success { pubkey, summary });
txn.commit()?;
}
Err(error) => {
import_outcomes.push(InterchangeImportOutcome::Failure { pubkey, error });
commit = false;
}
}
}
if commit {
txn.commit()?;
Ok(import_outcomes)
} else {
Err(InterchangeError::AtomicBatchAborted(import_outcomes))
}
}
pub fn import_interchange_record(
@ -914,12 +922,14 @@ pub enum InterchangeError {
interchange_file: Hash256,
client: Hash256,
},
MinimalAttestationSourceAndTargetInconsistent,
MinAndMaxInconsistent,
SQLError(String),
SQLPoolError(r2d2::Error),
SerdeJsonError(serde_json::Error),
InvalidPubkey(String),
NotSafe(NotSafe),
/// One or more records were found to be slashable, so the whole batch was aborted.
AtomicBatchAborted(Vec<InterchangeImportOutcome>),
}
impl From<NotSafe> for InterchangeError {


@ -1,7 +1,12 @@
use lazy_static::lazy_static;
use slashing_protection::interchange_test::MultiTestCase;
use std::fs::File;
use std::path::PathBuf;
lazy_static! {
pub static ref TEST_ROOT_DIR: PathBuf = test_root_dir();
}
fn download_tests() {
let make_output = std::process::Command::new("make")
.current_dir(std::env::var("CARGO_MANIFEST_DIR").unwrap())
@ -22,7 +27,7 @@ fn test_root_dir() -> PathBuf {
#[test]
fn generated() {
for entry in test_root_dir()
for entry in TEST_ROOT_DIR
.join("generated")
.read_dir()
.unwrap()
@ -30,6 +35,20 @@ fn generated() {
{
let file = File::open(entry.path()).unwrap();
let test_case: MultiTestCase = serde_json::from_reader(&file).unwrap();
test_case.run();
test_case.run(false);
}
}
#[test]
fn generated_with_minification() {
for entry in TEST_ROOT_DIR
.join("generated")
.read_dir()
.unwrap()
.map(Result::unwrap)
{
let file = File::open(entry.path()).unwrap();
let test_case: MultiTestCase = serde_json::from_reader(&file).unwrap();
test_case.run(true);
}
}