Merge branch 'master' into attestation-update

Paul Hauner 2018-12-12 09:42:12 +11:00
commit fb03b0a649
No known key found for this signature in database
GPG Key ID: 303E4494BB28068C
5 changed files with 250 additions and 24 deletions

View File

@@ -1,4 +1,7 @@
language: rust
script:
- cargo build --verbose --all
- cargo test --verbose --all
rust:
- stable
- beta

View File

@@ -125,18 +125,26 @@ This code-base is still very much under-development and does not provide any
user-facing functionality. For developers and researchers, there are several
tests and benchmarks which may be of interest.

A few basic steps are needed to get set up:

1. Install [rustup](https://rustup.rs/), the toolchain manager for Rust (Linux | macOS | Windows). To install it, run the following command in your terminal:
```
$ curl https://sh.rustup.rs -sSf | sh
```
2. To configure your current shell, run:
```
$ source $HOME/.cargo/env
```
3. Use the command `rustup show` to get information about the Rust installation. You should see that the active toolchain is the stable version.
4. Run `rustc --version` to check the installation and version of Rust.
   - Updates can be performed using `rustup update`.
5. Navigate to the working directory.
6. Run the tests with `cargo test --all`. This builds the project and runs every test case; if you are doing this for the first time it can take a while, so feel free to grab a coffee in the meantime. If everything passes, your setup is working and it's time to get your hands dirty. If there is an error, please raise an [issue](https://github.com/sigp/lighthouse/issues) and we will help you.
7. As an alternative to, or in addition to, the step above, you may also run the benchmarks with `cargo bench --all`.

##### Note:
Lighthouse presently runs on Rust `stable`, however, benchmarks currently require the
`nightly` version.
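
For quick reference, the steps above condense to roughly the following shell session (a sketch assuming the repository has already been cloned and you are in its root directory):

```
$ curl https://sh.rustup.rs -sSf | sh
$ source $HOME/.cargo/env
$ rustup show          # the active toolchain should be stable
$ rustc --version
$ cargo test --all     # builds the project and runs the full test suite
$ cargo bench --all    # requires the nightly toolchain (see note above)
```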

View File

@@ -96,6 +96,108 @@ mod tests {
use std::sync::Arc;
use std::thread;
#[test]
fn test_put_serialized_block() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "some bytes".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
store.put_serialized_block(hash, ssz).unwrap();
assert_eq!(db.get(DB_COLUMN, hash).unwrap().unwrap(), ssz);
}
#[test]
fn test_get_serialized_block() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "some bytes".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, ssz).unwrap();
assert_eq!(store.get_serialized_block(hash).unwrap().unwrap(), ssz);
}
#[test]
fn test_get_unknown_serialized_block() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "some bytes".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
let other_hash = &Hash256::from("another hash".as_bytes()).to_vec();
db.put(DB_COLUMN, other_hash, ssz).unwrap();
assert_eq!(store.get_serialized_block(hash).unwrap(), None);
}
#[test]
fn test_block_exists() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "some bytes".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, ssz).unwrap();
assert!(store.block_exists(hash).unwrap());
}
#[test]
fn test_block_does_not_exist() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "some bytes".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
let other_hash = &Hash256::from("another hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, ssz).unwrap();
assert!(!store.block_exists(other_hash).unwrap());
}
#[test]
fn test_delete_block() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "some bytes".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, ssz).unwrap();
assert!(db.exists(DB_COLUMN, hash).unwrap());
store.delete_block(hash).unwrap();
assert!(!db.exists(DB_COLUMN, hash).unwrap());
}
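// `block_at_slot` should report `InvalidBeaconBlock` when the stored bytes do not decode as a block.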
#[test]
fn test_invalid_block_at_slot() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "definitly not a valid block".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, ssz).unwrap();
assert_eq!(store.block_at_slot(hash, 42), Err(BeaconBlockAtSlotError::InvalidBeaconBlock));
}
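// `block_at_slot` should report `UnknownBeaconBlock` when the requested hash was never stored.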
#[test]
fn test_unknown_block_at_slot() {
let db = Arc::new(MemoryDB::open());
let store = BeaconBlockStore::new(db.clone());
let ssz = "some bytes".as_bytes();
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
let other_hash = &Hash256::from("another hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, ssz).unwrap();
assert_eq!(store.block_at_slot(other_hash, 42), Err(BeaconBlockAtSlotError::UnknownBeaconBlock));
}
#[test]
fn test_block_store_on_memory_db() {
let db = Arc::new(MemoryDB::open());
@@ -171,7 +273,7 @@ mod tests {
let mut s = SszStream::new();
s.append(&block);
let ssz = s.drain();
db.put(DB_COLUMN, &hashes[i].to_vec(), &ssz).unwrap();
}
let tuple = bs.block_at_slot(&hashes[4], 5).unwrap().unwrap();

View File

@@ -23,4 +23,46 @@ impl<T: ClientDB> PoWChainStore<T> {
}
}

#[cfg(test)]
mod tests {
extern crate types;
use super::*;
use super::super::super::MemoryDB;
use self::types::Hash256;
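// The PoW chain store is exercised against the same in-memory `MemoryDB` as the block store above.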
#[test]
fn test_put_block_hash() {
let db = Arc::new(MemoryDB::open());
let store = PoWChainStore::new(db.clone());
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
store.put_block_hash(hash).unwrap();
assert!(db.exists(DB_COLUMN, hash).unwrap());
}
#[test]
fn test_block_hash_exists() {
let db = Arc::new(MemoryDB::open());
let store = PoWChainStore::new(db.clone());
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, &[0]).unwrap();
assert!(store.block_hash_exists(hash).unwrap());
}
#[test]
fn test_block_hash_does_not_exist() {
let db = Arc::new(MemoryDB::open());
let store = PoWChainStore::new(db.clone());
let hash = &Hash256::from("some hash".as_bytes()).to_vec();
let other_hash = &Hash256::from("another hash".as_bytes()).to_vec();
db.put(DB_COLUMN, hash, &[0]).unwrap();
assert!(!store.block_hash_exists(other_hash).unwrap());
}
}

View File

@@ -82,6 +82,91 @@ mod tests {
use super::super::bls::Keypair;
use super::*;
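// Validator public keys are stored under prefixed, index-derived keys; the tests
// below cover the key construction and the put/get round-trip.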
#[test]
fn test_prefix_bytes() {
let db = Arc::new(MemoryDB::open());
let store = ValidatorStore::new(db.clone());
assert_eq!(store.prefix_bytes(&KeyPrefixes::PublicKey), b"pubkey".to_vec());
}
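// The DB key for a given index is expected to be the prefix bytes followed by the index encoded as a big-endian `u64`.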
#[test]
fn test_get_db_key_for_index() {
let db = Arc::new(MemoryDB::open());
let store = ValidatorStore::new(db.clone());
let mut buf = BytesMut::with_capacity(6 + 8);
buf.put(b"pubkey".to_vec());
buf.put_u64_be(42);
assert_eq!(store.get_db_key_for_index(&KeyPrefixes::PublicKey, 42), buf.take().to_vec())
}
#[test]
fn test_put_public_key_by_index() {
let db = Arc::new(MemoryDB::open());
let store = ValidatorStore::new(db.clone());
let index = 3;
let public_key = Keypair::random().pk;
store.put_public_key_by_index(index, &public_key).unwrap();
let public_key_at_index = db.get(
DB_COLUMN,
&store.get_db_key_for_index(&KeyPrefixes::PublicKey, index)[..]
).unwrap().unwrap();
assert_eq!(public_key_at_index, public_key.as_bytes());
}
#[test]
fn test_get_public_key_by_index() {
let db = Arc::new(MemoryDB::open());
let store = ValidatorStore::new(db.clone());
let index = 4;
let public_key = Keypair::random().pk;
db.put(
DB_COLUMN,
&store.get_db_key_for_index(&KeyPrefixes::PublicKey, index)[..],
&public_key.as_bytes()[..]
).unwrap();
let public_key_at_index = store.get_public_key_by_index(index).unwrap().unwrap();
assert_eq!(public_key_at_index, public_key);
}
#[test]
fn test_get_public_key_by_unknown_index() {
let db = Arc::new(MemoryDB::open());
let store = ValidatorStore::new(db.clone());
let public_key = Keypair::random().pk;
db.put(
DB_COLUMN,
&store.get_db_key_for_index(&KeyPrefixes::PublicKey, 3)[..],
&public_key.as_bytes()[..]
).unwrap();
let public_key_at_index = store.get_public_key_by_index(4).unwrap();
assert_eq!(public_key_at_index, None);
}
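// Stored bytes that do not decode into a valid public key should surface as a `DecodeError`.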
#[test]
fn test_get_invalid_public_key() {
let db = Arc::new(MemoryDB::open());
let store = ValidatorStore::new(db.clone());
let key = store.get_db_key_for_index(&KeyPrefixes::PublicKey, 42);
db.put(DB_COLUMN, &key[..], "cats".as_bytes()).unwrap();
assert_eq!(
store.get_public_key_by_index(42),
Err(ValidatorStoreError::DecodeError)
);
}
#[test]
fn test_validator_store_put_get() {
let db = Arc::new(MemoryDB::open());
@@ -117,18 +202,4 @@ mod tests {
.is_none()
);
}
#[test]
fn test_validator_store_bad_key() {
let db = Arc::new(MemoryDB::open());
let store = ValidatorStore::new(db.clone());
let key = store.get_db_key_for_index(&KeyPrefixes::PublicKey, 42);
db.put(DB_COLUMN, &key[..], "cats".as_bytes()).unwrap();
assert_eq!(
store.get_public_key_by_index(42),
Err(ValidatorStoreError::DecodeError)
);
}
}