Implement VC API (#1657)
## Issue Addressed

NA

## Proposed Changes

- Implements an HTTP API for the validator client.
- Creates EIP-2335 keystores with an empty `description` field instead of a missing `description` field. Adds an option to set the name.
- Is more graceful with setups that don't have any validators (yet):
  - Removes an error log when there are no validators.
  - Creates the `validator` dir if it doesn't exist.
- Allows building a `ValidatorDir` without a withdrawal keystore (required for the API method where we only post a voting keystore).
- Adds an optional `description` field to `validator_definitions.yml`.

## TODO

- [x] Signature header, as per https://github.com/sigp/lighthouse/issues/1269#issuecomment-649879855
- [x] Return validator descriptions
- [x] Return deposit data
- [x] Respect the mnemonic offset
- [x] Check that the mnemonic can derive the returned keys
- [x] Be strict about non-localhost
- [x] Allow graceful start without any validators (+ create the validator dir)
- [x] Docs final pass
- [x] Swap to the EIP-2335 `description` field
- [x] Fix the Zeroize TODO in the VC API types
- [x] Zeroize the secp256k1 key

## Endpoints

- [x] `GET /lighthouse/version`
- [x] `GET /lighthouse/health`
- [x] `GET /lighthouse/validators`
- [x] `POST /lighthouse/validators/hd`
- [x] `POST /lighthouse/validators/keystore`
- [x] `PATCH /lighthouse/validators/:validator_pubkey`
- [ ] ~~`POST /lighthouse/validators/:validator_pubkey/exit/:epoch`~~ Future work

## Additional Info

TBC
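For reviewers who want to exercise the new endpoints from Rust, the sketch below shows roughly how the typed client added by this PR (`ValidatorClientHttpClient` in `common/eth2/src/lighthouse_vc/http_client.rs`, shown further down) could be used. It is a minimal, untested sketch: it assumes a validator client started with `--http`, uses the example API token from the book pages below rather than a real one, and assumes the response shapes (`data`, `version`, `enabled`, `voting_pubkey`) shown in the JSON examples.

```rust
use eth2::lighthouse_vc::http_client::{Url, ValidatorClientHttpClient};

#[tokio::main]
async fn main() -> Result<(), String> {
    // Default listen address of `lighthouse vc --http`.
    let url = Url::parse("http://localhost:5062").map_err(|e| e.to_string())?;

    // Example token from the docs; read the real one from `validators/api-token.txt`.
    let token = "api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123";

    let client =
        ValidatorClientHttpClient::new(url, token.to_string()).map_err(|e| format!("{:?}", e))?;

    // `GET /lighthouse/version`; the client also checks the `Signature` response header.
    let version = client
        .get_lighthouse_version()
        .await
        .map_err(|e| format!("{:?}", e))?;
    println!("server version: {}", version.data.version);

    // `GET /lighthouse/validators`
    let validators = client
        .get_lighthouse_validators()
        .await
        .map_err(|e| format!("{:?}", e))?;
    for v in validators.data {
        println!("{:?} (enabled: {})", v.voting_pubkey, v.enabled);
    }

    Ok(())
}
```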
This commit is contained in: parent 1d278aaa83 · commit 6ea3bc5e52
Cargo.lock — generated file, 105 lines changed.

The lockfile changes in this commit are:

- `chrono`: 0.4.18 → 0.4.15 (the direct `libc` and `winapi 0.3.9` dependencies are dropped).
- A crate whose dependency list includes `sloggers`, `slot_clock`, `store`, `timer`, `tokio 0.2.22` and `toml`: `time 0.2.22` → `time 0.2.21`.
- New package: `doc-comment 0.3.3`.
- `eth2 0.1.0` gains dependencies: `account_utils`, `bytes 0.5.6`, `eth2_keystore`, `libsecp256k1`, `ring` and `zeroize`.
- `hashbrown`: 0.9.1 → 0.9.0, and a dependent crate's `hashbrown 0.9.1` entry becomes `hashbrown 0.9.0`.
- `openssl-src`: 111.11.0+1.1.1h → 111.10.2+1.1.1g.
- `pin-project` and `pin-project-internal`: 0.4.24 → 0.4.23.
- `pin-project-lite`: 0.1.9 → 0.1.7.
- `proc-macro2`: 1.0.23 → 1.0.21.
- `psutil`: 3.2.0 → 3.1.0, with its `thiserror` dependency replaced by `snafu`.
- New packages: `snafu 0.6.9` and `snafu-derive 0.6.9`.
- `syn`: 1.0.42 → 1.0.41.
- `time`: 0.2.22 → 0.2.21 and `time-macros`: 0.1.1 → 0.1.0.
- `tracing`: 0.1.21 → 0.1.19 (dropping its `pin-project-lite` dependency) and `tracing-core`: 0.1.17 → 0.1.16.
- `validator_client` gains dependencies: `hyper 0.13.8`, `libsecp256k1`, `lighthouse_version`, `rand 0.7.3`, `ring`, `serde_utils`, `tempfile`, `warp` and `warp_utils`.
- A crate whose dependency list includes `beacon_chain`, `eth2`, `safe_arith`, `state_processing`, `types` and `warp` gains `serde` and `tokio 0.2.22`.
```diff
@@ -218,7 +218,8 @@ pub fn cli_run<T: EthSpec>(
             )
         })?;

-    ValidatorDirBuilder::new(validator_dir.clone(), secrets_dir.clone())
+    ValidatorDirBuilder::new(validator_dir.clone())
+        .password_dir(secrets_dir.clone())
         .voting_keystore(keystores.voting, voting_password.as_bytes())
         .withdrawal_keystore(keystores.withdrawal, withdrawal_password.as_bytes())
         .create_eth1_tx_data(deposit_gwei, &spec)
```
```diff
@@ -124,7 +124,8 @@ pub fn cli_run(matches: &ArgMatches, validator_dir: PathBuf) -> Result<(), String> {

     let voting_pubkey = keystores.voting.pubkey().to_string();

-    ValidatorDirBuilder::new(validator_dir.clone(), secrets_dir.clone())
+    ValidatorDirBuilder::new(validator_dir.clone())
+        .password_dir(secrets_dir.clone())
         .voting_keystore(keystores.voting, voting_password.as_bytes())
         .withdrawal_keystore(keystores.withdrawal, withdrawal_password.as_bytes())
         .store_withdrawal_keystore(matches.is_present(STORE_WITHDRAW_FLAG))
```
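The change above turns the secrets directory into an explicit builder option, which is what lets the new HTTP API build a `ValidatorDir` from a voting keystore alone (no withdrawal keystore). A rough, hypothetical sketch of that configuration is shown below; the `validator_dir::Builder` import path and the terminating `build()` call are assumed here and are not part of this hunk.

```rust
use std::path::PathBuf;
use validator_dir::Builder as ValidatorDirBuilder; // assumed import path

/// Hypothetical helper: create a validator directory from a voting keystore only,
/// without a withdrawal keystore, as required by the keystore-import API method.
fn create_voting_only_dir(
    validators_dir: PathBuf,
    secrets_dir: PathBuf,
    voting_keystore: eth2_keystore::Keystore,
    voting_password: &[u8],
) -> Result<(), String> {
    ValidatorDirBuilder::new(validators_dir)
        // The secrets dir is now supplied via a separate, optional builder call.
        .password_dir(secrets_dir)
        .voting_keystore(voting_keystore, voting_password)
        // Note: no `.withdrawal_keystore(..)` call; this is the newly-allowed configuration.
        .build() // assumed terminator returning the constructed `ValidatorDir`
        .map(|_| ())
        .map_err(|e| format!("{:?}", e))
}
```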
```diff
@@ -42,6 +42,7 @@ use types::{
     SignedBeaconBlock, SignedVoluntaryExit, Slot, YamlConfig,
 };
 use warp::Filter;
+use warp_utils::task::{blocking_json_task, blocking_task};

 const API_PREFIX: &str = "eth";
 const API_VERSION: &str = "v1";
```
|
|||||||
))
|
))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Execute some task in a tokio "blocking thread". These threads are ideal for long-running
|
|
||||||
/// (blocking) tasks since they don't jam up the core executor.
|
|
||||||
async fn blocking_task<F, T>(func: F) -> T
|
|
||||||
where
|
|
||||||
F: Fn() -> T,
|
|
||||||
{
|
|
||||||
tokio::task::block_in_place(func)
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A convenience wrapper around `blocking_task` for use with `warp` JSON responses.
|
|
||||||
async fn blocking_json_task<F, T>(func: F) -> Result<warp::reply::Json, warp::Rejection>
|
|
||||||
where
|
|
||||||
F: Fn() -> Result<T, warp::Rejection>,
|
|
||||||
T: Serialize,
|
|
||||||
{
|
|
||||||
blocking_task(func)
|
|
||||||
.await
|
|
||||||
.map(|resp| warp::reply::json(&resp))
|
|
||||||
}
|
|
||||||
|
```diff
@@ -19,6 +19,9 @@
     * [/lighthouse](./api-lighthouse.md)
     * [Validator Inclusion APIs](./validator-inclusion.md)
     * [Validator Client API](./api-vc.md)
+        * [Endpoints](./api-vc-endpoints.md)
+        * [Authorization Header](./api-vc-auth-header.md)
+        * [Signature Header](./api-vc-sig-header.md)
 * [Prometheus Metrics](./advanced_metrics.md)
 * [Advanced Usage](./advanced.md)
     * [Database Configuration](./advanced_database.md)
```
book/src/api-vc-auth-header.md — new file (+55 lines)

# Validator Client API: Authorization Header

## Overview

The validator client HTTP server requires that all requests have the following
HTTP header:

- Name: `Authorization`
- Value: `Basic <api-token>`

Where `<api-token>` is a string that can be obtained from the validator client
host. Here is an example `Authorization` header:

```
Authorization: Basic api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123
```

## Obtaining the API token

The API token can be obtained via two methods:

### Method 1: Reading from a file

The API token is stored as a file in the `validators` directory. For most users
this is `~/.lighthouse/{testnet}/validators/api-token.txt`. Here's an
example using the `cat` command to print the token to the terminal, but any
text editor will suffice:

```
$ cat api-token.txt
api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123
```

### Method 2: Reading from logs

When starting the validator client it will output a log message containing an
`api-token` field:

```
Sep 28 19:17:52.615 INFO HTTP API started api_token: api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123, listen_address: 127.0.0.1:5062
```

## Example

Here is an example `curl` command using the API token in the `Authorization` header:

```bash
curl localhost:5062/lighthouse/version -H "Authorization: Basic api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123"
```

The server should respond with its version:

```json
{"data":{"version":"Lighthouse/v0.2.11-fc0654fbe+/x86_64-linux"}}
```
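Any HTTP client can supply this header programmatically. The following is a minimal sketch (not code from this PR) in Rust using `reqwest` and `tokio`, with the example token and the default `127.0.0.1:5062` listen address from above:

```rust
use reqwest::Client;

// Example token from this page; substitute the contents of
// `~/.lighthouse/{testnet}/validators/api-token.txt` on your host.
const API_TOKEN: &str =
    "api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123";

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Attach the `Authorization: Basic <api-token>` header to a GET request.
    let body = Client::new()
        .get("http://127.0.0.1:5062/lighthouse/version")
        .header("Authorization", format!("Basic {}", API_TOKEN))
        .send()
        .await?
        .text()
        .await?;

    // Expected shape: {"data":{"version":"Lighthouse/..."}}
    println!("{}", body);
    Ok(())
}
```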
book/src/api-vc-endpoints.md — new file (+363 lines)

# Validator Client API: Endpoints

## Endpoints

HTTP Path | Description |
| --- | -- |
[`GET /lighthouse/version`](#get-lighthouseversion) | Get the Lighthouse software version
[`GET /lighthouse/health`](#get-lighthousehealth) | Get information about the host machine
[`GET /lighthouse/spec`](#get-lighthousespec) | Get the Eth2 specification used by the validator
[`GET /lighthouse/validators`](#get-lighthousevalidators) | List all validators
[`GET /lighthouse/validators/:voting_pubkey`](#get-lighthousevalidatorsvoting_pubkey) | Get a specific validator
[`PATCH /lighthouse/validators/:voting_pubkey`](#patch-lighthousevalidatorsvoting_pubkey) | Update a specific validator
[`POST /lighthouse/validators`](#post-lighthousevalidators) | Create a new validator and mnemonic.
[`POST /lighthouse/validators/mnemonic`](#post-lighthousevalidatorsmnemonic) | Create a new validator from an existing mnemonic.

## `GET /lighthouse/version`

Returns the software version and `git` commit hash for the Lighthouse binary.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/version`
Method | GET
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200

### Example Response Body

```json
{
    "data": {
        "version": "Lighthouse/v0.2.11-fc0654fbe+/x86_64-linux"
    }
}
```

## `GET /lighthouse/health`

Returns information regarding the health of the host machine.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/health`
Method | GET
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200

*Note: this endpoint is presently only available on Linux.*

### Example Response Body

```json
{
    "data": {
        "pid": 1476293,
        "pid_num_threads": 19,
        "pid_mem_resident_set_size": 4009984,
        "pid_mem_virtual_memory_size": 1306775552,
        "sys_virt_mem_total": 33596100608,
        "sys_virt_mem_available": 23073017856,
        "sys_virt_mem_used": 9346957312,
        "sys_virt_mem_free": 22410510336,
        "sys_virt_mem_percent": 31.322334,
        "sys_loadavg_1": 0.98,
        "sys_loadavg_5": 0.98,
        "sys_loadavg_15": 1.01
    }
}
```

## `GET /lighthouse/spec`

Returns the Eth2 specification loaded for this validator.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/spec`
Method | GET
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200

### Example Response Body

```json
{
    "data": {
        "CONFIG_NAME": "mainnet",
        "MAX_COMMITTEES_PER_SLOT": "64",
        "TARGET_COMMITTEE_SIZE": "128",
        "MIN_PER_EPOCH_CHURN_LIMIT": "4",
        "CHURN_LIMIT_QUOTIENT": "65536",
        "SHUFFLE_ROUND_COUNT": "90",
        "MIN_GENESIS_ACTIVE_VALIDATOR_COUNT": "1024",
        "MIN_GENESIS_TIME": "1601380800",
        "GENESIS_DELAY": "172800",
        "MIN_DEPOSIT_AMOUNT": "1000000000",
        "MAX_EFFECTIVE_BALANCE": "32000000000",
        "EJECTION_BALANCE": "16000000000",
        "EFFECTIVE_BALANCE_INCREMENT": "1000000000",
        "HYSTERESIS_QUOTIENT": "4",
        "HYSTERESIS_DOWNWARD_MULTIPLIER": "1",
        "HYSTERESIS_UPWARD_MULTIPLIER": "5",
        "PROPORTIONAL_SLASHING_MULTIPLIER": "3",
        "GENESIS_FORK_VERSION": "0x00000002",
        "BLS_WITHDRAWAL_PREFIX": "0x00",
        "SECONDS_PER_SLOT": "12",
        "MIN_ATTESTATION_INCLUSION_DELAY": "1",
        "MIN_SEED_LOOKAHEAD": "1",
        "MAX_SEED_LOOKAHEAD": "4",
        "MIN_EPOCHS_TO_INACTIVITY_PENALTY": "4",
        "MIN_VALIDATOR_WITHDRAWABILITY_DELAY": "256",
        "SHARD_COMMITTEE_PERIOD": "256",
        "BASE_REWARD_FACTOR": "64",
        "WHISTLEBLOWER_REWARD_QUOTIENT": "512",
        "PROPOSER_REWARD_QUOTIENT": "8",
        "INACTIVITY_PENALTY_QUOTIENT": "16777216",
        "MIN_SLASHING_PENALTY_QUOTIENT": "32",
        "SAFE_SLOTS_TO_UPDATE_JUSTIFIED": "8",
        "DOMAIN_BEACON_PROPOSER": "0x00000000",
        "DOMAIN_BEACON_ATTESTER": "0x01000000",
        "DOMAIN_RANDAO": "0x02000000",
        "DOMAIN_DEPOSIT": "0x03000000",
        "DOMAIN_VOLUNTARY_EXIT": "0x04000000",
        "DOMAIN_SELECTION_PROOF": "0x05000000",
        "DOMAIN_AGGREGATE_AND_PROOF": "0x06000000",
        "MAX_VALIDATORS_PER_COMMITTEE": "2048",
        "SLOTS_PER_EPOCH": "32",
        "EPOCHS_PER_ETH1_VOTING_PERIOD": "32",
        "SLOTS_PER_HISTORICAL_ROOT": "8192",
        "EPOCHS_PER_HISTORICAL_VECTOR": "65536",
        "EPOCHS_PER_SLASHINGS_VECTOR": "8192",
        "HISTORICAL_ROOTS_LIMIT": "16777216",
        "VALIDATOR_REGISTRY_LIMIT": "1099511627776",
        "MAX_PROPOSER_SLASHINGS": "16",
        "MAX_ATTESTER_SLASHINGS": "2",
        "MAX_ATTESTATIONS": "128",
        "MAX_DEPOSITS": "16",
        "MAX_VOLUNTARY_EXITS": "16",
        "ETH1_FOLLOW_DISTANCE": "1024",
        "TARGET_AGGREGATORS_PER_COMMITTEE": "16",
        "RANDOM_SUBNETS_PER_VALIDATOR": "1",
        "EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION": "256",
        "SECONDS_PER_ETH1_BLOCK": "14",
        "DEPOSIT_CONTRACT_ADDRESS": "0x48b597f4b53c21b48ad95c7256b49d1779bd5890"
    }
}
```

## `GET /lighthouse/validators`

Lists all validators managed by this validator client.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/validators`
Method | GET
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200

### Example Response Body

```json
{
    "data": [
        {
            "enabled": true,
            "voting_pubkey": "0xb0148e6348264131bf47bcd1829590e870c836dc893050fd0dadc7a28949f9d0a72f2805d027521b45441101f0cc1cde"
        },
        {
            "enabled": true,
            "voting_pubkey": "0xb0441246ed813af54c0a11efd53019f63dd454a1fa2a9939ce3c228419fbe113fb02b443ceeb38736ef97877eb88d43a"
        },
        {
            "enabled": true,
            "voting_pubkey": "0xad77e388d745f24e13890353031dd8137432ee4225752642aad0a2ab003c86620357d91973b6675932ff51f817088f38"
        }
    ]
}
```

## `GET /lighthouse/validators/:voting_pubkey`

Get a validator by their `voting_pubkey`.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/validators/:voting_pubkey`
Method | GET
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200, 400

### Example Path

```
localhost:5062/lighthouse/validators/0xb0148e6348264131bf47bcd1829590e870c836dc893050fd0dadc7a28949f9d0a72f2805d027521b45441101f0cc1cde
```

### Example Response Body

```json
{
    "data": {
        "enabled": true,
        "voting_pubkey": "0xb0148e6348264131bf47bcd1829590e870c836dc893050fd0dadc7a28949f9d0a72f2805d027521b45441101f0cc1cde"
    }
}
```

## `PATCH /lighthouse/validators/:voting_pubkey`

Update some values for the validator with `voting_pubkey`.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/validators/:voting_pubkey`
Method | PATCH
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200, 400

### Example Path

```
localhost:5062/lighthouse/validators/0xb0148e6348264131bf47bcd1829590e870c836dc893050fd0dadc7a28949f9d0a72f2805d027521b45441101f0cc1cde
```

### Example Request Body

```json
{
    "enabled": false
}
```

### Example Response Body

```json
null
```

## `POST /lighthouse/validators/`

Create any number of new validators, all of which will share a common mnemonic
generated by the server.

A BIP-39 mnemonic will be randomly generated and returned with the response.
This mnemonic can be used to recover all keys returned in the response.
Validators are generated from the mnemonic according to
[EIP-2334](https://eips.ethereum.org/EIPS/eip-2334), starting at index `0`.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/validators`
Method | POST
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200

### Example Request Body

```json
[
    {
        "enable": true,
        "description": "validator_one",
        "deposit_gwei": "32000000000"
    },
    {
        "enable": false,
        "description": "validator two",
        "deposit_gwei": "34000000000"
    }
]
```

### Example Response Body

```json
{
    "data": {
        "mnemonic": "marine orchard scout label trim only narrow taste art belt betray soda deal diagram glare hero scare shadow ramp blur junior behave resource tourist",
        "validators": [
            {
                "enabled": true,
                "description": "validator_one",
                "voting_pubkey": "0x8ffbc881fb60841a4546b4b385ec5e9b5090fd1c4395e568d98b74b94b41a912c6101113da39d43c101369eeb9b48e50",
                "eth1_deposit_tx_data": "0x22895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001206c68675776d418bfd63468789e7c68a6788c4dd45a3a911fe3d642668220bbf200000000000000000000000000000000000000000000000000000000000000308ffbc881fb60841a4546b4b385ec5e9b5090fd1c4395e568d98b74b94b41a912c6101113da39d43c101369eeb9b48e5000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000cf8b3abbf0ecd91f3b0affcc3a11e9c5f8066efb8982d354ee9a812219b17000000000000000000000000000000000000000000000000000000000000000608fbe2cc0e17a98d4a58bd7a65f0475a58850d3c048da7b718f8809d8943fee1dbd5677c04b5fa08a9c44d271d009edcd15caa56387dc217159b300aad66c2cf8040696d383d0bff37b2892a7fe9ba78b2220158f3dc1b9cd6357bdcaee3eb9f2",
                "deposit_gwei": "32000000000"
            },
            {
                "enabled": false,
                "description": "validator two",
                "voting_pubkey": "0xa9fadd620dc68e9fe0d6e1a69f6c54a0271ad65ab5a509e645e45c6e60ff8f4fc538f301781193a08b55821444801502",
                "eth1_deposit_tx_data": "0x22895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120b1911954c1b8d23233e0e2bf8c4878c8f56d25a4f790ec09a94520ec88af30490000000000000000000000000000000000000000000000000000000000000030a9fadd620dc68e9fe0d6e1a69f6c54a0271ad65ab5a509e645e45c6e60ff8f4fc538f301781193a08b5582144480150200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000a96df8b95c3ba749265e48a101f2ed974fffd7487487ed55f8dded99b617ad000000000000000000000000000000000000000000000000000000000000006090421299179824950e2f5a592ab1fdefe5349faea1e8126146a006b64777b74cce3cfc5b39d35b370e8f844e99c2dc1b19a1ebd38c7605f28e9c4540aea48f0bc48e853ae5f477fa81a9fc599d1732968c772730e1e47aaf5c5117bd045b788e",
                "deposit_gwei": "34000000000"
            }
        ]
    }
}
```

## `POST /lighthouse/validators/mnemonic`

Create any number of new validators, all of which will share a common mnemonic.

The supplied BIP-39 mnemonic will be used to generate the validator keys
according to [EIP-2334](https://eips.ethereum.org/EIPS/eip-2334), starting at
the supplied `key_derivation_path_offset`. For example, if
`key_derivation_path_offset = 42`, then the first validator voting key will be
generated with the path `m/12381/3600/i/42`.

### HTTP Specification

| Property | Specification |
| --- |--- |
Path | `/lighthouse/validators/mnemonic`
Method | POST
Required Headers | [`Authorization`](./api-vc-auth-header.md)
Typical Responses | 200

### Example Request Body

```json
{
    "mnemonic": "theme onion deal plastic claim silver fancy youth lock ordinary hotel elegant balance ridge web skill burger survey demand distance legal fish salad cloth",
    "key_derivation_path_offset": 0,
    "validators": [
        {
            "enable": true,
            "description": "validator_one",
            "deposit_gwei": "32000000000"
        }
    ]
}
```

### Example Response Body

```json
{
    "data": [
        {
            "enabled": true,
            "description": "validator_one",
            "voting_pubkey": "0xa062f95fee747144d5e511940624bc6546509eeaeae9383257a9c43e7ddc58c17c2bab4ae62053122184c381b90db380",
            "eth1_deposit_tx_data": "0x22895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120a57324d95ae9c7abfb5cc9bd4db253ed0605dc8a19f84810bcf3f3874d0e703a0000000000000000000000000000000000000000000000000000000000000030a062f95fee747144d5e511940624bc6546509eeaeae9383257a9c43e7ddc58c17c2bab4ae62053122184c381b90db3800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200046e4199f18102b5d4e8842d0eeafaa1268ee2c21340c63f9c2cd5b03ff19320000000000000000000000000000000000000000000000000000000000000060b2a897b4ba4f3910e9090abc4c22f81f13e8923ea61c0043506950b6ae174aa643540554037b465670d28fa7b7d716a301e9b172297122acc56be1131621c072f7c0a73ea7b8c5a90ecd5da06d79d90afaea17cdeeef8ed323912c70ad62c04b",
            "deposit_gwei": "32000000000"
        }
    ]
}
```
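The request bodies above can also be sent without `curl`. Here is a small, hypothetical sketch in Rust that POSTs the documented body to `POST /lighthouse/validators` using `reqwest` (with its `json` feature) and `serde_json`; the address and token are the example values from these pages.

```rust
use serde_json::{json, Value};

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // Example token; read the real one from `validators/api-token.txt`.
    let token = "api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123";

    // Request body exactly as documented for `POST /lighthouse/validators`.
    let body = json!([
        {
            "enable": true,
            "description": "validator_one",
            "deposit_gwei": "32000000000"
        }
    ]);

    let response: Value = reqwest::Client::new()
        .post("http://localhost:5062/lighthouse/validators")
        .header("Authorization", format!("Basic {}", token))
        .json(&body)
        .send()
        .await?
        .json()
        .await?;

    // The response contains the freshly generated mnemonic and the new validator(s).
    println!("{:#}", response);
    Ok(())
}
```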
book/src/api-vc-sig-header.md — new file (+108 lines)

# Validator Client API: Signature Header

## Overview

The validator client HTTP server adds the following header to all responses:

- Name: `Signature`
- Value: a secp256k1 signature across the SHA256 of the response body.

Example `Signature` header:

```
Signature: 0x304402205b114366444112580bf455d919401e9c869f5af067cd496016ab70d428b5a99d0220067aede1eb5819eecfd5dd7a2b57c5ac2b98f25a7be214b05684b04523aef873
```

## Verifying the Signature

Below is a browser-ready example of signature verification.

### HTML

```html
<script src="https://rawgit.com/emn178/js-sha256/master/src/sha256.js" type="text/javascript"></script>
<script src="https://rawgit.com/indutny/elliptic/master/dist/elliptic.min.js" type="text/javascript"></script>
```

### Javascript

```javascript
// Helper function to turn a hex-string into bytes.
function hexStringToByte(str) {
  if (!str) {
    return new Uint8Array();
  }

  var a = [];
  for (var i = 0, len = str.length; i < len; i += 2) {
    a.push(parseInt(str.substr(i, 2), 16));
  }

  return new Uint8Array(a);
}

// This example uses the secp256k1 curve from the "elliptic" library:
//
// https://github.com/indutny/elliptic
var ec = new elliptic.ec('secp256k1');

// The public key is contained in the API token:
//
// Authorization: Basic api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123
var pk_bytes = hexStringToByte('03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123');

// The signature is in the `Signature` header of the response:
//
// Signature: 0x304402205b114366444112580bf455d919401e9c869f5af067cd496016ab70d428b5a99d0220067aede1eb5819eecfd5dd7a2b57c5ac2b98f25a7be214b05684b04523aef873
var sig_bytes = hexStringToByte('304402205b114366444112580bf455d919401e9c869f5af067cd496016ab70d428b5a99d0220067aede1eb5819eecfd5dd7a2b57c5ac2b98f25a7be214b05684b04523aef873');

// The HTTP response body.
var response_body = "{\"data\":{\"version\":\"Lighthouse/v0.2.11-fc0654fbe+/x86_64-linux\"}}";

// The HTTP response body is hashed (SHA256) to determine the 32-byte message.
let hash = sha256.create();
hash.update(response_body);
let message = hash.array();

// The 32-byte message hash, the signature and the public key are verified.
if (ec.verify(message, sig_bytes, pk_bytes)) {
  console.log("The signature is valid")
} else {
  console.log("The signature is invalid")
}
```

*This example is also available as a [JSFiddle](https://jsfiddle.net/wnqd74Lz/).*

## Example

The previous Javascript example was written using the output from the following
`curl` command:

```bash
curl -v localhost:5062/lighthouse/version -H "Authorization: Basic api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123"
```

```
* Trying ::1:5062...
* connect to ::1 port 5062 failed: Connection refused
* Trying 127.0.0.1:5062...
* Connected to localhost (127.0.0.1) port 5062 (#0)
> GET /lighthouse/version HTTP/1.1
> Host: localhost:5062
> User-Agent: curl/7.72.0
> Accept: */*
> Authorization: Basic api-token-0x03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123
>
* Mark bundle as not supporting multiuse
< HTTP/1.1 200 OK
< content-type: application/json
< signature: 0x304402205b114366444112580bf455d919401e9c869f5af067cd496016ab70d428b5a99d0220067aede1eb5819eecfd5dd7a2b57c5ac2b98f25a7be214b05684b04523aef873
< server: Lighthouse/v0.2.11-fc0654fbe+/x86_64-linux
< access-control-allow-origin:
< content-length: 65
< date: Tue, 29 Sep 2020 04:23:46 GMT
<
* Connection #0 to host localhost left intact
{"data":{"version":"Lighthouse/v0.2.11-fc0654fbe+/x86_64-linux"}}
```
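For non-browser clients, the same check can be done natively. The sketch below mirrors the verification performed by the new `ValidatorClientHttpClient` (see `common/eth2/src/lighthouse_vc/http_client.rs` later in this diff): hash the response body with SHA256 via `ring`, then verify the DER-encoded signature against the compressed secp256k1 public key taken from the API token via the `libsecp256k1` crate (imported as `secp256k1`). The `hex` crate is used here only for the example's sake; the client itself uses `serde_utils::hex`.

```rust
use ring::digest::{digest, SHA256};
use secp256k1::{Message, PublicKey, Signature}; // crate `libsecp256k1` v0.3

fn main() {
    // Hex portion of the API token: a 33-byte compressed secp256k1 public key.
    let pk_hex = "03eace4c98e8f77477bb99efb74f9af10d800bd3318f92c33b719a4644254d4123";
    // Value of the `Signature` response header (DER encoded), without the `0x` prefix.
    let sig_hex = "304402205b114366444112580bf455d919401e9c869f5af067cd496016ab70d428b5a99d0220067aede1eb5819eecfd5dd7a2b57c5ac2b98f25a7be214b05684b04523aef873";
    // The raw HTTP response body.
    let body = r#"{"data":{"version":"Lighthouse/v0.2.11-fc0654fbe+/x86_64-linux"}}"#;

    let mut pk_bytes = [0u8; 33];
    pk_bytes.copy_from_slice(&hex::decode(pk_hex).unwrap());
    let pubkey = PublicKey::parse_compressed(&pk_bytes).unwrap();
    let sig = Signature::parse_der(&hex::decode(sig_hex).unwrap()).unwrap();

    // The signed message is the SHA256 digest of the response body.
    let message = Message::parse_slice(digest(&SHA256, body.as_bytes()).as_ref()).unwrap();

    if secp256k1::verify(&message, &sig, &pubkey) {
        println!("The signature is valid");
    } else {
        println!("The signature is invalid");
    }
}
```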
@ -1,3 +1,38 @@
|
|||||||
# Validator Client API
|
# Validator Client API
|
||||||
|
|
||||||
The validator client API is planned for release in late September 2020.
|
Lighthouse implements a HTTP/JSON API for the validator client. Since there is
|
||||||
|
no Eth2 standard validator client API, Lighthouse has defined its own.
|
||||||
|
|
||||||
|
A full list of endpoints can be found in [Endpoints](./api-vc-endpoints.md).
|
||||||
|
|
||||||
|
> Note: All requests to the HTTP server must supply an
|
||||||
|
> [`Authorization`](./api-vc-auth-header.md) header. All responses contain a
|
||||||
|
> [`Signature`](./api-vc-sig-header.md) header for optional verification.
|
||||||
|
|
||||||
|
## Starting the server
|
||||||
|
|
||||||
|
A Lighthouse validator client can be configured to expose a HTTP server by supplying the `--http` flag. The default listen address is `127.0.0.1:5062`.
|
||||||
|
|
||||||
|
The following CLI flags control the HTTP server:
|
||||||
|
|
||||||
|
- `--http`: enable the HTTP server (required even if the following flags are
|
||||||
|
provided).
|
||||||
|
- `--http-port`: specify the listen port of the server.
|
||||||
|
- `--http-allow-origin`: specify the value of the `Access-Control-Allow-Origin`
|
||||||
|
header. The default is to not supply a header.
|
||||||
|
|
||||||
|
## Security
|
||||||
|
|
||||||
|
The validator client HTTP is **not encrypted** (i.e., it is **not HTTPS**). For
|
||||||
|
this reason, it will only listen on `127.0.0.1`.
|
||||||
|
|
||||||
|
It is unsafe to expose the validator client to the public Internet without
|
||||||
|
additional transport layer security (e.g., HTTPS via nginx, SSH tunnels, etc.).
|
||||||
|
|
||||||
|
### CLI Example
|
||||||
|
|
||||||
|
Start the validator client with the HTTP server listening on [http://localhost:5062](http://localhost:5062):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
lighthouse vc --http
|
||||||
|
```
|
||||||
|
```diff
@@ -2,7 +2,10 @@
 //! Lighthouse project.

 use eth2_keystore::Keystore;
-use eth2_wallet::Wallet;
+use eth2_wallet::{
+    bip39::{Language, Mnemonic, MnemonicType},
+    Wallet,
+};
 use rand::{distributions::Alphanumeric, Rng};
 use serde_derive::{Deserialize, Serialize};
 use std::fs::{self, File};
@@ -15,6 +18,7 @@ use zeroize::Zeroize;
 pub mod validator_definitions;

 pub use eth2_keystore;
+pub use eth2_wallet;
 pub use eth2_wallet::PlainText;

 /// The minimum number of characters required for a wallet password.
@@ -150,6 +154,16 @@ pub fn is_password_sufficiently_complex(password: &[u8]) -> Result<(), String> {
     }
 }

+/// Returns a random 24-word english mnemonic.
+pub fn random_mnemonic() -> Mnemonic {
+    Mnemonic::new(MnemonicType::Words24, Language::English)
+}
+
+/// Attempts to parse a mnemonic phrase.
+pub fn mnemonic_from_phrase(phrase: &str) -> Result<Mnemonic, String> {
+    Mnemonic::from_phrase(phrase, Language::English).map_err(|e| e.to_string())
+}
+
 /// Provides a new-type wrapper around `String` that is zeroized on `Drop`.
 ///
 /// Useful for ensuring that password memory is zeroed-out on drop.
@@ -164,6 +178,12 @@ impl From<String> for ZeroizeString {
     }
 }

+impl ZeroizeString {
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
+}
+
 impl AsRef<[u8]> for ZeroizeString {
     fn as_ref(&self) -> &[u8] {
         self.0.as_bytes()
```
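As a quick illustration of the helpers added above (a hypothetical sketch, not code from this PR; it assumes `account_utils` is available as a dependency and re-uses the throw-away example mnemonic from the book pages):

```rust
use account_utils::{mnemonic_from_phrase, random_mnemonic, ZeroizeString};

fn main() -> Result<(), String> {
    // Generate a fresh 24-word English mnemonic (as used by `POST /lighthouse/validators`).
    let mnemonic = random_mnemonic();
    println!("generated {} words", mnemonic.phrase().split(' ').count());

    // Parse a user-supplied phrase (as used by `POST /lighthouse/validators/mnemonic`).
    let recovered = mnemonic_from_phrase(
        "theme onion deal plastic claim silver fancy youth lock ordinary hotel elegant \
         balance ridge web skill burger survey demand distance legal fish salad cloth",
    )?;
    assert_eq!(recovered.phrase().split(' ').count(), 24);

    // `ZeroizeString` wraps secrets (e.g. the API token) and zeroizes them on drop;
    // the new `as_str` accessor allows reading the value without copying it.
    let secret = ZeroizeString::from("example-secret".to_string());
    println!("secret length: {}", secret.as_str().len());

    Ok(())
}
```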
```diff
@@ -63,6 +63,8 @@ pub enum SigningDefinition {
 pub struct ValidatorDefinition {
     pub enabled: bool,
     pub voting_public_key: PublicKey,
+    #[serde(default)]
+    pub description: String,
     #[serde(flatten)]
     pub signing_definition: SigningDefinition,
 }
@@ -88,6 +90,7 @@ impl ValidatorDefinition {
         Ok(ValidatorDefinition {
             enabled: true,
             voting_public_key,
+            description: keystore.description().unwrap_or_else(|| "").to_string(),
             signing_definition: SigningDefinition::LocalKeystore {
                 voting_keystore_path,
                 voting_keystore_password_path: None,
@@ -205,6 +208,7 @@ impl ValidatorDefinitions {
         Some(ValidatorDefinition {
             enabled: true,
             voting_public_key,
+            description: keystore.description().unwrap_or_else(|| "").to_string(),
             signing_definition: SigningDefinition::LocalKeystore {
                 voting_keystore_path,
                 voting_keystore_password_path,
```
```diff
@@ -15,6 +15,12 @@ reqwest = { version = "0.10.8", features = ["json"] }
 eth2_libp2p = { path = "../../beacon_node/eth2_libp2p" }
 proto_array = { path = "../../consensus/proto_array", optional = true }
 serde_utils = { path = "../../consensus/serde_utils" }
+zeroize = { version = "1.0.0", features = ["zeroize_derive"] }
+eth2_keystore = { path = "../../crypto/eth2_keystore" }
+libsecp256k1 = "0.3.5"
+ring = "0.16.12"
+bytes = "0.5.6"
+account_utils = { path = "../../common/account_utils" }

 [target.'cfg(target_os = "linux")'.dependencies]
 psutil = { version = "3.1.0", optional = true }
```
```diff
@@ -9,6 +9,7 @@

 #[cfg(feature = "lighthouse")]
 pub mod lighthouse;
+pub mod lighthouse_vc;
 pub mod types;

 use self::types::*;
@@ -30,6 +31,14 @@ pub enum Error {
     StatusCode(StatusCode),
     /// The supplied URL is badly formatted. It should look something like `http://127.0.0.1:5052`.
     InvalidUrl(Url),
+    /// The supplied validator client secret is invalid.
+    InvalidSecret(String),
+    /// The server returned a response with an invalid signature. It may be an impostor.
+    InvalidSignatureHeader,
+    /// The server returned a response without a signature header. It may be an impostor.
+    MissingSignatureHeader,
+    /// The server returned an invalid JSON response.
+    InvalidJson(serde_json::Error),
 }

 impl Error {
@@ -40,6 +49,10 @@ impl Error {
             Error::ServerMessage(msg) => StatusCode::try_from(msg.code).ok(),
             Error::StatusCode(status) => Some(*status),
             Error::InvalidUrl(_) => None,
+            Error::InvalidSecret(_) => None,
+            Error::InvalidSignatureHeader => None,
+            Error::MissingSignatureHeader => None,
+            Error::InvalidJson(_) => None,
         }
     }
 }
@@ -531,7 +544,7 @@ impl BeaconNodeHttpClient {
         self.get(path).await
     }

-    /// `GET config/fork_schedule`
+    /// `GET config/spec`
    pub async fn get_config_spec(&self) -> Result<GenericResponse<YamlConfig>, Error> {
        let mut path = self.eth_path()?;

```
common/eth2/src/lighthouse_vc/http_client.rs — new file (+331 lines; the capture below ends mid-file)

```rust
use super::{types::*, PK_LEN, SECRET_PREFIX};
use crate::Error;
use account_utils::ZeroizeString;
use bytes::Bytes;
use reqwest::{
    header::{HeaderMap, HeaderValue},
    IntoUrl,
};
use ring::digest::{digest, SHA256};
use secp256k1::{Message, PublicKey, Signature};
use serde::{de::DeserializeOwned, Serialize};

pub use reqwest;
pub use reqwest::{Response, StatusCode, Url};

/// A wrapper around `reqwest::Client` which provides convenience methods for interfacing with a
/// Lighthouse Validator Client HTTP server (`validator_client/src/http_api`).
#[derive(Clone)]
pub struct ValidatorClientHttpClient {
    client: reqwest::Client,
    server: Url,
    secret: ZeroizeString,
    server_pubkey: PublicKey,
}

/// Parse an API token and return a secp256k1 public key.
pub fn parse_pubkey(secret: &str) -> Result<PublicKey, Error> {
    let secret = if !secret.starts_with(SECRET_PREFIX) {
        return Err(Error::InvalidSecret(format!(
            "secret does not start with {}",
            SECRET_PREFIX
        )));
    } else {
        &secret[SECRET_PREFIX.len()..]
    };

    serde_utils::hex::decode(&secret)
        .map_err(|e| Error::InvalidSecret(format!("invalid hex: {:?}", e)))
        .and_then(|bytes| {
            if bytes.len() != PK_LEN {
                return Err(Error::InvalidSecret(format!(
                    "expected {} bytes not {}",
                    PK_LEN,
                    bytes.len()
                )));
            }

            let mut arr = [0; PK_LEN];
            arr.copy_from_slice(&bytes);
            PublicKey::parse_compressed(&arr)
                .map_err(|e| Error::InvalidSecret(format!("invalid secp256k1 pubkey: {:?}", e)))
        })
}

impl ValidatorClientHttpClient {
    pub fn new(server: Url, secret: String) -> Result<Self, Error> {
        Ok(Self {
            client: reqwest::Client::new(),
            server,
            server_pubkey: parse_pubkey(&secret)?,
            secret: secret.into(),
        })
    }

    pub fn from_components(
        server: Url,
        client: reqwest::Client,
        secret: String,
    ) -> Result<Self, Error> {
        Ok(Self {
            client,
            server,
            server_pubkey: parse_pubkey(&secret)?,
            secret: secret.into(),
        })
    }

    async fn signed_body(&self, response: Response) -> Result<Bytes, Error> {
        let sig = response
            .headers()
            .get("Signature")
            .ok_or_else(|| Error::MissingSignatureHeader)?
            .to_str()
            .map_err(|_| Error::InvalidSignatureHeader)?
            .to_string();

        let body = response.bytes().await.map_err(Error::Reqwest)?;

        let message =
            Message::parse_slice(digest(&SHA256, &body).as_ref()).expect("sha256 is 32 bytes");

        serde_utils::hex::decode(&sig)
            .ok()
            .and_then(|bytes| {
                let sig = Signature::parse_der(&bytes).ok()?;
                Some(secp256k1::verify(&message, &sig, &self.server_pubkey))
            })
            .filter(|is_valid| *is_valid)
            .ok_or_else(|| Error::InvalidSignatureHeader)?;

        Ok(body)
    }

    async fn signed_json<T: DeserializeOwned>(&self, response: Response) -> Result<T, Error> {
        let body = self.signed_body(response).await?;
        serde_json::from_slice(&body).map_err(Error::InvalidJson)
    }

    fn headers(&self) -> Result<HeaderMap, Error> {
        let header_value = HeaderValue::from_str(&format!("Basic {}", self.secret.as_str()))
            .map_err(|e| {
                Error::InvalidSecret(format!("secret is invalid as a header value: {}", e))
            })?;

        let mut headers = HeaderMap::new();
        headers.insert("Authorization", header_value);

        Ok(headers)
    }

    /// Perform a HTTP GET request.
    async fn get<T: DeserializeOwned, U: IntoUrl>(&self, url: U) -> Result<T, Error> {
        let response = self
            .client
            .get(url)
            .headers(self.headers()?)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        let response = ok_or_error(response).await?;
        self.signed_json(response).await
    }

    /// Perform a HTTP GET request, returning `None` on a 404 error.
    async fn get_opt<T: DeserializeOwned, U: IntoUrl>(&self, url: U) -> Result<Option<T>, Error> {
        let response = self
            .client
            .get(url)
            .headers(self.headers()?)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        match ok_or_error(response).await {
            Ok(resp) => self.signed_json(resp).await.map(Option::Some),
            Err(err) => {
                if err.status() == Some(StatusCode::NOT_FOUND) {
                    Ok(None)
                } else {
                    Err(err)
                }
            }
        }
    }

    /// Perform a HTTP POST request.
    async fn post<T: Serialize, U: IntoUrl, V: DeserializeOwned>(
        &self,
        url: U,
        body: &T,
    ) -> Result<V, Error> {
        let response = self
            .client
            .post(url)
            .headers(self.headers()?)
            .json(body)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        let response = ok_or_error(response).await?;
        self.signed_json(response).await
    }

    /// Perform a HTTP PATCH request.
    async fn patch<T: Serialize, U: IntoUrl>(&self, url: U, body: &T) -> Result<(), Error> {
        let response = self
            .client
            .patch(url)
            .headers(self.headers()?)
            .json(body)
            .send()
            .await
            .map_err(Error::Reqwest)?;
        let response = ok_or_error(response).await?;
        self.signed_body(response).await?;
        Ok(())
    }

    /// `GET lighthouse/version`
    pub async fn get_lighthouse_version(&self) -> Result<GenericResponse<VersionData>, Error> {
        let mut path = self.server.clone();

        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("version");

        self.get(path).await
    }

    /// `GET lighthouse/health`
    pub async fn get_lighthouse_health(&self) -> Result<GenericResponse<Health>, Error> {
        let mut path = self.server.clone();

        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("health");

        self.get(path).await
    }

    /// `GET lighthouse/spec`
    pub async fn get_lighthouse_spec(&self) -> Result<GenericResponse<YamlConfig>, Error> {
        let mut path = self.server.clone();

        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("spec");

        self.get(path).await
    }

    /// `GET lighthouse/validators`
    pub async fn get_lighthouse_validators(
        &self,
    ) -> Result<GenericResponse<Vec<ValidatorData>>, Error> {
        let mut path = self.server.clone();

        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators");

        self.get(path).await
    }

    /// `GET lighthouse/validators/{validator_pubkey}`
    pub async fn get_lighthouse_validators_pubkey(
        &self,
        validator_pubkey: &PublicKeyBytes,
    ) -> Result<Option<GenericResponse<ValidatorData>>, Error> {
        let mut path = self.server.clone();

        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators")
            .push(&validator_pubkey.to_string());

        self.get_opt(path).await
    }

    /// `POST lighthouse/validators`
    pub async fn post_lighthouse_validators(
        &self,
        validators: Vec<ValidatorRequest>,
    ) -> Result<GenericResponse<PostValidatorsResponseData>, Error> {
        let mut path = self.server.clone();

        path.path_segments_mut()
            .map_err(|()| Error::InvalidUrl(self.server.clone()))?
            .push("lighthouse")
            .push("validators");

        self.post(path, &validators).await
    }

    /// `POST lighthouse/validators/mnemonic`
    pub async fn post_lighthouse_validators_mnemonic(
```
|
||||||
|
&self,
|
||||||
|
request: &CreateValidatorsMnemonicRequest,
|
||||||
|
) -> Result<GenericResponse<Vec<CreatedValidator>>, Error> {
|
||||||
|
let mut path = self.server.clone();
|
||||||
|
|
||||||
|
path.path_segments_mut()
|
||||||
|
.map_err(|()| Error::InvalidUrl(self.server.clone()))?
|
||||||
|
.push("lighthouse")
|
||||||
|
.push("validators")
|
||||||
|
.push("mnemonic");
|
||||||
|
|
||||||
|
self.post(path, &request).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `POST lighthouse/validators/keystore`
|
||||||
|
pub async fn post_lighthouse_validators_keystore(
|
||||||
|
&self,
|
||||||
|
request: &KeystoreValidatorsPostRequest,
|
||||||
|
) -> Result<GenericResponse<ValidatorData>, Error> {
|
||||||
|
let mut path = self.server.clone();
|
||||||
|
|
||||||
|
path.path_segments_mut()
|
||||||
|
.map_err(|()| Error::InvalidUrl(self.server.clone()))?
|
||||||
|
.push("lighthouse")
|
||||||
|
.push("validators")
|
||||||
|
.push("keystore");
|
||||||
|
|
||||||
|
self.post(path, &request).await
|
||||||
|
}
|
||||||
|
|
||||||
|
/// `PATCH lighthouse/validators/{validator_pubkey}`
|
||||||
|
pub async fn patch_lighthouse_validators(
|
||||||
|
&self,
|
||||||
|
voting_pubkey: &PublicKeyBytes,
|
||||||
|
enabled: bool,
|
||||||
|
) -> Result<(), Error> {
|
||||||
|
let mut path = self.server.clone();
|
||||||
|
|
||||||
|
path.path_segments_mut()
|
||||||
|
.map_err(|()| Error::InvalidUrl(self.server.clone()))?
|
||||||
|
.push("lighthouse")
|
||||||
|
.push("validators")
|
||||||
|
.push(&voting_pubkey.to_string());
|
||||||
|
|
||||||
|
self.patch(path, &ValidatorPatchRequest { enabled }).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns `Ok(response)` if the response is a `200 OK` response. Otherwise, creates an
|
||||||
|
/// appropriate error message.
|
||||||
|
async fn ok_or_error(response: Response) -> Result<Response, Error> {
|
||||||
|
let status = response.status();
|
||||||
|
|
||||||
|
if status == StatusCode::OK {
|
||||||
|
Ok(response)
|
||||||
|
} else if let Ok(message) = response.json().await {
|
||||||
|
Err(Error::ServerMessage(message))
|
||||||
|
} else {
|
||||||
|
Err(Error::StatusCode(status))
|
||||||
|
}
|
||||||
|
}
|
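A minimal usage sketch of this client (not part of the diff). It assumes the client type defined earlier in this file is `ValidatorClientHttpClient` with a `new(server: Url, secret: String) -> Result<Self, Error>` constructor, and that a VC is listening locally with the HTTP API enabled:

    // Sketch only: type/constructor names outside this hunk are assumptions.
    async fn demo() {
        let url = eth2::Url::parse("http://localhost:5062").unwrap();
        // The secret is the `api-token-0x…` string from the VC's `api-token.txt`.
        let client =
            ValidatorClientHttpClient::new(url, "api-token-0x02…".to_string()).unwrap();

        let _version = client.get_lighthouse_version().await.unwrap();
        let validators = client.get_lighthouse_validators().await.unwrap();
        println!("connected to VC; {} validators managed", validators.data.len());
    }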
common/eth2/src/lighthouse_vc/mod.rs (new file, 9 lines)
@ -0,0 +1,9 @@
pub mod http_client;
pub mod types;

/// The number of bytes in the secp256k1 public key used as the authorization token for the VC API.
pub const PK_LEN: usize = 33;

/// The prefix for the secp256k1 public key when it is used as the authorization token for the VC
/// API.
pub const SECRET_PREFIX: &str = "api-token-";
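For illustration only (not part of the diff): the API token handed to users is `SECRET_PREFIX` followed by the hex encoding of a 33-byte compressed secp256k1 public key, so a rough shape check could look like the sketch below (it assumes the hex is 0x-prefixed, as produced elsewhere in this PR by `serde_utils::hex::encode`):

    // Sketch: loose validity check for a VC API token, using the constants above.
    fn looks_like_api_token(token: &str) -> bool {
        token
            .strip_prefix(SECRET_PREFIX)            // "api-token-"
            .and_then(|hex| hex.strip_prefix("0x")) // assumed 0x-prefixed hex
            .map(|hex| hex.len() == PK_LEN * 2)     // 33 bytes -> 66 hex chars
            .unwrap_or(false)
    }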
common/eth2/src/lighthouse_vc/types.rs (new file, 58 lines)
@ -0,0 +1,58 @@
use account_utils::ZeroizeString;
use eth2_keystore::Keystore;
use serde::{Deserialize, Serialize};

pub use crate::lighthouse::Health;
pub use crate::types::{GenericResponse, VersionData};
pub use types::*;

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ValidatorData {
    pub enabled: bool,
    pub description: String,
    pub voting_pubkey: PublicKeyBytes,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ValidatorRequest {
    pub enable: bool,
    pub description: String,
    #[serde(with = "serde_utils::quoted_u64")]
    pub deposit_gwei: u64,
}

#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub struct CreateValidatorsMnemonicRequest {
    pub mnemonic: ZeroizeString,
    #[serde(with = "serde_utils::quoted_u32")]
    pub key_derivation_path_offset: u32,
    pub validators: Vec<ValidatorRequest>,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct CreatedValidator {
    pub enabled: bool,
    pub description: String,
    pub voting_pubkey: PublicKeyBytes,
    pub eth1_deposit_tx_data: String,
    #[serde(with = "serde_utils::quoted_u64")]
    pub deposit_gwei: u64,
}

#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub struct PostValidatorsResponseData {
    pub mnemonic: ZeroizeString,
    pub validators: Vec<CreatedValidator>,
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ValidatorPatchRequest {
    pub enabled: bool,
}

#[derive(Clone, PartialEq, Serialize, Deserialize)]
pub struct KeystoreValidatorsPostRequest {
    pub password: ZeroizeString,
    pub enable: bool,
    pub keystore: Keystore,
}
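As an aside (not part of the diff), the wire format these types imply is plain JSON with quoted integers; a sketch of how one entry of a `POST lighthouse/validators` body serializes:

    // Sketch: the `quoted_u64` helper encodes `deposit_gwei` as a decimal string.
    // The description value is a placeholder.
    fn example_request_json() -> String {
        let request = ValidatorRequest {
            enable: true,
            description: "validator_one".to_string(),
            deposit_gwei: 32_000_000_000,
        };
        // Yields: {"enable":true,"description":"validator_one","deposit_gwei":"32000000000"}
        serde_json::to_string(&request).expect("request serializes")
    }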
@ -40,6 +40,7 @@ pub enum Error {
     UninitializedWithdrawalKeystore,
     #[cfg(feature = "insecure_keys")]
     InsecureKeysError(String),
+    MissingPasswordDir,
 }
 
 impl From<KeystoreError> for Error {
@ -51,7 +52,7 @@ impl From<KeystoreError> for Error {
 /// A builder for creating a `ValidatorDir`.
 pub struct Builder<'a> {
     base_validators_dir: PathBuf,
-    password_dir: PathBuf,
+    password_dir: Option<PathBuf>,
     pub(crate) voting_keystore: Option<(Keystore, PlainText)>,
     pub(crate) withdrawal_keystore: Option<(Keystore, PlainText)>,
     store_withdrawal_keystore: bool,
@ -60,10 +61,10 @@ pub struct Builder<'a> {
 
 impl<'a> Builder<'a> {
     /// Instantiate a new builder.
-    pub fn new(base_validators_dir: PathBuf, password_dir: PathBuf) -> Self {
+    pub fn new(base_validators_dir: PathBuf) -> Self {
         Self {
             base_validators_dir,
-            password_dir,
+            password_dir: None,
             voting_keystore: None,
             withdrawal_keystore: None,
             store_withdrawal_keystore: true,
@ -71,6 +72,12 @@ impl<'a> Builder<'a> {
         }
     }
 
+    /// Supply a directory in which to store the passwords for the validator keystores.
+    pub fn password_dir<P: Into<PathBuf>>(mut self, password_dir: P) -> Self {
+        self.password_dir = Some(password_dir.into());
+        self
+    }
+
     /// Build the `ValidatorDir` use the given `keystore` which can be unlocked with `password`.
     ///
     /// The builder will not necessarily check that `password` can unlock `keystore`.
@ -215,26 +222,35 @@ impl<'a> Builder<'a> {
             }
         }
 
-            // Only the withdrawal keystore if explicitly required.
-            if self.store_withdrawal_keystore {
-                // Write the withdrawal password to file.
-                write_password_to_file(
-                    self.password_dir
-                        .join(withdrawal_keypair.pk.to_hex_string()),
-                    withdrawal_password.as_bytes(),
-                )?;
-
-                // Write the withdrawal keystore to file.
-                write_keystore_to_file(dir.join(WITHDRAWAL_KEYSTORE_FILE), &withdrawal_keystore)?;
+            if self.password_dir.is_none() && self.store_withdrawal_keystore {
+                return Err(Error::MissingPasswordDir);
+            }
+
+            if let Some(password_dir) = self.password_dir.as_ref() {
+                // Only the withdrawal keystore if explicitly required.
+                if self.store_withdrawal_keystore {
+                    // Write the withdrawal password to file.
+                    write_password_to_file(
+                        password_dir.join(withdrawal_keypair.pk.to_hex_string()),
+                        withdrawal_password.as_bytes(),
+                    )?;
+
+                    // Write the withdrawal keystore to file.
+                    write_keystore_to_file(
+                        dir.join(WITHDRAWAL_KEYSTORE_FILE),
+                        &withdrawal_keystore,
+                    )?;
+                }
             }
         }
 
-        // Write the voting password to file.
-        write_password_to_file(
-            self.password_dir
-                .join(format!("0x{}", voting_keystore.pubkey())),
-            voting_password.as_bytes(),
-        )?;
+        if let Some(password_dir) = self.password_dir.as_ref() {
+            // Write the voting password to file.
+            write_password_to_file(
+                password_dir.join(format!("0x{}", voting_keystore.pubkey())),
+                voting_password.as_bytes(),
+            )?;
+        }
 
         // Write the voting keystore to file.
         write_keystore_to_file(dir.join(VOTING_KEYSTORE_FILE), &voting_keystore)?;
@ -73,7 +73,8 @@ pub fn build_deterministic_validator_dirs(
     indices: &[usize],
 ) -> Result<(), String> {
     for &i in indices {
-        Builder::new(validators_dir.clone(), password_dir.clone())
+        Builder::new(validators_dir.clone())
+            .password_dir(password_dir.clone())
             .insecure_voting_keypair(i)
             .map_err(|e| format!("Unable to generate insecure keypair: {:?}", e))?
             .store_withdrawal_keystore(false)
@ -129,6 +129,11 @@ impl ValidatorDir {
         &self.dir
     }
 
+    /// Returns the path to the voting keystore JSON file.
+    pub fn voting_keystore_path(&self) -> PathBuf {
+        self.dir.join(VOTING_KEYSTORE_FILE)
+    }
+
     /// Attempts to read the keystore in `self.dir` and decrypt the keypair using a password file
     /// in `password_dir`.
     ///
@ -78,13 +78,11 @@ impl Harness {
          * Build the `ValidatorDir`.
          */
 
-        let builder = Builder::new(
-            self.validators_dir.path().into(),
-            self.password_dir.path().into(),
-        )
-        // Note: setting the withdrawal keystore here ensure that it can get overriden by later
-        // calls to `random_withdrawal_keystore`.
-        .store_withdrawal_keystore(config.store_withdrawal_keystore);
+        let builder = Builder::new(self.validators_dir.path().into())
+            .password_dir(self.password_dir.path())
+            // Note: setting the withdrawal keystore here ensure that it can get replaced by
+            // further calls to `random_withdrawal_keystore`.
+            .store_withdrawal_keystore(config.store_withdrawal_keystore);
 
         let builder = if config.random_voting_keystore {
             builder.random_voting_keystore().unwrap()
@ -208,13 +206,11 @@ fn without_voting_keystore() {
     let harness = Harness::new();
 
     assert!(matches!(
-        Builder::new(
-            harness.validators_dir.path().into(),
-            harness.password_dir.path().into(),
-        )
-        .random_withdrawal_keystore()
-        .unwrap()
-        .build(),
+        Builder::new(harness.validators_dir.path().into(),)
+            .password_dir(harness.password_dir.path())
+            .random_withdrawal_keystore()
+            .unwrap()
+            .build(),
         Err(BuilderError::UninitializedVotingKeystore)
     ))
 }
@ -225,26 +221,22 @@ fn without_withdrawal_keystore() {
     let spec = &MainnetEthSpec::default_spec();
 
     // Should build without withdrawal keystore if not storing the it or creating eth1 data.
-    Builder::new(
-        harness.validators_dir.path().into(),
-        harness.password_dir.path().into(),
-    )
-    .random_voting_keystore()
-    .unwrap()
-    .store_withdrawal_keystore(false)
-    .build()
-    .unwrap();
+    Builder::new(harness.validators_dir.path().into())
+        .password_dir(harness.password_dir.path())
+        .random_voting_keystore()
+        .unwrap()
+        .store_withdrawal_keystore(false)
+        .build()
+        .unwrap();
 
     assert!(
         matches!(
-            Builder::new(
-                harness.validators_dir.path().into(),
-                harness.password_dir.path().into(),
-            )
-            .random_voting_keystore()
-            .unwrap()
-            .store_withdrawal_keystore(true)
-            .build(),
+            Builder::new(harness.validators_dir.path().into(),)
+                .password_dir(harness.password_dir.path())
+                .random_voting_keystore()
+                .unwrap()
+                .store_withdrawal_keystore(true)
+                .build(),
             Err(BuilderError::UninitializedWithdrawalKeystore)
         ),
         "storing the keystore requires keystore"
@ -252,14 +244,12 @@ fn without_withdrawal_keystore() {
 
     assert!(
        matches!(
-            Builder::new(
-                harness.validators_dir.path().into(),
-                harness.password_dir.path().into(),
-            )
-            .random_voting_keystore()
-            .unwrap()
-            .create_eth1_tx_data(42, spec)
-            .build(),
+            Builder::new(harness.validators_dir.path().into(),)
+                .password_dir(harness.password_dir.path())
+                .random_voting_keystore()
+                .unwrap()
+                .create_eth1_tx_data(42, spec)
+                .build(),
             Err(BuilderError::UninitializedWithdrawalKeystore)
         ),
         "storing the keystore requires keystore"
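A short sketch of the new builder flow shown in the hunks above (illustrative only; paths are placeholders): the password directory is now optional and supplied with a chained `.password_dir(...)` call instead of being a `Builder::new` argument:

    use std::path::PathBuf;
    use validator_dir::Builder;

    fn build_validator_dir_example() -> Result<(), String> {
        let validators_dir = PathBuf::from("/tmp/validators");
        let secrets_dir = PathBuf::from("/tmp/secrets");

        Builder::new(validators_dir)
            .password_dir(secrets_dir)
            .random_voting_keystore()
            .map_err(|e| format!("keystore error: {:?}", e))?
            .store_withdrawal_keystore(false)
            .build()
            .map_err(|e| format!("build error: {:?}", e))?;
        Ok(())
    }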
@ -13,3 +13,5 @@ types = { path = "../../consensus/types" }
 beacon_chain = { path = "../../beacon_node/beacon_chain" }
 state_processing = { path = "../../consensus/state_processing" }
 safe_arith = { path = "../../consensus/safe_arith" }
+serde = { version = "1.0.110", features = ["derive"] }
+tokio = { version = "0.2.21", features = ["sync"] }
@ -3,3 +3,4 @@
 
 pub mod reject;
 pub mod reply;
+pub mod task;
@ -101,6 +101,15 @@ pub fn not_synced(msg: String) -> warp::reject::Rejection {
     warp::reject::custom(NotSynced(msg))
 }
 
+#[derive(Debug)]
+pub struct InvalidAuthorization(pub String);
+
+impl Reject for InvalidAuthorization {}
+
+pub fn invalid_auth(msg: String) -> warp::reject::Rejection {
+    warp::reject::custom(InvalidAuthorization(msg))
+}
+
 /// This function receives a `Rejection` and tries to return a custom
 /// value, otherwise simply passes the rejection along.
 pub async fn handle_rejection(err: warp::Rejection) -> Result<impl warp::Reply, Infallible> {
@ -150,6 +159,15 @@ pub async fn handle_rejection(err: warp::Rejection) -> Result<impl warp::Reply,
     } else if let Some(e) = err.find::<crate::reject::NotSynced>() {
         code = StatusCode::SERVICE_UNAVAILABLE;
         message = format!("SERVICE_UNAVAILABLE: beacon node is syncing: {}", e.0);
+    } else if let Some(e) = err.find::<crate::reject::InvalidAuthorization>() {
+        code = StatusCode::FORBIDDEN;
+        message = format!("FORBIDDEN: Invalid auth token: {}", e.0);
+    } else if let Some(e) = err.find::<warp::reject::MissingHeader>() {
+        code = StatusCode::BAD_REQUEST;
+        message = format!("BAD_REQUEST: missing {} header", e.name());
+    } else if let Some(e) = err.find::<warp::reject::InvalidHeader>() {
+        code = StatusCode::BAD_REQUEST;
+        message = format!("BAD_REQUEST: invalid {} header", e.name());
     } else if err.find::<warp::reject::MethodNotAllowed>().is_some() {
         code = StatusCode::METHOD_NOT_ALLOWED;
         message = "METHOD_NOT_ALLOWED".to_string();
common/warp_utils/src/task.rs (new file, 21 lines)
@ -0,0 +1,21 @@
use serde::Serialize;

/// Execute some task in a tokio "blocking thread". These threads are ideal for long-running
/// (blocking) tasks since they don't jam up the core executor.
pub async fn blocking_task<F, T>(func: F) -> T
where
    F: Fn() -> T,
{
    tokio::task::block_in_place(func)
}

/// A convenience wrapper around `blocking_task` for use with `warp` JSON responses.
pub async fn blocking_json_task<F, T>(func: F) -> Result<warp::reply::Json, warp::Rejection>
where
    F: Fn() -> Result<T, warp::Rejection>,
    T: Serialize,
{
    blocking_task(func)
        .await
        .map(|resp| warp::reply::json(&resp))
}
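A sketch of how a warp route might use `blocking_json_task` to keep a slow handler off the core executor (not part of the diff; the route path and payload are placeholders and the exact filter types are assumptions):

    use warp::Filter;

    fn example_route(
    ) -> impl Filter<Extract = (warp::reply::Json,), Error = warp::Rejection> + Clone {
        warp::path!("lighthouse" / "example").and_then(|| {
            blocking_json_task(move || {
                // Imagine reading keystores from disk here.
                Ok::<_, warp::Rejection>(vec!["some".to_string(), "data".to_string()])
            })
        })
    }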
@ -81,6 +81,7 @@ pub struct KeystoreBuilder<'a> {
     cipher: Cipher,
     uuid: Uuid,
     path: String,
+    description: String,
 }
 
 impl<'a> KeystoreBuilder<'a> {
@ -105,10 +106,17 @@ impl<'a> KeystoreBuilder<'a> {
                 cipher: Cipher::Aes128Ctr(Aes128Ctr { iv }),
                 uuid: Uuid::new_v4(),
                 path,
+                description: "".to_string(),
             })
         }
     }
 
+    /// Build the keystore with a specific description instead of an empty string.
+    pub fn description(mut self, description: String) -> Self {
+        self.description = description;
+        self
+    }
+
     /// Build the keystore using the supplied `kdf` instead of `crate::default_kdf`.
     pub fn kdf(mut self, kdf: Kdf) -> Self {
         self.kdf = kdf;
@ -124,6 +132,7 @@ impl<'a> KeystoreBuilder<'a> {
             self.cipher,
             self.uuid,
             self.path,
+            self.description,
         )
     }
 }
@ -147,6 +156,7 @@ impl Keystore {
         cipher: Cipher,
         uuid: Uuid,
         path: String,
+        description: String,
     ) -> Result<Self, Error> {
         let secret: ZeroizeHash = keypair.sk.serialize();
 
@ -175,7 +185,7 @@ impl Keystore {
                 path: Some(path),
                 pubkey: keypair.pk.to_hex_string()[2..].to_string(),
                 version: Version::four(),
-                description: None,
+                description: Some(description),
                 name: None,
             },
         })
@ -228,6 +238,18 @@ impl Keystore {
         &self.json.pubkey
     }
 
+    /// Returns the description for the keystore, if the field is present.
+    pub fn description(&self) -> Option<&str> {
+        self.json.description.as_deref()
+    }
+
+    /// Sets the description for the keystore.
+    ///
+    /// Note: this does not save the keystore to disk.
+    pub fn set_description(&mut self, description: String) {
+        self.json.description = Some(description)
+    }
+
     /// Returns the pubkey for the keystore, parsed as a `PublicKey` if it parses.
     pub fn public_key(&self) -> Option<PublicKey> {
         serde_json::from_str(&format!("\"0x{}\"", &self.json.pubkey)).ok()
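A sketch of setting the new description at build time (not part of the diff; the import paths, password, and EIP-2334 path shown are placeholders, and `KeystoreBuilder::new(keypair, password, path)` is assumed from earlier in this file):

    use bls::Keypair;
    use eth2_keystore::{Error, Keystore, KeystoreBuilder};

    fn build_named_keystore(keypair: &Keypair) -> Result<Keystore, Error> {
        KeystoreBuilder::new(keypair, b"correct-horse-battery-staple", "m/12381/3600/0/0/0".into())?
            .description("Mainnet validator #1".to_string())
            .build()
    }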
@ -215,6 +215,23 @@ impl Wallet {
         self.json.nextaccount
     }
 
+    /// Sets the value of the JSON wallet `nextaccount` field.
+    ///
+    /// This will be the index of the next wallet generated with `Self::next_validator`.
+    ///
+    /// ## Errors
+    ///
+    /// Returns `Err(())` if `nextaccount` is less than `self.nextaccount()` without mutating
+    /// `self`. This is to protect against duplicate validator generation.
+    pub fn set_nextaccount(&mut self, nextaccount: u32) -> Result<(), ()> {
+        if nextaccount >= self.nextaccount() {
+            self.json.nextaccount = nextaccount;
+            Ok(())
+        } else {
+            Err(())
+        }
+    }
+
     /// Returns the value of the JSON wallet `name` field.
     pub fn name(&self) -> &str {
         &self.json.name
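Illustrative only (not part of the diff): the VC API handler later in this PR uses this setter to respect the requested EIP-2334 offset before deriving keys. A minimal sketch, assuming the `Wallet` type above is re-exported as `account_utils::eth2_wallet::Wallet`:

    use account_utils::eth2_wallet::Wallet;

    fn skip_to_offset(wallet: &mut Wallet, offset: u32) -> Result<(), String> {
        wallet
            .set_nextaccount(offset)
            .map_err(|()| "offset is behind the wallet's nextaccount".to_string())
    }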
@ -21,7 +21,8 @@ pub fn run(matches: &ArgMatches) -> Result<(), String> {
     for i in 0..validator_count {
         println!("Validator {}/{}", i + 1, validator_count);
 
-        ValidatorBuilder::new(validators_dir.clone(), secrets_dir.clone())
+        ValidatorBuilder::new(validators_dir.clone())
+            .password_dir(secrets_dir.clone())
             .store_withdrawal_keystore(false)
             .insecure_voting_keypair(i)
             .map_err(|e| format!("Unable to generate keys: {:?}", e))?
@ -5,7 +5,7 @@ authors = ["Paul Hauner <paul@paulhauner.com>"]
 edition = "2018"
 
 [dependencies]
-tokio = { version = "0.2.21", features = ["macros"] }
+tokio = { version = "0.2.21", features = ["full"] }
 slog = { version = "2.5.2", features = ["max_level_trace"] }
 sloggers = "1.0.0"
 types = { "path" = "../../consensus/types" }
@ -283,7 +283,7 @@ fn run<E: EthSpec>(
     let context = environment.core_context();
     let log = context.log().clone();
     let executor = context.executor.clone();
-    let config = validator_client::Config::from_cli(&matches)
+    let config = validator_client::Config::from_cli(&matches, context.log())
         .map_err(|e| format!("Unable to initialize validator config: {}", e))?;
     environment.runtime().spawn(async move {
         let run = async {
@ -481,6 +481,7 @@ fn validator_import_launchpad() {
 
     let expected_def = ValidatorDefinition {
         enabled: true,
+        description: "".into(),
         voting_public_key: keystore.public_key().unwrap(),
         signing_definition: SigningDefinition::LocalKeystore {
             voting_keystore_path,
@ -128,7 +128,7 @@ impl<E: EthSpec> LocalNetwork<E> {
                 .expect("Must have http started")
         };
 
-        validator_config.http_server =
+        validator_config.beacon_node =
             format!("http://{}:{}", socket_addr.ip(), socket_addr.port());
         let validator_client = LocalValidatorClient::production_with_insecure_keypairs(
             context,
@ -10,6 +10,8 @@ path = "src/lib.rs"
 
 [dev-dependencies]
 tokio = { version = "0.2.21", features = ["time", "rt-threaded", "macros"] }
+tempfile = "3.1.0"
+deposit_contract = { path = "../common/deposit_contract" }
 
 [dependencies]
 eth2_ssz = "0.1.2"
@ -47,3 +49,11 @@ validator_dir = { path = "../common/validator_dir" }
 clap_utils = { path = "../common/clap_utils" }
 eth2_keystore = { path = "../crypto/eth2_keystore" }
 account_utils = { path = "../common/account_utils" }
+lighthouse_version = { path = "../common/lighthouse_version" }
+warp_utils = { path = "../common/warp_utils" }
+warp = "0.2.5"
+hyper = "0.13.5"
+serde_utils = { path = "../consensus/serde_utils" }
+libsecp256k1 = "0.3.5"
+ring = "0.16.12"
+rand = "0.7.3"
@ -1,4 +1,4 @@
-use crate::config::DEFAULT_HTTP_SERVER;
+use crate::config::DEFAULT_BEACON_NODE;
 use clap::{App, Arg};
 
 pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
@ -8,13 +8,22 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
             "When connected to a beacon node, performs the duties of a staked \
                 validator (e.g., proposing blocks and attestations).",
         )
+        .arg(
+            Arg::with_name("beacon-node")
+                .long("beacon-node")
+                .value_name("NETWORK_ADDRESS")
+                .help("Address to a beacon node HTTP API")
+                .default_value(&DEFAULT_BEACON_NODE)
+                .takes_value(true),
+        )
+        // This argument is deprecated, use `--beacon-node` instead.
         .arg(
             Arg::with_name("server")
                 .long("server")
                 .value_name("NETWORK_ADDRESS")
-                .help("Address to connect to BeaconNode.")
-                .default_value(&DEFAULT_HTTP_SERVER)
-                .takes_value(true),
+                .help("Deprecated. Use --beacon-node.")
+                .takes_value(true)
+                .conflicts_with("beacon-node"),
         )
         .arg(
             Arg::with_name("validators-dir")
@ -97,4 +106,40 @@ pub fn cli_app<'a, 'b>() -> App<'a, 'b> {
                 .value_name("GRAFFITI")
                 .takes_value(true)
         )
+        /* REST API related arguments */
+        .arg(
+            Arg::with_name("http")
+                .long("http")
+                .help("Enable the RESTful HTTP API server. Disabled by default.")
+                .takes_value(false),
+        )
+        /*
+         * Note: there is purposefully no `--http-address` flag provided.
+         *
+         * The HTTP server is **not** encrypted (i.e., not HTTPS) and therefore it is unsafe to
+         * publish on a public network.
+         *
+         * We restrict the user to `127.0.0.1` and they must provide some other transport-layer
+         * encryption (e.g., SSH tunnels).
+         */
+        .arg(
+            Arg::with_name("http-port")
+                .long("http-port")
+                .value_name("PORT")
+                .help("Set the listen TCP port for the RESTful HTTP API server. This server does **not** \
+                provide encryption and is completely unsuitable to expose to a public network. \
+                We do not provide a --http-address flag and restrict the user to listening on \
+                127.0.0.1. For access via the Internet, apply a transport-layer security like \
+                a HTTPS reverse-proxy or SSH tunnelling.")
+                .default_value("5062")
+                .takes_value(true),
+        )
+        .arg(
+            Arg::with_name("http-allow-origin")
+                .long("http-allow-origin")
+                .value_name("ORIGIN")
+                .help("Set the value of the Access-Control-Allow-Origin response HTTP header. Use * to allow any origin (not recommended in production)")
+                .default_value("")
+                .takes_value(true),
+        )
 }
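As an illustration (not part of the diff), enabling the new API from the command line would look something like `lighthouse vc --beacon-node http://localhost:5052 --http --http-port 5062`; the exact binary and subcommand names follow the existing Lighthouse CLI rather than anything defined in this hunk.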
@ -1,3 +1,4 @@
+use crate::http_api;
 use clap::ArgMatches;
 use clap_utils::{parse_optional, parse_required};
 use directory::{
@ -6,10 +7,12 @@ use directory::{
 };
 use eth2::types::Graffiti;
 use serde_derive::{Deserialize, Serialize};
+use slog::{warn, Logger};
+use std::fs;
 use std::path::PathBuf;
 use types::GRAFFITI_BYTES_LEN;
 
-pub const DEFAULT_HTTP_SERVER: &str = "http://localhost:5052/";
+pub const DEFAULT_BEACON_NODE: &str = "http://localhost:5052/";
 
 /// Stores the core configuration for this validator instance.
 #[derive(Clone, Serialize, Deserialize)]
@ -21,7 +24,7 @@ pub struct Config {
     /// The http endpoint of the beacon node API.
     ///
     /// Should be similar to `http://localhost:8080`
-    pub http_server: String,
+    pub beacon_node: String,
     /// If true, the validator client will still poll for duties and produce blocks even if the
     /// beacon node is not synced at startup.
     pub allow_unsynced_beacon_node: bool,
@ -33,6 +36,8 @@ pub struct Config {
     pub strict_slashing_protection: bool,
     /// Graffiti to be inserted everytime we create a block.
     pub graffiti: Option<Graffiti>,
+    /// Configuration for the HTTP REST API.
+    pub http_api: http_api::Config,
 }
 
 impl Default for Config {
@ -49,12 +54,13 @@ impl Default for Config {
         Self {
             validator_dir,
             secrets_dir,
-            http_server: DEFAULT_HTTP_SERVER.to_string(),
+            beacon_node: DEFAULT_BEACON_NODE.to_string(),
             allow_unsynced_beacon_node: false,
             delete_lockfiles: false,
             disable_auto_discover: false,
             strict_slashing_protection: false,
             graffiti: None,
+            http_api: <_>::default(),
         }
     }
 }
@ -62,7 +68,7 @@ impl Default for Config {
 impl Config {
     /// Returns a `Default` implementation of `Self` with some parameters modified by the supplied
     /// `cli_args`.
-    pub fn from_cli(cli_args: &ArgMatches) -> Result<Config, String> {
+    pub fn from_cli(cli_args: &ArgMatches, log: &Logger) -> Result<Config, String> {
         let mut config = Config::default();
 
         let default_root_dir = dirs::home_dir()
@ -95,14 +101,22 @@ impl Config {
         });
 
         if !config.validator_dir.exists() {
-            return Err(format!(
-                "The directory for validator data does not exist: {:?}",
-                config.validator_dir
-            ));
+            fs::create_dir_all(&config.validator_dir)
+                .map_err(|e| format!("Failed to create {:?}: {:?}", config.validator_dir, e))?;
         }
 
+        if let Some(beacon_node) = parse_optional(cli_args, "beacon-node")? {
+            config.beacon_node = beacon_node;
+        }
+
+        // To be deprecated.
         if let Some(server) = parse_optional(cli_args, "server")? {
-            config.http_server = server;
+            warn!(
+                log,
+                "The --server flag is deprecated";
+                "msg" => "please use --beacon-node instead"
+            );
+            config.beacon_node = server;
         }
 
         config.allow_unsynced_beacon_node = cli_args.is_present("allow-unsynced");
@ -129,6 +143,29 @@ impl Config {
             }
         }
 
+        /*
+         * Http API server
+         */
+
+        if cli_args.is_present("http") {
+            config.http_api.enabled = true;
+        }
+
+        if let Some(port) = cli_args.value_of("http-port") {
+            config.http_api.listen_port = port
+                .parse::<u16>()
+                .map_err(|_| "http-port is not a valid u16.")?;
+        }
+
+        if let Some(allow_origin) = cli_args.value_of("http-allow-origin") {
+            // Pre-validate the config value to give feedback to the user on node startup, instead of
+            // as late as when the first API response is produced.
+            hyper::header::HeaderValue::from_str(allow_origin)
+                .map_err(|_| "Invalid allow-origin value")?;
+
+            config.http_api.allow_origin = Some(allow_origin.to_string());
+        }
+
         Ok(config)
     }
 }
@ -2,31 +2,32 @@ use environment::RuntimeContext;
 use eth2::{types::StateId, BeaconNodeHttpClient};
 use futures::StreamExt;
 use parking_lot::RwLock;
+use slog::Logger;
 use slog::{debug, trace};
 use slot_clock::SlotClock;
 use std::ops::Deref;
 use std::sync::Arc;
 use tokio::time::{interval_at, Duration, Instant};
-use types::{ChainSpec, EthSpec, Fork};
+use types::{EthSpec, Fork};
 
 /// Delay this period of time after the slot starts. This allows the node to process the new slot.
 const TIME_DELAY_FROM_SLOT: Duration = Duration::from_millis(80);
 
 /// Builds a `ForkService`.
-pub struct ForkServiceBuilder<T, E: EthSpec> {
+pub struct ForkServiceBuilder<T> {
     fork: Option<Fork>,
     slot_clock: Option<T>,
     beacon_node: Option<BeaconNodeHttpClient>,
-    context: Option<RuntimeContext<E>>,
+    log: Option<Logger>,
 }
 
-impl<T: SlotClock + 'static, E: EthSpec> ForkServiceBuilder<T, E> {
+impl<T: SlotClock + 'static> ForkServiceBuilder<T> {
     pub fn new() -> Self {
         Self {
             fork: None,
             slot_clock: None,
             beacon_node: None,
-            context: None,
+            log: None,
         }
     }
 
@ -40,12 +41,12 @@ impl<T: SlotClock + 'static, E: EthSpec> ForkServiceBuilder<T, E> {
         self
     }
 
-    pub fn runtime_context(mut self, context: RuntimeContext<E>) -> Self {
-        self.context = Some(context);
+    pub fn log(mut self, log: Logger) -> Self {
+        self.log = Some(log);
         self
     }
 
-    pub fn build(self) -> Result<ForkService<T, E>, String> {
+    pub fn build(self) -> Result<ForkService<T>, String> {
         Ok(ForkService {
             inner: Arc::new(Inner {
                 fork: RwLock::new(self.fork),
@ -55,28 +56,48 @@ impl<T: SlotClock + 'static, E: EthSpec> ForkServiceBuilder<T, E> {
                 beacon_node: self
                     .beacon_node
                     .ok_or_else(|| "Cannot build ForkService without beacon_node")?,
-                context: self
-                    .context
-                    .ok_or_else(|| "Cannot build ForkService without runtime_context")?,
+                log: self
+                    .log
+                    .ok_or_else(|| "Cannot build ForkService without logger")?
+                    .clone(),
             }),
         })
     }
 }
 
+#[cfg(test)]
+#[allow(dead_code)]
+impl ForkServiceBuilder<slot_clock::TestingSlotClock> {
+    pub fn testing_only(log: Logger) -> Self {
+        Self {
+            fork: Some(types::Fork::default()),
+            slot_clock: Some(slot_clock::TestingSlotClock::new(
+                types::Slot::new(0),
+                std::time::Duration::from_secs(42),
+                std::time::Duration::from_secs(42),
+            )),
+            beacon_node: Some(eth2::BeaconNodeHttpClient::new(
+                eth2::Url::parse("http://127.0.0.1").unwrap(),
+            )),
+            log: Some(log),
+        }
+    }
+}
+
 /// Helper to minimise `Arc` usage.
-pub struct Inner<T, E: EthSpec> {
+pub struct Inner<T> {
     fork: RwLock<Option<Fork>>,
     beacon_node: BeaconNodeHttpClient,
-    context: RuntimeContext<E>,
+    log: Logger,
     slot_clock: T,
 }
 
 /// Attempts to download the `Fork` struct from the beacon node at the start of each epoch.
-pub struct ForkService<T, E: EthSpec> {
-    inner: Arc<Inner<T, E>>,
+pub struct ForkService<T> {
+    inner: Arc<Inner<T>>,
 }
 
-impl<T, E: EthSpec> Clone for ForkService<T, E> {
+impl<T> Clone for ForkService<T> {
     fn clone(&self) -> Self {
         Self {
             inner: self.inner.clone(),
@ -84,22 +105,27 @@ impl<T, E: EthSpec> Clone for ForkService<T, E> {
         }
     }
 }
 
-impl<T, E: EthSpec> Deref for ForkService<T, E> {
-    type Target = Inner<T, E>;
+impl<T> Deref for ForkService<T> {
+    type Target = Inner<T>;
 
     fn deref(&self) -> &Self::Target {
        self.inner.deref()
    }
 }
 
-impl<T: SlotClock + 'static, E: EthSpec> ForkService<T, E> {
+impl<T: SlotClock + 'static> ForkService<T> {
     /// Returns the last fork downloaded from the beacon node, if any.
     pub fn fork(&self) -> Option<Fork> {
         *self.fork.read()
     }
 
     /// Starts the service that periodically polls for the `Fork`.
-    pub fn start_update_service(self, spec: &ChainSpec) -> Result<(), String> {
+    pub fn start_update_service<E: EthSpec>(
+        self,
+        context: &RuntimeContext<E>,
+    ) -> Result<(), String> {
+        let spec = &context.eth2_config.spec;
+
         let duration_to_next_epoch = self
             .slot_clock
             .duration_to_next_epoch(E::slots_per_epoch())
@ -115,13 +141,12 @@ impl<T: SlotClock + 'static, E: EthSpec> ForkService<T, E> {
         };
 
         // Run an immediate update before starting the updater service.
-        self.inner
-            .context
+        context
             .executor
             .runtime_handle()
             .spawn(self.clone().do_update());
 
-        let executor = self.inner.context.executor.clone();
+        let executor = context.executor.clone();
 
         let interval_fut = async move {
             while interval.next().await.is_some() {
@ -136,8 +161,6 @@ impl<T: SlotClock + 'static, E: EthSpec> ForkService<T, E> {
 
     /// Attempts to download the `Fork` from the server.
     async fn do_update(self) -> Result<(), ()> {
-        let log = self.context.log();
-
         let fork = self
             .inner
             .beacon_node
@ -145,14 +168,14 @@ impl<T: SlotClock + 'static, E: EthSpec> ForkService<T, E> {
             .await
             .map_err(|e| {
                 trace!(
-                    log,
+                    self.log,
                     "Fork update failed";
                     "error" => format!("Error retrieving fork: {:?}", e)
                 )
             })?
             .ok_or_else(|| {
                 trace!(
-                    log,
+                    self.log,
                     "Fork update failed";
                     "error" => "The beacon head fork is unknown"
                 )
@ -163,7 +186,7 @@ impl<T: SlotClock + 'static, E: EthSpec> ForkService<T, E> {
             *(self.fork.write()) = Some(fork);
         }
 
-        debug!(log, "Fork update success");
+        debug!(self.log, "Fork update success");
 
         // Returning an error will stop the interval. This is not desired, a single failure
         // should not stop all future attempts.
184
validator_client/src/http_api/api_secret.rs
Normal file
184
validator_client/src/http_api/api_secret.rs
Normal file
@ -0,0 +1,184 @@
|
|||||||
|
use eth2::lighthouse_vc::{PK_LEN, SECRET_PREFIX as PK_PREFIX};
|
||||||
|
use rand::thread_rng;
|
||||||
|
use ring::digest::{digest, SHA256};
|
||||||
|
use secp256k1::{Message, PublicKey, SecretKey};
|
||||||
|
use std::fs;
|
||||||
|
use std::path::Path;
|
||||||
|
use warp::Filter;
|
||||||
|
|
||||||
|
/// The name of the file which stores the secret key.
|
||||||
|
///
|
||||||
|
/// It is purposefully opaque to prevent users confusing it with the "secret" that they need to
|
||||||
|
/// share with API consumers (which is actually the public key).
|
||||||
|
pub const SK_FILENAME: &str = ".secp-sk";
|
||||||
|
|
||||||
|
/// Length of the raw secret key, in bytes.
|
||||||
|
pub const SK_LEN: usize = 32;
|
||||||
|
|
||||||
|
/// The name of the file which stores the public key.
|
||||||
|
///
|
||||||
|
/// For users, this public key is a "secret" that can be shared with API consumers to provide them
|
||||||
|
/// access to the API. We avoid calling it a "public" key to users, since they should not post this
|
||||||
|
/// value in a public forum.
|
||||||
|
pub const PK_FILENAME: &str = "api-token.txt";
|
||||||
|
|
||||||
|
/// Contains a `secp256k1` keypair that is saved-to/loaded-from disk on instantiation. The keypair
|
||||||
|
/// is used for authorization/authentication for requests/responses on the HTTP API.
|
||||||
|
///
|
||||||
|
/// Provides convenience functions to ultimately provide:
|
||||||
|
///
|
||||||
|
/// - A signature across outgoing HTTP responses, applied to the `Signature` header.
|
||||||
|
/// - Verification of proof-of-knowledge of the public key in `self` for incoming HTTP requests,
|
||||||
|
/// via the `Authorization` header.
|
||||||
|
///
|
||||||
|
/// The aforementioned scheme was first defined here:
|
||||||
|
///
|
||||||
|
/// https://github.com/sigp/lighthouse/issues/1269#issuecomment-649879855
|
||||||
|
pub struct ApiSecret {
|
||||||
|
pk: PublicKey,
|
||||||
|
sk: SecretKey,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ApiSecret {
|
||||||
|
/// If both the secret and public keys are already on-disk, parse them and ensure they're both
|
||||||
|
/// from the same keypair.
|
||||||
|
///
|
||||||
|
/// The provided `dir` is a directory containing two files, `SK_FILENAME` and `PK_FILENAME`.
|
||||||
|
///
|
||||||
|
/// If either the secret or public key files are missing on disk, create a new keypair and
|
||||||
|
/// write it to disk (over-writing any existing files).
|
||||||
|
pub fn create_or_open<P: AsRef<Path>>(dir: P) -> Result<Self, String> {
|
||||||
|
let sk_path = dir.as_ref().join(SK_FILENAME);
|
||||||
|
let pk_path = dir.as_ref().join(PK_FILENAME);
|
||||||
|
|
||||||
|
if !(sk_path.exists() && pk_path.exists()) {
|
||||||
|
let sk = SecretKey::random(&mut thread_rng());
|
||||||
|
let pk = PublicKey::from_secret_key(&sk);
|
||||||
|
|
||||||
|
fs::write(
|
||||||
|
&sk_path,
|
||||||
|
serde_utils::hex::encode(&sk.serialize()).as_bytes(),
|
||||||
|
)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
fs::write(
|
||||||
|
&pk_path,
|
||||||
|
format!(
|
||||||
|
"{}{}",
|
||||||
|
PK_PREFIX,
|
||||||
|
serde_utils::hex::encode(&pk.serialize_compressed()[..])
|
||||||
|
)
|
||||||
|
.as_bytes(),
|
||||||
|
)
|
||||||
|
.map_err(|e| e.to_string())?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let sk = fs::read(&sk_path)
|
||||||
|
.map_err(|e| format!("cannot read {}: {}", SK_FILENAME, e))
|
||||||
|
.and_then(|bytes| {
|
||||||
|
serde_utils::hex::decode(&String::from_utf8_lossy(&bytes))
|
||||||
|
.map_err(|_| format!("{} should be 0x-prefixed hex", PK_FILENAME))
|
||||||
|
})
|
||||||
|
.and_then(|bytes| {
|
||||||
|
if bytes.len() == SK_LEN {
|
||||||
|
let mut array = [0; SK_LEN];
|
||||||
|
array.copy_from_slice(&bytes);
|
||||||
|
SecretKey::parse(&array).map_err(|e| format!("invalid {}: {}", SK_FILENAME, e))
|
||||||
|
} else {
|
||||||
|
Err(format!(
|
||||||
|
"{} expected {} bytes not {}",
|
||||||
|
SK_FILENAME,
|
||||||
|
SK_LEN,
|
||||||
|
bytes.len()
|
||||||
|
))
|
||||||
|
}
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let pk = fs::read(&pk_path)
|
||||||
|
.map_err(|e| format!("cannot read {}: {}", PK_FILENAME, e))
|
||||||
|
.and_then(|bytes| {
|
||||||
|
let hex =
|
||||||
|
String::from_utf8(bytes).map_err(|_| format!("{} is not utf8", SK_FILENAME))?;
|
||||||
|
if hex.starts_with(PK_PREFIX) {
|
||||||
|
serde_utils::hex::decode(&hex[PK_PREFIX.len()..])
|
||||||
|
.map_err(|_| format!("{} should be 0x-prefixed hex", SK_FILENAME))
|
||||||
|
} else {
|
||||||
|
Err(format!("unable to parse {}", SK_FILENAME))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.and_then(|bytes| {
|
||||||
|
if bytes.len() == PK_LEN {
|
||||||
|
let mut array = [0; PK_LEN];
|
||||||
|
array.copy_from_slice(&bytes);
|
||||||
|
PublicKey::parse_compressed(&array)
|
||||||
|
.map_err(|e| format!("invalid {}: {}", PK_FILENAME, e))
|
||||||
|
} else {
|
||||||
|
Err(format!(
|
||||||
|
"{} expected {} bytes not {}",
|
||||||
|
PK_FILENAME,
|
||||||
|
PK_LEN,
|
||||||
|
bytes.len()
|
||||||
|
))
|
||||||
|
}
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// Ensure that the keys loaded from disk are indeed a pair.
|
||||||
|
if PublicKey::from_secret_key(&sk) != pk {
|
||||||
|
fs::remove_file(&sk_path)
|
||||||
|
.map_err(|e| format!("unable to remove {}: {}", SK_FILENAME, e))?;
|
||||||
|
fs::remove_file(&pk_path)
|
||||||
|
.map_err(|e| format!("unable to remove {}: {}", PK_FILENAME, e))?;
|
||||||
|
return Err(format!(
|
||||||
|
"{:?} does not match {:?} and the files have been deleted. Please try again.",
|
||||||
|
sk_path, pk_path
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Self { sk, pk })
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the public key of `self` as a 0x-prefixed hex string.
|
||||||
|
fn pubkey_string(&self) -> String {
|
||||||
|
serde_utils::hex::encode(&self.pk.serialize_compressed()[..])
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the API token.
|
||||||
|
pub fn api_token(&self) -> String {
|
||||||
|
format!("{}{}", PK_PREFIX, self.pubkey_string())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the value of the `Authorization` header which is used for verifying incoming HTTP
|
||||||
|
/// requests.
|
||||||
|
fn auth_header_value(&self) -> String {
|
||||||
|
format!("Basic {}", self.api_token())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a `warp` header which filters out request that have a missing or inaccurate
|
||||||
|
/// `Authorization` header.
|
||||||
|
pub fn authorization_header_filter(&self) -> warp::filters::BoxedFilter<()> {
|
||||||
|
let expected = self.auth_header_value();
|
||||||
|
warp::any()
|
||||||
|
.map(move || expected.clone())
|
||||||
|
.and(warp::filters::header::header("Authorization"))
|
||||||
|
.and_then(move |expected: String, header: String| async move {
|
||||||
|
if header == expected {
|
||||||
|
Ok(())
|
||||||
|
} else {
|
||||||
|
Err(warp_utils::reject::invalid_auth(header))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.untuple_one()
|
||||||
|
.boxed()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns a closure which produces a signature over some bytes using the secret key in
|
||||||
|
/// `self`. The signature is a 32-byte hash formatted as a 0x-prefixed string.
|
||||||
|
    pub fn signer(&self) -> impl Fn(&[u8]) -> String + Clone {
        let sk = self.sk.clone();
        move |input: &[u8]| -> String {
            let message =
                Message::parse_slice(digest(&SHA256, input).as_ref()).expect("sha256 is 32 bytes");
            let (signature, _) = secp256k1::sign(&message, &sk);
            serde_utils::hex::encode(signature.serialize_der().as_ref())
        }
    }
}
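To make the `Signature` response header concrete, the sketch below shows how a client could check it. This is illustrative only and is not the `ValidatorClientHttpClient` implementation; the `verify_signature` helper is hypothetical. It re-hashes the response body with SHA-256 (matching `signer` above) and verifies the DER-encoded secp256k1 signature against the public key recovered from the API token.

```rust
use ring::digest::{digest, SHA256};
use secp256k1::{Message, PublicKey, Signature};

/// Hypothetical client-side check of the `Signature` response header.
/// `pk` is the public key parsed from the API token, `body` is the raw
/// response body and `sig_hex` is the 0x-prefixed header value.
fn verify_signature(pk: &PublicKey, body: &[u8], sig_hex: &str) -> Result<bool, String> {
    let sig_bytes =
        serde_utils::hex::decode(sig_hex).map_err(|e| format!("invalid hex: {:?}", e))?;
    let signature =
        Signature::parse_der(&sig_bytes).map_err(|e| format!("invalid DER: {:?}", e))?;
    // The server signs the SHA-256 digest of the body (see `signer` above).
    let message = Message::parse_slice(digest(&SHA256, body).as_ref())
        .map_err(|e| format!("invalid digest: {:?}", e))?;
    Ok(secp256k1::verify(&message, &signature, pk))
}
```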
validator_client/src/http_api/create_validator.rs (new file, 151 lines)
@@ -0,0 +1,151 @@
use crate::ValidatorStore;
use account_utils::{
    eth2_wallet::{bip39::Mnemonic, WalletBuilder},
    random_mnemonic, random_password, ZeroizeString,
};
use eth2::lighthouse_vc::types::{self as api_types};
use slot_clock::SlotClock;
use std::path::Path;
use types::ChainSpec;
use types::EthSpec;
use validator_dir::Builder as ValidatorDirBuilder;

/// Create some validator EIP-2335 keystores and store them on disk. Then, enroll the validators in
/// this validator client.
///
/// Returns the list of created validators and the mnemonic used to derive them via EIP-2334.
///
/// ## Detail
///
/// If `mnemonic_opt` is not supplied it will be randomly generated and returned in the response.
///
/// If `key_derivation_path_offset` is supplied then the EIP-2334 validator index will start at
/// this point.
pub fn create_validators<P: AsRef<Path>, T: 'static + SlotClock, E: EthSpec>(
    mnemonic_opt: Option<Mnemonic>,
    key_derivation_path_offset: Option<u32>,
    validator_requests: &[api_types::ValidatorRequest],
    validator_dir: P,
    validator_store: &ValidatorStore<T, E>,
    spec: &ChainSpec,
) -> Result<(Vec<api_types::CreatedValidator>, Mnemonic), warp::Rejection> {
    let mnemonic = mnemonic_opt.unwrap_or_else(random_mnemonic);

    let wallet_password = random_password();
    let mut wallet =
        WalletBuilder::from_mnemonic(&mnemonic, wallet_password.as_bytes(), String::new())
            .and_then(|builder| builder.build())
            .map_err(|e| {
                warp_utils::reject::custom_server_error(format!(
                    "unable to create EIP-2386 wallet: {:?}",
                    e
                ))
            })?;

    if let Some(nextaccount) = key_derivation_path_offset {
        wallet.set_nextaccount(nextaccount).map_err(|()| {
            warp_utils::reject::custom_server_error("unable to set wallet nextaccount".to_string())
        })?;
    }

    let mut validators = Vec::with_capacity(validator_requests.len());

    for request in validator_requests {
        let voting_password = random_password();
        let withdrawal_password = random_password();
        let voting_password_string = ZeroizeString::from(
            String::from_utf8(voting_password.as_bytes().to_vec()).map_err(|e| {
                warp_utils::reject::custom_server_error(format!(
                    "locally generated password is not utf8: {:?}",
                    e
                ))
            })?,
        );

        let mut keystores = wallet
            .next_validator(
                wallet_password.as_bytes(),
                voting_password.as_bytes(),
                withdrawal_password.as_bytes(),
            )
            .map_err(|e| {
                warp_utils::reject::custom_server_error(format!(
                    "unable to create validator keys: {:?}",
                    e
                ))
            })?;

        keystores
            .voting
            .set_description(request.description.clone());
        keystores
            .withdrawal
            .set_description(request.description.clone());

        let voting_pubkey = format!("0x{}", keystores.voting.pubkey())
            .parse()
            .map_err(|e| {
                warp_utils::reject::custom_server_error(format!(
                    "created invalid public key: {:?}",
                    e
                ))
            })?;

        let validator_dir = ValidatorDirBuilder::new(validator_dir.as_ref().into())
            .voting_keystore(keystores.voting, voting_password.as_bytes())
            .withdrawal_keystore(keystores.withdrawal, withdrawal_password.as_bytes())
            .create_eth1_tx_data(request.deposit_gwei, &spec)
            .store_withdrawal_keystore(false)
            .build()
            .map_err(|e| {
                warp_utils::reject::custom_server_error(format!(
                    "failed to build validator directory: {:?}",
                    e
                ))
            })?;

        let eth1_deposit_data = validator_dir
            .eth1_deposit_data()
            .map_err(|e| {
                warp_utils::reject::custom_server_error(format!(
                    "failed to read local deposit data: {:?}",
                    e
                ))
            })?
            .ok_or_else(|| {
                warp_utils::reject::custom_server_error(
                    "failed to create local deposit data: {:?}".to_string(),
                )
            })?;

        if eth1_deposit_data.deposit_data.amount != request.deposit_gwei {
            return Err(warp_utils::reject::custom_server_error(format!(
                "invalid deposit_gwei {}, expected {}",
                eth1_deposit_data.deposit_data.amount, request.deposit_gwei
            )));
        }

        tokio::runtime::Handle::current()
            .block_on(validator_store.add_validator_keystore(
                validator_dir.voting_keystore_path(),
                voting_password_string,
                request.enable,
            ))
            .map_err(|e| {
                warp_utils::reject::custom_server_error(format!(
                    "failed to initialize validator: {:?}",
                    e
                ))
            })?;

        validators.push(api_types::CreatedValidator {
            enabled: request.enable,
            description: request.description.clone(),
            voting_pubkey,
            eth1_deposit_tx_data: serde_utils::hex::encode(&eth1_deposit_data.rlp),
            deposit_gwei: request.deposit_gwei,
        });
    }

    Ok((validators, mnemonic))
}
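For reference, each element of the `validator_requests` slice above is an `api_types::ValidatorRequest` carrying an enable flag, a free-form description and the deposit amount in Gwei. A minimal sketch using only the fields exercised by the code above and by the tests below; the values are illustrative.

```rust
use eth2::lighthouse_vc::types::ValidatorRequest;

/// Builds one entry of the body for `POST /lighthouse/validators` (or the
/// `validators` field of the mnemonic variant); values are illustrative.
fn example_request() -> ValidatorRequest {
    ValidatorRequest {
        enable: true,
        description: "wallet_1_validator_0".to_string(),
        deposit_gwei: 32_000_000_000,
    }
}
```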
validator_client/src/http_api/mod.rs (new file, 488 lines)
@@ -0,0 +1,488 @@
mod api_secret;
mod create_validator;
mod tests;

use crate::ValidatorStore;
use account_utils::mnemonic_from_phrase;
use create_validator::create_validators;
use eth2::lighthouse_vc::types::{self as api_types, PublicKey, PublicKeyBytes};
use lighthouse_version::version_with_platform;
use serde::{Deserialize, Serialize};
use slog::{crit, info, Logger};
use slot_clock::SlotClock;
use std::future::Future;
use std::marker::PhantomData;
use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4};
use std::path::PathBuf;
use std::sync::Arc;
use types::{ChainSpec, EthSpec, YamlConfig};
use validator_dir::Builder as ValidatorDirBuilder;
use warp::{
    http::{
        header::{HeaderValue, CONTENT_TYPE},
        response::Response,
        StatusCode,
    },
    Filter,
};

pub use api_secret::ApiSecret;

#[derive(Debug)]
pub enum Error {
    Warp(warp::Error),
    Other(String),
}

impl From<warp::Error> for Error {
    fn from(e: warp::Error) -> Self {
        Error::Warp(e)
    }
}

impl From<String> for Error {
    fn from(e: String) -> Self {
        Error::Other(e)
    }
}

/// A wrapper around all the items required to spawn the HTTP server.
///
/// The server will gracefully handle the case where any fields are `None`.
pub struct Context<T: Clone, E: EthSpec> {
    pub api_secret: ApiSecret,
    pub validator_store: Option<ValidatorStore<T, E>>,
    pub validator_dir: Option<PathBuf>,
    pub spec: ChainSpec,
    pub config: Config,
    pub log: Logger,
    pub _phantom: PhantomData<E>,
}

/// Configuration for the HTTP server.
#[derive(PartialEq, Debug, Clone, Serialize, Deserialize)]
pub struct Config {
    pub enabled: bool,
    pub listen_addr: Ipv4Addr,
    pub listen_port: u16,
    pub allow_origin: Option<String>,
}

impl Default for Config {
    fn default() -> Self {
        Self {
            enabled: false,
            listen_addr: Ipv4Addr::new(127, 0, 0, 1),
            listen_port: 5062,
            allow_origin: None,
        }
    }
}

/// Creates a server that will serve requests using information from `ctx`.
///
/// The server will shut down gracefully when the `shutdown` future resolves.
///
/// ## Returns
///
/// This function will bind the server to the provided address and then return a tuple of:
///
/// - `SocketAddr`: the address that the HTTP server will listen on.
/// - `Future`: the actual server future that will need to be awaited.
///
/// ## Errors
///
/// Returns an error if the server is unable to bind or there is another error during
/// configuration.
pub fn serve<T: 'static + SlotClock + Clone, E: EthSpec>(
    ctx: Arc<Context<T, E>>,
    shutdown: impl Future<Output = ()> + Send + Sync + 'static,
) -> Result<(SocketAddr, impl Future<Output = ()>), Error> {
    let config = &ctx.config;
    let log = ctx.log.clone();
    let allow_origin = config.allow_origin.clone();

    // Sanity check.
    if !config.enabled {
        crit!(log, "Cannot start disabled metrics HTTP server");
        return Err(Error::Other(
            "A disabled metrics server should not be started".to_string(),
        ));
    }

    let authorization_header_filter = ctx.api_secret.authorization_header_filter();
    let api_token = ctx.api_secret.api_token();
    let signer = ctx.api_secret.signer();
    let signer = warp::any().map(move || signer.clone());

    let inner_validator_store = ctx.validator_store.clone();
    let validator_store_filter = warp::any()
        .map(move || inner_validator_store.clone())
        .and_then(|validator_store: Option<_>| async move {
            validator_store.ok_or_else(|| {
                warp_utils::reject::custom_not_found(
                    "validator store is not initialized.".to_string(),
                )
            })
        });

    let inner_validator_dir = ctx.validator_dir.clone();
    let validator_dir_filter = warp::any()
        .map(move || inner_validator_dir.clone())
        .and_then(|validator_dir: Option<_>| async move {
            validator_dir.ok_or_else(|| {
                warp_utils::reject::custom_not_found(
                    "validator_dir directory is not initialized.".to_string(),
                )
            })
        });

    let inner_spec = Arc::new(ctx.spec.clone());
    let spec_filter = warp::any().map(move || inner_spec.clone());

    // GET lighthouse/version
    let get_node_version = warp::path("lighthouse")
        .and(warp::path("version"))
        .and(warp::path::end())
        .and(signer.clone())
        .and_then(|signer| {
            blocking_signed_json_task(signer, move || {
                Ok(api_types::GenericResponse::from(api_types::VersionData {
                    version: version_with_platform(),
                }))
            })
        });

    // GET lighthouse/health
    let get_lighthouse_health = warp::path("lighthouse")
        .and(warp::path("health"))
        .and(warp::path::end())
        .and(signer.clone())
        .and_then(|signer| {
            blocking_signed_json_task(signer, move || {
                eth2::lighthouse::Health::observe()
                    .map(api_types::GenericResponse::from)
                    .map_err(warp_utils::reject::custom_bad_request)
            })
        });

    // GET lighthouse/spec
    let get_lighthouse_spec = warp::path("lighthouse")
        .and(warp::path("spec"))
        .and(warp::path::end())
        .and(spec_filter.clone())
        .and(signer.clone())
        .and_then(|spec: Arc<_>, signer| {
            blocking_signed_json_task(signer, move || {
                Ok(api_types::GenericResponse::from(
                    YamlConfig::from_spec::<E>(&spec),
                ))
            })
        });

    // GET lighthouse/validators
    let get_lighthouse_validators = warp::path("lighthouse")
        .and(warp::path("validators"))
        .and(warp::path::end())
        .and(validator_store_filter.clone())
        .and(signer.clone())
        .and_then(|validator_store: ValidatorStore<T, E>, signer| {
            blocking_signed_json_task(signer, move || {
                let validators = validator_store
                    .initialized_validators()
                    .read()
                    .validator_definitions()
                    .iter()
                    .map(|def| api_types::ValidatorData {
                        enabled: def.enabled,
                        description: def.description.clone(),
                        voting_pubkey: PublicKeyBytes::from(&def.voting_public_key),
                    })
                    .collect::<Vec<_>>();

                Ok(api_types::GenericResponse::from(validators))
            })
        });

    // GET lighthouse/validators/{validator_pubkey}
    let get_lighthouse_validators_pubkey = warp::path("lighthouse")
        .and(warp::path("validators"))
        .and(warp::path::param::<PublicKey>())
        .and(warp::path::end())
        .and(validator_store_filter.clone())
        .and(signer.clone())
        .and_then(
            |validator_pubkey: PublicKey, validator_store: ValidatorStore<T, E>, signer| {
                blocking_signed_json_task(signer, move || {
                    let validator = validator_store
                        .initialized_validators()
                        .read()
                        .validator_definitions()
                        .iter()
                        .find(|def| def.voting_public_key == validator_pubkey)
                        .map(|def| api_types::ValidatorData {
                            enabled: def.enabled,
                            description: def.description.clone(),
                            voting_pubkey: PublicKeyBytes::from(&def.voting_public_key),
                        })
                        .ok_or_else(|| {
                            warp_utils::reject::custom_not_found(format!(
                                "no validator for {:?}",
                                validator_pubkey
                            ))
                        })?;

                    Ok(api_types::GenericResponse::from(validator))
                })
            },
        );

    // POST lighthouse/validators/
    let post_validators = warp::path("lighthouse")
        .and(warp::path("validators"))
        .and(warp::path::end())
        .and(warp::body::json())
        .and(validator_dir_filter.clone())
        .and(validator_store_filter.clone())
        .and(spec_filter.clone())
        .and(signer.clone())
        .and_then(
            |body: Vec<api_types::ValidatorRequest>,
             validator_dir: PathBuf,
             validator_store: ValidatorStore<T, E>,
             spec: Arc<ChainSpec>,
             signer| {
                blocking_signed_json_task(signer, move || {
                    let (validators, mnemonic) = create_validators(
                        None,
                        None,
                        &body,
                        &validator_dir,
                        &validator_store,
                        &spec,
                    )?;
                    let response = api_types::PostValidatorsResponseData {
                        mnemonic: mnemonic.into_phrase().into(),
                        validators,
                    };
                    Ok(api_types::GenericResponse::from(response))
                })
            },
        );

    // POST lighthouse/validators/mnemonic
    let post_validators_mnemonic = warp::path("lighthouse")
        .and(warp::path("validators"))
        .and(warp::path("mnemonic"))
        .and(warp::path::end())
        .and(warp::body::json())
        .and(validator_dir_filter.clone())
        .and(validator_store_filter.clone())
        .and(spec_filter)
        .and(signer.clone())
        .and_then(
            |body: api_types::CreateValidatorsMnemonicRequest,
             validator_dir: PathBuf,
             validator_store: ValidatorStore<T, E>,
             spec: Arc<ChainSpec>,
             signer| {
                blocking_signed_json_task(signer, move || {
                    let mnemonic = mnemonic_from_phrase(body.mnemonic.as_str()).map_err(|e| {
                        warp_utils::reject::custom_bad_request(format!("invalid mnemonic: {:?}", e))
                    })?;
                    let (validators, _mnemonic) = create_validators(
                        Some(mnemonic),
                        Some(body.key_derivation_path_offset),
                        &body.validators,
                        &validator_dir,
                        &validator_store,
                        &spec,
                    )?;
                    Ok(api_types::GenericResponse::from(validators))
                })
            },
        );

    // POST lighthouse/validators/keystore
    let post_validators_keystore = warp::path("lighthouse")
        .and(warp::path("validators"))
        .and(warp::path("keystore"))
        .and(warp::path::end())
        .and(warp::body::json())
        .and(validator_dir_filter)
        .and(validator_store_filter.clone())
        .and(signer.clone())
        .and_then(
            |body: api_types::KeystoreValidatorsPostRequest,
             validator_dir: PathBuf,
             validator_store: ValidatorStore<T, E>,
             signer| {
                blocking_signed_json_task(signer, move || {
                    // Check to ensure the password is correct.
                    let keypair = body
                        .keystore
                        .decrypt_keypair(body.password.as_ref())
                        .map_err(|e| {
                            warp_utils::reject::custom_bad_request(format!(
                                "invalid keystore: {:?}",
                                e
                            ))
                        })?;

                    let validator_dir = ValidatorDirBuilder::new(validator_dir.clone())
                        .voting_keystore(body.keystore.clone(), body.password.as_ref())
                        .store_withdrawal_keystore(false)
                        .build()
                        .map_err(|e| {
                            warp_utils::reject::custom_server_error(format!(
                                "failed to build validator directory: {:?}",
                                e
                            ))
                        })?;

                    let voting_password = body.password.clone();

                    let validator_def = tokio::runtime::Handle::current()
                        .block_on(validator_store.add_validator_keystore(
                            validator_dir.voting_keystore_path(),
                            voting_password,
                            body.enable,
                        ))
                        .map_err(|e| {
                            warp_utils::reject::custom_server_error(format!(
                                "failed to initialize validator: {:?}",
                                e
                            ))
                        })?;

                    Ok(api_types::GenericResponse::from(api_types::ValidatorData {
                        enabled: body.enable,
                        description: validator_def.description,
                        voting_pubkey: keypair.pk.into(),
                    }))
                })
            },
        );

    // PATCH lighthouse/validators/{validator_pubkey}
    let patch_validators = warp::path("lighthouse")
        .and(warp::path("validators"))
        .and(warp::path::param::<PublicKey>())
        .and(warp::path::end())
        .and(warp::body::json())
        .and(validator_store_filter)
        .and(signer)
        .and_then(
            |validator_pubkey: PublicKey,
             body: api_types::ValidatorPatchRequest,
             validator_store: ValidatorStore<T, E>,
             signer| {
                blocking_signed_json_task(signer, move || {
                    let initialized_validators_rw_lock = validator_store.initialized_validators();
                    let mut initialized_validators = initialized_validators_rw_lock.write();

                    match initialized_validators.is_enabled(&validator_pubkey) {
                        None => Err(warp_utils::reject::custom_not_found(format!(
                            "no validator for {:?}",
                            validator_pubkey
                        ))),
                        Some(enabled) if enabled == body.enabled => Ok(()),
                        Some(_) => {
                            tokio::runtime::Handle::current()
                                .block_on(
                                    initialized_validators
                                        .set_validator_status(&validator_pubkey, body.enabled),
                                )
                                .map_err(|e| {
                                    warp_utils::reject::custom_server_error(format!(
                                        "unable to set validator status: {:?}",
                                        e
                                    ))
                                })?;

                            Ok(())
                        }
                    }
                })
            },
        );

    let routes = warp::any()
        .and(authorization_header_filter)
        .and(
            warp::get().and(
                get_node_version
                    .or(get_lighthouse_health)
                    .or(get_lighthouse_spec)
                    .or(get_lighthouse_validators)
                    .or(get_lighthouse_validators_pubkey),
            ),
        )
        .or(warp::post().and(
            post_validators
                .or(post_validators_keystore)
                .or(post_validators_mnemonic),
        ))
        .or(warp::patch().and(patch_validators))
        // Maps errors into HTTP responses.
        .recover(warp_utils::reject::handle_rejection)
        // Add a `Server` header.
        .map(|reply| warp::reply::with_header(reply, "Server", &version_with_platform()))
        // Maybe add some CORS headers.
        .map(move |reply| warp_utils::reply::maybe_cors(reply, allow_origin.as_ref()));

    let (listening_socket, server) = warp::serve(routes).try_bind_with_graceful_shutdown(
        SocketAddrV4::new(config.listen_addr, config.listen_port),
        async {
            shutdown.await;
        },
    )?;

    info!(
        log,
        "HTTP API started";
        "listen_address" => listening_socket.to_string(),
        "api_token" => api_token,
    );

    Ok((listening_socket, server))
}

/// Executes `func` in a blocking tokio task (i.e., where long-running tasks are permitted).
/// JSON-encodes the return value of `func`, using the `signer` function to produce a signature of
/// those bytes.
pub async fn blocking_signed_json_task<S, F, T>(
    signer: S,
    func: F,
) -> Result<impl warp::Reply, warp::Rejection>
where
    S: Fn(&[u8]) -> String,
    F: Fn() -> Result<T, warp::Rejection>,
    T: Serialize,
{
    warp_utils::task::blocking_task(func)
        .await
        .map(|func_output| {
            let mut response = match serde_json::to_vec(&func_output) {
                Ok(body) => {
                    let mut res = Response::new(body);
                    res.headers_mut()
                        .insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
                    res
                }
                Err(_) => Response::builder()
                    .status(StatusCode::INTERNAL_SERVER_ERROR)
                    .body(vec![])
                    .expect("can produce simple response from static values"),
            };

            let body: &Vec<u8> = response.body();
            let signature = signer(body);
            let header_value =
                HeaderValue::from_str(&signature).expect("hash can be encoded as header");

            response.headers_mut().append("Signature", header_value);

            response
        })
}
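Every route above sits behind `authorization_header_filter`, so any client (not only the typed `ValidatorClientHttpClient`) must send the API token verbatim in the `Authorization` header. A minimal sketch with `reqwest`, assuming the token is the value returned by `ApiSecret::api_token()` and the server uses the default listen address and port from `Config`:

```rust
use reqwest::Client;

/// Fetches `GET /lighthouse/version` from a local validator client.
/// `api_token` is the value returned by `ApiSecret::api_token()`.
async fn get_version(api_token: &str) -> Result<String, reqwest::Error> {
    Client::new()
        .get("http://127.0.0.1:5062/lighthouse/version")
        // The filter compares this header byte-for-byte against `Basic <token>`.
        .header("Authorization", format!("Basic {}", api_token))
        .send()
        .await?
        .text()
        .await
}
```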
validator_client/src/http_api/tests.rs (new file, 527 lines)
@@ -0,0 +1,527 @@
#![cfg(test)]
#![cfg(not(debug_assertions))]

use crate::{
    http_api::{ApiSecret, Config as HttpConfig, Context},
    Config, ForkServiceBuilder, InitializedValidators, ValidatorDefinitions, ValidatorStore,
};
use account_utils::{
    eth2_wallet::WalletBuilder, mnemonic_from_phrase, random_mnemonic, random_password,
    ZeroizeString,
};
use deposit_contract::decode_eth1_tx_data;
use environment::null_logger;
use eth2::{
    lighthouse_vc::{http_client::ValidatorClientHttpClient, types::*},
    Url,
};
use eth2_keystore::KeystoreBuilder;
use parking_lot::RwLock;
use slot_clock::TestingSlotClock;
use std::marker::PhantomData;
use std::net::Ipv4Addr;
use std::sync::Arc;
use tempfile::{tempdir, TempDir};
use tokio::sync::oneshot;

const PASSWORD_BYTES: &[u8] = &[42, 13, 37];

type E = MainnetEthSpec;

struct ApiTester {
    client: ValidatorClientHttpClient,
    initialized_validators: Arc<RwLock<InitializedValidators>>,
    url: Url,
    _server_shutdown: oneshot::Sender<()>,
    _validator_dir: TempDir,
}

impl ApiTester {
    pub async fn new() -> Self {
        let log = null_logger().unwrap();

        let validator_dir = tempdir().unwrap();
        let secrets_dir = tempdir().unwrap();

        let validator_defs = ValidatorDefinitions::open_or_create(validator_dir.path()).unwrap();

        let initialized_validators = InitializedValidators::from_definitions(
            validator_defs,
            validator_dir.path().into(),
            false,
            log.clone(),
        )
        .await
        .unwrap();

        let api_secret = ApiSecret::create_or_open(validator_dir.path()).unwrap();
        let api_pubkey = api_secret.api_token();

        let mut config = Config::default();
        config.validator_dir = validator_dir.path().into();
        config.secrets_dir = secrets_dir.path().into();

        let fork_service = ForkServiceBuilder::testing_only(log.clone())
            .build()
            .unwrap();

        let validator_store: ValidatorStore<TestingSlotClock, E> = ValidatorStore::new(
            initialized_validators,
            &config,
            Hash256::repeat_byte(42),
            E::default_spec(),
            fork_service.clone(),
            log.clone(),
        )
        .unwrap();

        let initialized_validators = validator_store.initialized_validators();

        let context: Arc<Context<TestingSlotClock, E>> = Arc::new(Context {
            api_secret,
            validator_dir: Some(validator_dir.path().into()),
            validator_store: Some(validator_store),
            spec: E::default_spec(),
            config: HttpConfig {
                enabled: true,
                listen_addr: Ipv4Addr::new(127, 0, 0, 1),
                listen_port: 0,
                allow_origin: None,
            },
            log,
            _phantom: PhantomData,
        });
        let ctx = context.clone();
        let (shutdown_tx, shutdown_rx) = oneshot::channel();
        let server_shutdown = async {
            // It's not really interesting why this triggered, just that it happened.
            let _ = shutdown_rx.await;
        };
        let (listening_socket, server) = super::serve(ctx, server_shutdown).unwrap();

        tokio::spawn(async { server.await });

        let url = Url::parse(&format!(
            "http://{}:{}",
            listening_socket.ip(),
            listening_socket.port()
        ))
        .unwrap();

        let client = ValidatorClientHttpClient::new(url.clone(), api_pubkey).unwrap();

        Self {
            initialized_validators,
            _validator_dir: validator_dir,
            client,
            url,
            _server_shutdown: shutdown_tx,
        }
    }

    pub fn invalidate_api_token(mut self) -> Self {
        let tmp = tempdir().unwrap();
        let api_secret = ApiSecret::create_or_open(tmp.path()).unwrap();
        let invalid_pubkey = api_secret.api_token();

        self.client = ValidatorClientHttpClient::new(self.url.clone(), invalid_pubkey).unwrap();
        self
    }

    pub async fn test_get_lighthouse_version_invalid(self) -> Self {
        self.client.get_lighthouse_version().await.unwrap_err();
        self
    }

    pub async fn test_get_lighthouse_spec(self) -> Self {
        let result = self.client.get_lighthouse_spec().await.unwrap().data;

        let expected = YamlConfig::from_spec::<E>(&E::default_spec());

        assert_eq!(result, expected);

        self
    }

    pub async fn test_get_lighthouse_version(self) -> Self {
        let result = self.client.get_lighthouse_version().await.unwrap().data;

        let expected = VersionData {
            version: lighthouse_version::version_with_platform(),
        };

        assert_eq!(result, expected);

        self
    }

    #[cfg(target_os = "linux")]
    pub async fn test_get_lighthouse_health(self) -> Self {
        self.client.get_lighthouse_health().await.unwrap();

        self
    }

    #[cfg(not(target_os = "linux"))]
    pub async fn test_get_lighthouse_health(self) -> Self {
        self.client.get_lighthouse_health().await.unwrap_err();

        self
    }
    pub fn vals_total(&self) -> usize {
        self.initialized_validators.read().num_total()
    }

    pub fn vals_enabled(&self) -> usize {
        self.initialized_validators.read().num_enabled()
    }

    pub fn assert_enabled_validators_count(self, count: usize) -> Self {
        assert_eq!(self.vals_enabled(), count);
        self
    }

    pub fn assert_validators_count(self, count: usize) -> Self {
        assert_eq!(self.vals_total(), count);
        self
    }

    pub async fn create_hd_validators(self, s: HdValidatorScenario) -> Self {
        let initial_vals = self.vals_total();
        let initial_enabled_vals = self.vals_enabled();

        let validators = (0..s.count)
            .map(|i| ValidatorRequest {
                enable: !s.disabled.contains(&i),
                description: format!("boi #{}", i),
                deposit_gwei: E::default_spec().max_effective_balance,
            })
            .collect::<Vec<_>>();

        let (response, mnemonic) = if s.specify_mnemonic {
            let mnemonic = ZeroizeString::from(random_mnemonic().phrase().to_string());
            let request = CreateValidatorsMnemonicRequest {
                mnemonic: mnemonic.clone(),
                key_derivation_path_offset: s.key_derivation_path_offset,
                validators: validators.clone(),
            };
            let response = self
                .client
                .post_lighthouse_validators_mnemonic(&request)
                .await
                .unwrap()
                .data;

            (response, mnemonic)
        } else {
            assert_eq!(
                s.key_derivation_path_offset, 0,
                "cannot use a derivation offset without specifying a mnemonic"
            );
            let response = self
                .client
                .post_lighthouse_validators(validators.clone())
                .await
                .unwrap()
                .data;
            (response.validators.clone(), response.mnemonic.clone())
        };

        assert_eq!(response.len(), s.count);
        assert_eq!(self.vals_total(), initial_vals + s.count);
        assert_eq!(
            self.vals_enabled(),
            initial_enabled_vals + s.count - s.disabled.len()
        );

        let server_vals = self.client.get_lighthouse_validators().await.unwrap().data;

        assert_eq!(server_vals.len(), self.vals_total());

        // Ensure the server lists all of these newly created validators.
        for validator in &response {
            assert!(server_vals
                .iter()
                .any(|server_val| server_val.voting_pubkey == validator.voting_pubkey));
        }

        /*
         * Verify that we can regenerate all the keys from the mnemonic.
         */

        let mnemonic = mnemonic_from_phrase(mnemonic.as_str()).unwrap();
        let mut wallet = WalletBuilder::from_mnemonic(&mnemonic, PASSWORD_BYTES, "".to_string())
            .unwrap()
            .build()
            .unwrap();

        wallet
            .set_nextaccount(s.key_derivation_path_offset)
            .unwrap();

        for i in 0..s.count {
            let keypairs = wallet
                .next_validator(PASSWORD_BYTES, PASSWORD_BYTES, PASSWORD_BYTES)
                .unwrap();
            let voting_keypair = keypairs.voting.decrypt_keypair(PASSWORD_BYTES).unwrap();

            assert_eq!(
                response[i].voting_pubkey,
                voting_keypair.pk.clone().into(),
                "the locally generated voting pk should match the server response"
            );

            let withdrawal_keypair = keypairs.withdrawal.decrypt_keypair(PASSWORD_BYTES).unwrap();

            let deposit_bytes =
                serde_utils::hex::decode(&response[i].eth1_deposit_tx_data).unwrap();

            let (deposit_data, _) =
                decode_eth1_tx_data(&deposit_bytes, E::default_spec().max_effective_balance)
                    .unwrap();

            assert_eq!(
                deposit_data.pubkey,
                voting_keypair.pk.clone().into(),
                "the locally generated voting pk should match the deposit data"
            );

            assert_eq!(
                deposit_data.withdrawal_credentials,
                Hash256::from_slice(&bls::get_withdrawal_credentials(
                    &withdrawal_keypair.pk,
                    E::default_spec().bls_withdrawal_prefix_byte
                )),
                "the locally generated withdrawal creds should match the deposit data"
            );

            assert_eq!(
                deposit_data.signature,
                deposit_data.create_signature(&voting_keypair.sk, &E::default_spec()),
                "the locally-generated deposit sig should create the same deposit sig"
            );
        }

        self
    }

    pub async fn create_keystore_validators(self, s: KeystoreValidatorScenario) -> Self {
        let initial_vals = self.vals_total();
        let initial_enabled_vals = self.vals_enabled();

        let password = random_password();
        let keypair = Keypair::random();
        let keystore = KeystoreBuilder::new(&keypair, password.as_bytes(), String::new())
            .unwrap()
            .build()
            .unwrap();

        if !s.correct_password {
            let request = KeystoreValidatorsPostRequest {
                enable: s.enabled,
                password: String::from_utf8(random_password().as_ref().to_vec())
                    .unwrap()
                    .into(),
                keystore,
            };

            self.client
                .post_lighthouse_validators_keystore(&request)
                .await
                .unwrap_err();

            return self;
        }

        let request = KeystoreValidatorsPostRequest {
            enable: s.enabled,
            password: String::from_utf8(password.as_ref().to_vec())
                .unwrap()
                .into(),
            keystore,
        };

        let response = self
            .client
            .post_lighthouse_validators_keystore(&request)
            .await
            .unwrap()
            .data;

        let num_enabled = s.enabled as usize;

        assert_eq!(self.vals_total(), initial_vals + 1);
        assert_eq!(self.vals_enabled(), initial_enabled_vals + num_enabled);

        let server_vals = self.client.get_lighthouse_validators().await.unwrap().data;

        assert_eq!(server_vals.len(), self.vals_total());

        assert_eq!(response.voting_pubkey, keypair.pk.into());
        assert_eq!(response.enabled, s.enabled);

        self
    }

    pub async fn set_validator_enabled(self, index: usize, enabled: bool) -> Self {
        let validator = &self.client.get_lighthouse_validators().await.unwrap().data[index];

        self.client
            .patch_lighthouse_validators(&validator.voting_pubkey, enabled)
            .await
            .unwrap();

        assert_eq!(
            self.initialized_validators
                .read()
                .is_enabled(&validator.voting_pubkey.decompress().unwrap())
                .unwrap(),
            enabled
        );

        assert!(self
            .client
            .get_lighthouse_validators()
            .await
            .unwrap()
            .data
            .into_iter()
            .find(|v| v.voting_pubkey == validator.voting_pubkey)
            .map(|v| v.enabled == enabled)
            .unwrap());

        // Check the server via an individual request.
        assert_eq!(
            self.client
                .get_lighthouse_validators_pubkey(&validator.voting_pubkey)
                .await
                .unwrap()
                .unwrap()
                .data
                .enabled,
            enabled
        );

        self
    }
}

struct HdValidatorScenario {
    count: usize,
    specify_mnemonic: bool,
    key_derivation_path_offset: u32,
    disabled: Vec<usize>,
}

struct KeystoreValidatorScenario {
    enabled: bool,
    correct_password: bool,
}

#[tokio::test(core_threads = 2)]
async fn invalid_pubkey() {
    ApiTester::new()
        .await
        .invalidate_api_token()
        .test_get_lighthouse_version_invalid()
        .await;
}

#[tokio::test(core_threads = 2)]
async fn simple_getters() {
    ApiTester::new()
        .await
        .test_get_lighthouse_version()
        .await
        .test_get_lighthouse_health()
        .await
        .test_get_lighthouse_spec()
        .await;
}

#[tokio::test(core_threads = 2)]
async fn hd_validator_creation() {
    ApiTester::new()
        .await
        .assert_enabled_validators_count(0)
        .assert_validators_count(0)
        .create_hd_validators(HdValidatorScenario {
            count: 2,
            specify_mnemonic: true,
            key_derivation_path_offset: 0,
            disabled: vec![],
        })
        .await
        .assert_enabled_validators_count(2)
        .assert_validators_count(2)
        .create_hd_validators(HdValidatorScenario {
            count: 1,
            specify_mnemonic: false,
            key_derivation_path_offset: 0,
            disabled: vec![0],
        })
        .await
        .assert_enabled_validators_count(2)
        .assert_validators_count(3)
        .create_hd_validators(HdValidatorScenario {
            count: 0,
            specify_mnemonic: true,
            key_derivation_path_offset: 4,
            disabled: vec![],
        })
        .await
        .assert_enabled_validators_count(2)
        .assert_validators_count(3);
}

#[tokio::test(core_threads = 2)]
async fn validator_enabling() {
    ApiTester::new()
        .await
        .create_hd_validators(HdValidatorScenario {
            count: 2,
            specify_mnemonic: false,
            key_derivation_path_offset: 0,
            disabled: vec![],
        })
        .await
        .assert_enabled_validators_count(2)
        .assert_validators_count(2)
        .set_validator_enabled(0, false)
        .await
        .assert_enabled_validators_count(1)
        .assert_validators_count(2)
        .set_validator_enabled(0, true)
        .await
        .assert_enabled_validators_count(2)
        .assert_validators_count(2);
}

#[tokio::test(core_threads = 2)]
async fn keystore_validator_creation() {
    ApiTester::new()
        .await
        .assert_enabled_validators_count(0)
        .assert_validators_count(0)
        .create_keystore_validators(KeystoreValidatorScenario {
            correct_password: true,
            enabled: true,
        })
        .await
        .assert_enabled_validators_count(1)
        .assert_validators_count(1)
        .create_keystore_validators(KeystoreValidatorScenario {
            correct_password: false,
            enabled: true,
        })
        .await
        .assert_enabled_validators_count(1)
        .assert_validators_count(1)
        .create_keystore_validators(KeystoreValidatorScenario {
            correct_password: true,
            enabled: false,
        })
        .await
        .assert_enabled_validators_count(1)
        .assert_validators_count(2);
}
@@ -56,6 +56,8 @@ pub enum Error {
     TokioJoin(tokio::task::JoinError),
     /// There was a filesystem error when deleting a lockfile.
     UnableToDeleteLockfile(io::Error),
+    /// Cannot initialize the same validator twice.
+    DuplicatePublicKey,
 }

 /// A method used by a validator to sign messages.
@@ -322,6 +324,42 @@ impl InitializedValidators {
             .map(|v| v.voting_keypair())
     }

+    /// Add a validator definition to `self`, overwriting the on-disk representation of `self`.
+    pub async fn add_definition(&mut self, def: ValidatorDefinition) -> Result<(), Error> {
+        if self
+            .definitions
+            .as_slice()
+            .iter()
+            .any(|existing| existing.voting_public_key == def.voting_public_key)
+        {
+            return Err(Error::DuplicatePublicKey);
+        }
+
+        self.definitions.push(def);
+
+        self.update_validators().await?;
+
+        self.definitions
+            .save(&self.validators_dir)
+            .map_err(Error::UnableToSaveDefinitions)?;
+
+        Ok(())
+    }
+
+    /// Returns a slice of all defined validators (regardless of their enabled state).
+    pub fn validator_definitions(&self) -> &[ValidatorDefinition] {
+        self.definitions.as_slice()
+    }
+
+    /// Indicates if the `voting_public_key` exists in self and if it is enabled.
+    pub fn is_enabled(&self, voting_public_key: &PublicKey) -> Option<bool> {
+        self.definitions
+            .as_slice()
+            .iter()
+            .find(|def| def.voting_public_key == *voting_public_key)
+            .map(|def| def.enabled)
+    }
+
     /// Sets the `InitializedValidator` and `ValidatorDefinition` `enabled` values.
     ///
     /// ## Notes
@@ -10,6 +10,8 @@ mod notifier;
 mod validator_duty;
 mod validator_store;

+pub mod http_api;
+
 pub use cli::cli_app;
 pub use config::Config;

@@ -22,11 +24,14 @@ use environment::RuntimeContext;
 use eth2::{reqwest::ClientBuilder, BeaconNodeHttpClient, StatusCode, Url};
 use fork_service::{ForkService, ForkServiceBuilder};
 use futures::channel::mpsc;
+use http_api::ApiSecret;
 use initialized_validators::InitializedValidators;
 use notifier::spawn_notifier;
 use slog::{error, info, Logger};
 use slot_clock::SlotClock;
 use slot_clock::SystemTimeSlotClock;
+use std::marker::PhantomData;
+use std::net::SocketAddr;
 use std::sync::Arc;
 use std::time::{SystemTime, UNIX_EPOCH};
 use tokio::time::{delay_for, Duration};
@@ -42,9 +47,11 @@ const HTTP_TIMEOUT: Duration = Duration::from_secs(12);
 pub struct ProductionValidatorClient<T: EthSpec> {
     context: RuntimeContext<T>,
     duties_service: DutiesService<SystemTimeSlotClock, T>,
-    fork_service: ForkService<SystemTimeSlotClock, T>,
+    fork_service: ForkService<SystemTimeSlotClock>,
     block_service: BlockService<SystemTimeSlotClock, T>,
     attestation_service: AttestationService<SystemTimeSlotClock, T>,
+    validator_store: ValidatorStore<SystemTimeSlotClock, T>,
+    http_api_listen_addr: Option<SocketAddr>,
     config: Config,
 }

@@ -55,7 +62,7 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
         context: RuntimeContext<T>,
         cli_args: &ArgMatches<'_>,
     ) -> Result<Self, String> {
-        let config = Config::from_cli(&cli_args)
+        let config = Config::from_cli(&cli_args, context.log())
             .map_err(|e| format!("Unable to initialize config: {}", e))?;
         Self::new(context, config).await
     }
@@ -68,7 +75,7 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
         info!(
             log,
             "Starting validator client";
-            "beacon_node" => &config.http_server,
+            "beacon_node" => &config.beacon_node,
             "validator_dir" => format!("{:?}", config.validator_dir),
         );

@@ -106,7 +113,7 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
         );

         let beacon_node_url: Url = config
-            .http_server
+            .beacon_node
             .parse()
             .map_err(|e| format!("Unable to parse beacon node URL: {:?}", e))?;
         let beacon_node_http_client = ClientBuilder::new()
@@ -144,7 +151,7 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
         let fork_service = ForkServiceBuilder::new()
             .slot_clock(slot_clock.clone())
             .beacon_node(beacon_node.clone())
-            .runtime_context(context.service_context("fork".into()))
+            .log(log.clone())
             .build()?;

         let validator_store: ValidatorStore<SystemTimeSlotClock, T> = ValidatorStore::new(
@@ -183,7 +190,7 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
         let attestation_service = AttestationServiceBuilder::new()
             .duties_service(duties_service.clone())
             .slot_clock(slot_clock)
-            .validator_store(validator_store)
+            .validator_store(validator_store.clone())
             .beacon_node(beacon_node)
             .runtime_context(context.service_context("attestation".into()))
             .build()?;
@@ -194,7 +201,9 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
             fork_service,
             block_service,
             attestation_service,
+            validator_store,
             config,
+            http_api_listen_addr: None,
         })
     }

@@ -204,6 +213,7 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
         // whole epoch!
         let channel_capacity = T::slots_per_epoch() as usize;
         let (block_service_tx, block_service_rx) = mpsc::channel(channel_capacity);
+        let log = self.context.log();

         self.duties_service
             .clone()
@@ -215,7 +225,7 @@ impl<T: EthSpec> ProductionValidatorClient<T> {

         self.fork_service
             .clone()
-            .start_update_service(&self.context.eth2_config.spec)
+            .start_update_service(&self.context)
             .map_err(|e| format!("Unable to start fork service: {}", e))?;

         self.block_service
@@ -230,6 +240,35 @@ impl<T: EthSpec> ProductionValidatorClient<T> {

         spawn_notifier(self).map_err(|e| format!("Failed to start notifier: {}", e))?;

+        let api_secret = ApiSecret::create_or_open(&self.config.validator_dir)?;
+
+        self.http_api_listen_addr = if self.config.http_api.enabled {
+            let ctx: Arc<http_api::Context<SystemTimeSlotClock, T>> = Arc::new(http_api::Context {
+                api_secret,
+                validator_store: Some(self.validator_store.clone()),
+                validator_dir: Some(self.config.validator_dir.clone()),
+                spec: self.context.eth2_config.spec.clone(),
+                config: self.config.http_api.clone(),
+                log: log.clone(),
+                _phantom: PhantomData,
+            });
+
+            let exit = self.context.executor.exit();
+
+            let (listen_addr, server) = http_api::serve(ctx, exit)
+                .map_err(|e| format!("Unable to start HTTP API server: {:?}", e))?;
+
+            self.context
+                .clone()
+                .executor
+                .spawn_without_exit(async move { server.await }, "http-api");
+
+            Some(listen_addr)
+        } else {
+            info!(log, "HTTP API server is disabled");
+            None
+        };
+
         Ok(())
     }
 }

@@ -45,7 +45,12 @@ pub fn spawn_notifier<T: EthSpec>(client: &ProductionValidatorClient<T>) -> Resu
     let attesting_validators = duties_service.attester_count(epoch);

     if total_validators == 0 {
-        error!(log, "No validators present")
+        info!(
+            log,
+            "No validators present";
+            "msg" => "see `lighthouse account validator create --help` \
+            or the HTTP API documentation"
+        )
     } else if total_validators == attesting_validators {
         info!(
             log,
@@ -1,11 +1,13 @@
 use crate::{
     config::Config, fork_service::ForkService, initialized_validators::InitializedValidators,
 };
+use account_utils::{validator_definitions::ValidatorDefinition, ZeroizeString};
 use parking_lot::RwLock;
 use slashing_protection::{NotSafe, Safe, SlashingDatabase, SLASHING_PROTECTION_FILENAME};
 use slog::{crit, error, warn, Logger};
 use slot_clock::SlotClock;
 use std::marker::PhantomData;
+use std::path::Path;
 use std::sync::Arc;
 use tempdir::TempDir;
 use types::{
@@ -47,7 +49,7 @@ pub struct ValidatorStore<T, E: EthSpec> {
     spec: Arc<ChainSpec>,
     log: Logger,
     temp_dir: Option<Arc<TempDir>>,
-    fork_service: ForkService<T, E>,
+    fork_service: ForkService<T>,
     _phantom: PhantomData<E>,
 }

@@ -57,7 +59,7 @@ impl<T: SlotClock + 'static, E: EthSpec> ValidatorStore<T, E> {
         config: &Config,
         genesis_validators_root: Hash256,
         spec: ChainSpec,
-        fork_service: ForkService<T, E>,
+        fork_service: ForkService<T>,
         log: Logger,
     ) -> Result<Self, String> {
         let slashing_db_path = config.validator_dir.join(SLASHING_PROTECTION_FILENAME);
@@ -91,6 +93,43 @@ impl<T: SlotClock + 'static, E: EthSpec> ValidatorStore<T, E> {
         })
     }

+    pub fn initialized_validators(&self) -> Arc<RwLock<InitializedValidators>> {
+        self.validators.clone()
+    }
+
+    /// Insert a new validator to `self`, where the validator is represented by an EIP-2335
+    /// keystore on the filesystem.
+    ///
+    /// This function includes:
+    ///
+    /// - Add the validator definition to the YAML file, saving it to the filesystem.
+    /// - Enable validator with the slashing protection database.
+    /// - If `enable == true`, start performing duties for the validator.
+    pub async fn add_validator_keystore<P: AsRef<Path>>(
+        &self,
+        voting_keystore_path: P,
+        password: ZeroizeString,
+        enable: bool,
+    ) -> Result<ValidatorDefinition, String> {
+        let mut validator_def =
+            ValidatorDefinition::new_keystore_with_password(voting_keystore_path, Some(password))
+                .map_err(|e| format!("failed to create validator definitions: {:?}", e))?;
+
+        self.slashing_protection
+            .register_validator(&validator_def.voting_public_key)
+            .map_err(|e| format!("failed to register validator: {:?}", e))?;
+
+        validator_def.enabled = enable;
+
+        self.validators
+            .write()
+            .add_definition(validator_def.clone())
+            .await
+            .map_err(|e| format!("Unable to add definition: {:?}", e))?;
+
+        Ok(validator_def)
+    }
+
     /// Register all known validators with the slashing protection database.
     ///
     /// Registration is required to protect against a lost or missing slashing database,