Merge remote-tracking branch 'origin/unstable' into capella

Michael Sproul 2023-01-21 10:37:26 +11:00
commit bb0e99c097
30 changed files with 266 additions and 58 deletions

.cargo/config.toml (new file)

@@ -0,0 +1,4 @@
[env]
# Set the number of arenas to 16 when using jemalloc.
JEMALLOC_SYS_WITH_MALLOC_CONF = "abort_conf:true,narenas:16"

@@ -106,6 +106,7 @@ jobs:
            --platform=linux/${SHORT_ARCH} \
            --file ./Dockerfile.cross . \
            --tag ${IMAGE_NAME}:${VERSION}-${SHORT_ARCH}${VERSION_SUFFIX}${MODERNITY_SUFFIX} \
+           --provenance=false \
            --push
  build-docker-multiarch:
    name: build-docker-multiarch${{ matrix.modernity }}

@@ -306,16 +306,6 @@ jobs:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Typecheck benchmark code without running it
        run: make check-benches
-  check-consensus:
-    name: check-consensus
-    runs-on: ubuntu-latest
-    needs: cargo-fmt
-    steps:
-      - uses: actions/checkout@v3
-      - name: Get latest version of stable Rust
-        run: rustup update stable
-      - name: Typecheck consensus code in strict mode
-        run: make check-consensus
  clippy:
    name: clippy
    runs-on: ubuntu-latest
@@ -382,14 +372,12 @@ jobs:
      - uses: actions/checkout@v3
      - name: Install Rust (${{ env.PINNED_NIGHTLY }})
        run: rustup toolchain install $PINNED_NIGHTLY
-      # NOTE: cargo-udeps version is pinned until this issue is resolved:
-      # https://github.com/est31/cargo-udeps/issues/135
      - name: Install Protoc
        uses: arduino/setup-protoc@e52d9eb8f7b63115df1ac544a1376fdbf5a39612
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
      - name: Install cargo-udeps
-        run: cargo install cargo-udeps --locked --force --version 0.1.30
+        run: cargo install cargo-udeps --locked --force
      - name: Create Cargo config dir
        run: mkdir -p .cargo
      - name: Install custom Cargo config

Cargo.lock (generated)

@@ -2696,6 +2696,12 @@ dependencies = [
 "winapi",
]

+[[package]]
+name = "fs_extra"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394"
+
[[package]]
name = "funty"
version = "1.1.0"
@@ -3596,6 +3602,38 @@ version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"

+[[package]]
+name = "jemalloc-ctl"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1891c671f3db85d8ea8525dd43ab147f9977041911d24a03e5a36187a7bfde9"
+dependencies = [
+ "jemalloc-sys",
+ "libc",
+ "paste",
+]
+
+[[package]]
+name = "jemalloc-sys"
+version = "0.5.2+5.3.0-patched"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "134163979b6eed9564c98637b710b40979939ba351f59952708234ea11b5f3f8"
+dependencies = [
+ "cc",
+ "fs_extra",
+ "libc",
+]
+
+[[package]]
+name = "jemallocator"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "16c2514137880c52b0b4822b563fadd38257c1f380858addb74a400889696ea6"
+dependencies = [
+ "jemalloc-sys",
+ "libc",
+]
+
[[package]]
name = "js-sys"
version = "0.3.60"
@@ -3704,6 +3742,7 @@ dependencies = [
 "lighthouse_network",
 "lighthouse_version",
 "log",
+ "malloc_utils",
 "sensitive_url",
 "serde",
 "serde_json",
@@ -4513,6 +4552,8 @@ dependencies = [
name = "malloc_utils"
version = "0.1.0"
dependencies = [
+ "jemalloc-ctl",
+ "jemallocator",
 "lazy_static",
 "libc",
 "lighthouse_metrics",

@@ -88,6 +88,7 @@ members = [
    "validator_client",
    "validator_client/slashing_protection",
]
+resolver = "2"

[patch]
[patch.crates-io]

@@ -14,8 +14,16 @@ BUILD_PATH_AARCH64 = "target/$(AARCH64_TAG)/release"
PINNED_NIGHTLY ?= nightly
CLIPPY_PINNED_NIGHTLY=nightly-2022-05-19

+# List of features to use when building natively. Can be overriden via the environment.
+# No jemalloc on Windows
+ifeq ($(OS),Windows_NT)
+    FEATURES?=
+else
+    FEATURES?=jemalloc
+endif
+
# List of features to use when cross-compiling. Can be overridden via the environment.
-CROSS_FEATURES ?= gnosis,slasher-lmdb,slasher-mdbx
+CROSS_FEATURES ?= gnosis,slasher-lmdb,slasher-mdbx,jemalloc

# Cargo profile for Cross builds. Default is for local builds, CI uses an override.
CROSS_PROFILE ?= release
@@ -104,10 +112,6 @@ cargo-fmt:
check-benches:
	cargo check --workspace --benches

-# Typechecks consensus code *without* allowing deprecated legacy arithmetic or metrics.
-check-consensus:
-	cargo check -p state_processing --no-default-features
-
# Runs only the ef-test vectors.
run-ef-tests:
	rm -rf $(EF_TESTS)/.accessed_file_log.txt

@@ -66,7 +66,7 @@ of the Lighthouse book.
The best place for discussion is the [Lighthouse Discord
server](https://discord.gg/cyAszAh).

-Sign up to the [Lighthouse Development Updates](https://eepurl.com/dh9Lvb/) mailing list for email
+Sign up to the [Lighthouse Development Updates](https://eepurl.com/dh9Lvb) mailing list for email
notifications about releases, network status and other important information.

Encrypt sensitive messages using our [PGP

@@ -3461,7 +3461,8 @@ pub fn serve<T: BeaconChainTypes>(
                .or(get_lighthouse_attestation_performance.boxed())
                .or(get_lighthouse_block_packing_efficiency.boxed())
                .or(get_lighthouse_merge_readiness.boxed())
-               .or(get_events.boxed()),
+               .or(get_events.boxed())
+               .recover(warp_utils::reject::handle_rejection),
        )
        .boxed()
        .or(warp::post().and(
@@ -3486,7 +3487,8 @@ pub fn serve<T: BeaconChainTypes>(
                .or(post_lighthouse_database_reconstruct.boxed())
                .or(post_lighthouse_database_historical_blocks.boxed())
                .or(post_lighthouse_block_rewards.boxed())
-               .or(post_lighthouse_ui_validator_metrics.boxed()),
+               .or(post_lighthouse_ui_validator_metrics.boxed())
+               .recover(warp_utils::reject::handle_rejection),
        ))
        .recover(warp_utils::reject::handle_rejection)
        .with(slog_logging(log.clone()))

@@ -716,6 +716,10 @@ impl<T: BeaconChainTypes> Worker<T> {
            &metrics::BEACON_BLOCK_GOSSIP_SLOT_START_DELAY_TIME,
            block_delay,
        );
+       metrics::set_gauge(
+           &metrics::BEACON_BLOCK_LAST_DELAY,
+           block_delay.as_millis() as i64,
+       );

        let verification_result = self
            .chain

@@ -357,10 +357,18 @@ lazy_static! {
    pub static ref BEACON_BLOCK_GOSSIP_SLOT_START_DELAY_TIME: Result<Histogram> = try_create_histogram_with_buckets(
        "beacon_block_gossip_slot_start_delay_time",
        "Duration between when the block is received and the start of the slot it belongs to.",
+       // Create a custom bucket list for greater granularity in block delay
+       Ok(vec![0.1, 0.2, 0.3,0.4,0.5,0.75,1.0,1.25,1.5,1.75,2.0,2.5,3.0,3.5,4.0,5.0,6.0,7.0,8.0,9.0,10.0,15.0,20.0])
+       // NOTE: Previous values, which we may want to switch back to.
        // [0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50]
-       decimal_buckets(-1,2)
+       //decimal_buckets(-1,2)
    );
+   pub static ref BEACON_BLOCK_LAST_DELAY: Result<IntGauge> = try_create_int_gauge(
+       "beacon_block_last_delay",
+       "Keeps track of the last block's delay from the start of the slot"
+   );
    pub static ref BEACON_BLOCK_GOSSIP_ARRIVED_LATE_TOTAL: Result<IntCounter> = try_create_int_counter(
        "beacon_block_gossip_arrived_late_total",
        "Count of times when a gossip block arrived from the network later than the attestation deadline.",

@@ -48,17 +48,6 @@ The Ethereum community provides various [public endpoints](https://eth-clients.g
lighthouse bn --checkpoint-sync-url https://example.com/ ...
```

-### Use Infura as a remote beacon node provider
-
-You can use Infura as the remote beacon node provider to load the initial checkpoint state.
-
-1. Sign up for the free Infura ETH2 API using the `Create new project tab` on the [Infura dashboard](https://infura.io/dashboard).
-2. Copy the HTTPS endpoint for the required network (Mainnet/Prater).
-3. Use it as the url for the `--checkpoint-sync-url` flag. e.g.
-
-```
-lighthouse bn --checkpoint-sync-url https://<PROJECT-ID>:<PROJECT-SECRET>@eth2-beacon-mainnet.infura.io ...
-```
-
## Backfilling Blocks

Once forwards sync completes, Lighthouse will commence a "backfill sync" to download the blocks

@@ -64,6 +64,7 @@ choco install protoc
These dependencies are for compiling Lighthouse natively on Windows. Lighthouse can also run
successfully under the [Windows Subsystem for Linux (WSL)][WSL]. If using Ubuntu under WSL, you
should follow the instructions for Ubuntu listed in the [Dependencies (Ubuntu)](#ubuntu) section.
+[WSL]: https://docs.microsoft.com/en-us/windows/wsl/about

## Build Lighthouse
@@ -128,8 +129,12 @@ Commonly used features include:
* `gnosis`: support for the Gnosis Beacon Chain.
* `portable`: support for legacy hardware.
* `modern`: support for exclusively modern hardware.
-* `slasher-mdbx`: support for the MDBX slasher backend (enabled by default).
+* `slasher-mdbx`: support for the MDBX slasher backend. Enabled by default.
* `slasher-lmdb`: support for the LMDB slasher backend.
+* `jemalloc`: use [`jemalloc`][jemalloc] to allocate memory. Enabled by default on Linux and macOS.
+  Not supported on Windows.
+
+[jemalloc]: https://jemalloc.net/

## Compilation Profiles

@@ -58,7 +58,7 @@ supported.
Each execution engine has its own flags for configuring the engine API and JWT. Please consult
the relevant page for your execution engine for the required flags:

-- [Geth: Connecting to Consensus Clients](https://geth.ethereum.org/docs/interface/consensus-clients)
+- [Geth: Connecting to Consensus Clients](https://geth.ethereum.org/docs/getting-started/consensus-clients)
- [Nethermind: Running Nethermind Post Merge](https://docs.nethermind.io/nethermind/first-steps-with-nethermind/running-nethermind-post-merge)
- [Besu: Prepare For The Merge](https://besu.hyperledger.org/en/stable/HowTo/Upgrade/Prepare-for-The-Merge/)
- [Erigon: Beacon Chain (Consensus Layer)](https://github.com/ledgerwatch/erigon#beacon-chain-consensus-layer)
@@ -203,5 +203,5 @@ guidance for specific setups.
- [Ethereum.org: The Merge](https://ethereum.org/en/upgrades/merge/)
- [Ethereum Staking Launchpad: Merge Readiness](https://launchpad.ethereum.org/en/merge-readiness).
- [CoinCashew: Ethereum Merge Upgrade Checklist](https://www.coincashew.com/coins/overview-eth/ethereum-merge-upgrade-checklist-for-home-stakers-and-validators)
-- [EthDocker: Merge Preparation](https://eth-docker.net/docs/About/MergePrep/)
+- [EthDocker: Merge Preparation](https://eth-docker.net/About/MergePrep/)
- [Remy Roy: How to join the Goerli/Prater merge testnet](https://github.com/remyroy/ethstaker/blob/main/merge-goerli-prater.md)

@@ -26,7 +26,7 @@ has authority to control the execution engine.
Each execution engine has its own flags for configuring the engine API and JWT.
Please consult the relevant page of your execution engine for the required flags:

-- [Geth: Connecting to Consensus Clients](https://geth.ethereum.org/docs/interface/consensus-clients)
+- [Geth: Connecting to Consensus Clients](https://geth.ethereum.org/docs/getting-started/consensus-clients)
- [Nethermind: Running Nethermind & CL](https://docs.nethermind.io/nethermind/first-steps-with-nethermind/running-nethermind-post-merge)
- [Besu: Connect to Mainnet](https://besu.hyperledger.org/en/stable/public-networks/get-started/connect/mainnet/)
- [Erigon: Beacon Chain (Consensus Layer)](https://github.com/ledgerwatch/erigon#beacon-chain-consensus-layer)

@@ -10,7 +10,6 @@ status = [
    "merge-transition-ubuntu",
    "no-eth1-simulator-ubuntu",
    "check-benchmarks",
-   "check-consensus",
    "clippy",
    "arbitrary-check",
    "cargo-audit",

@@ -4,13 +4,21 @@ version = "0.1.0"
authors = ["Paul Hauner <paul@paulhauner.com>"]
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
lighthouse_metrics = { path = "../lighthouse_metrics" }
lazy_static = "1.4.0"
libc = "0.2.79"
parking_lot = "0.12.0"
+jemalloc-ctl = { version = "0.5.0", optional = true }
+
+# Jemalloc's background_threads feature requires Linux (pthreads).
+[target.'cfg(target_os = "linux")'.dependencies]
+jemallocator = { version = "0.5.0", optional = true, features = ["stats", "background_threads"] }
+
+[target.'cfg(not(target_os = "linux"))'.dependencies]
+jemallocator = { version = "0.5.0", optional = true, features = ["stats"] }

[features]
mallinfo2 = []
+jemalloc = ["jemallocator", "jemalloc-ctl"]
+jemalloc-profiling = ["jemallocator/profiling"]

@@ -0,0 +1,52 @@
//! Set the allocator to `jemalloc`.
//!
//! Due to `jemalloc` requiring configuration at compile time or immediately upon runtime
//! initialisation it is configured via a Cargo config file in `.cargo/config.toml`.
//!
//! The `jemalloc` tuning can be overriden by:
//!
//! A) `JEMALLOC_SYS_WITH_MALLOC_CONF` at compile-time.
//! B) `_RJEM_MALLOC_CONF` at runtime.
use jemalloc_ctl::{arenas, epoch, stats, Error};
use lazy_static::lazy_static;
use lighthouse_metrics::{set_gauge, try_create_int_gauge, IntGauge};

#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;

// Metrics for jemalloc.
lazy_static! {
    pub static ref NUM_ARENAS: lighthouse_metrics::Result<IntGauge> =
        try_create_int_gauge("jemalloc_num_arenas", "The number of arenas in use");
    pub static ref BYTES_ALLOCATED: lighthouse_metrics::Result<IntGauge> =
        try_create_int_gauge("jemalloc_bytes_allocated", "Equivalent to stats.allocated");
    pub static ref BYTES_ACTIVE: lighthouse_metrics::Result<IntGauge> =
        try_create_int_gauge("jemalloc_bytes_active", "Equivalent to stats.active");
    pub static ref BYTES_MAPPED: lighthouse_metrics::Result<IntGauge> =
        try_create_int_gauge("jemalloc_bytes_mapped", "Equivalent to stats.mapped");
    pub static ref BYTES_METADATA: lighthouse_metrics::Result<IntGauge> =
        try_create_int_gauge("jemalloc_bytes_metadata", "Equivalent to stats.metadata");
    pub static ref BYTES_RESIDENT: lighthouse_metrics::Result<IntGauge> =
        try_create_int_gauge("jemalloc_bytes_resident", "Equivalent to stats.resident");
    pub static ref BYTES_RETAINED: lighthouse_metrics::Result<IntGauge> =
        try_create_int_gauge("jemalloc_bytes_retained", "Equivalent to stats.retained");
}

pub fn scrape_jemalloc_metrics() {
    scrape_jemalloc_metrics_fallible().unwrap()
}

pub fn scrape_jemalloc_metrics_fallible() -> Result<(), Error> {
    // Advance the epoch so that the underlying statistics are updated.
    epoch::advance()?;

    set_gauge(&NUM_ARENAS, arenas::narenas::read()? as i64);
    set_gauge(&BYTES_ALLOCATED, stats::allocated::read()? as i64);
    set_gauge(&BYTES_ACTIVE, stats::active::read()? as i64);
    set_gauge(&BYTES_MAPPED, stats::mapped::read()? as i64);
    set_gauge(&BYTES_METADATA, stats::metadata::read()? as i64);
    set_gauge(&BYTES_RESIDENT, stats::resident::read()? as i64);
    set_gauge(&BYTES_RETAINED, stats::retained::read()? as i64);

    Ok(())
}
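This module exposes the scrape functions but does not schedule any scraping itself. A minimal sketch of how a consumer could keep these gauges fresh, assuming a crate that depends on `malloc_utils` with the `jemalloc` feature enabled; the background thread and 60-second interval are illustrative and not part of this commit:

```rust
use std::thread;
use std::time::Duration;

// Hypothetical wiring: periodically refresh the jemalloc gauges so each Prometheus
// scrape observes recent allocator statistics.
fn spawn_allocator_metrics_loop() {
    thread::spawn(|| loop {
        // `scrape_jemalloc_metrics` is re-exported as `scrape_allocator_metrics`
        // when the `jemalloc` feature is enabled (see the crate root diff below).
        malloc_utils::scrape_allocator_metrics();
        thread::sleep(Duration::from_secs(60));
    });
}
```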

@@ -2,18 +2,18 @@
//!
//! ## Conditional Compilation
//!
-//! Presently, only configuration for "The GNU Allocator" from `glibc` is supported. All other
-//! allocators are ignored.
+//! This crate can be compiled with different feature flags to support different allocators:
//!
-//! It is assumed that if the following two statements are correct then we should expect to
-//! configure `glibc`:
+//! - Jemalloc, via the `jemalloc` feature.
+//! - GNU malloc, if no features are set and the system supports it.
+//! - The system allocator, if no features are set and the allocator is not GNU malloc.
+//!
+//! It is assumed that if Jemalloc is not in use, and the following two statements are correct then
+//! we should expect to configure `glibc`:
//!
//! - `target_os = linux`
//! - `target_env != musl`
//!
-//! In all other cases this library will not attempt to do anything (i.e., all functions are
-//! no-ops).
-//!
//! If the above conditions are fulfilled but `glibc` still isn't present at runtime then a panic
//! may be triggered. It is understood that there's no way to be certain that a compatible `glibc`
//! is present: https://github.com/rust-lang/rust/issues/33244.
@@ -24,18 +24,42 @@
//! detecting `glibc` are best-effort. If this crate throws errors about undefined external
//! functions, then try to compile with the `not_glibc_interface` module.

-#[cfg(all(target_os = "linux", not(target_env = "musl")))]
+#[cfg(all(
+    target_os = "linux",
+    not(target_env = "musl"),
+    not(feature = "jemalloc")
+))]
mod glibc;

+#[cfg(feature = "jemalloc")]
+mod jemalloc;
+
pub use interface::*;

-#[cfg(all(target_os = "linux", not(target_env = "musl")))]
+#[cfg(all(
+    target_os = "linux",
+    not(target_env = "musl"),
+    not(feature = "jemalloc")
+))]
mod interface {
    pub use crate::glibc::configure_glibc_malloc as configure_memory_allocator;
    pub use crate::glibc::scrape_mallinfo_metrics as scrape_allocator_metrics;
}

-#[cfg(any(not(target_os = "linux"), target_env = "musl"))]
+#[cfg(feature = "jemalloc")]
+mod interface {
+    #[allow(dead_code)]
+    pub fn configure_memory_allocator() -> Result<(), String> {
+        Ok(())
+    }
+
+    pub use crate::jemalloc::scrape_jemalloc_metrics as scrape_allocator_metrics;
+}
+
+#[cfg(all(
+    any(not(target_os = "linux"), target_env = "musl"),
+    not(feature = "jemalloc")
+))]
mod interface {
    #[allow(dead_code, clippy::unnecessary_wraps)]
    pub fn configure_memory_allocator() -> Result<(), String> {
@@ -8,6 +8,7 @@ edition = "2021"

[features]
portable = ["bls/supranational-portable"]
fake_crypto = ['bls/fake_crypto']
+jemalloc = ["malloc_utils/jemalloc"]

[dependencies]
bls = { path = "../crypto/bls" }
@@ -40,3 +41,7 @@ eth2 = { path = "../common/eth2" }
snap = "1.0.1"
beacon_chain = { path = "../beacon_node/beacon_chain" }
store = { path = "../beacon_node/store" }
+malloc_utils = { path = "../common/malloc_utils" }
+
+[package.metadata.cargo-udeps.ignore]
+normal = ["malloc_utils"]

@@ -800,6 +800,7 @@ fn run<T: EthSpec>(
        debug_level: String::from("trace"),
        logfile_debug_level: String::from("trace"),
        log_format: None,
+       logfile_format: None,
        log_color: false,
        disable_log_timestamp: false,
        max_log_size: 0,

@@ -24,6 +24,8 @@ gnosis = []
slasher-mdbx = ["slasher/mdbx"]
# Support slasher LMDB backend.
slasher-lmdb = ["slasher/lmdb"]
+# Use jemalloc.
+jemalloc = ["malloc_utils/jemalloc"]

[dependencies]
beacon_node = { "path" = "../beacon_node" }
View File

@ -50,6 +50,7 @@ pub struct LoggerConfig {
pub debug_level: String, pub debug_level: String,
pub logfile_debug_level: String, pub logfile_debug_level: String,
pub log_format: Option<String>, pub log_format: Option<String>,
pub logfile_format: Option<String>,
pub log_color: bool, pub log_color: bool,
pub disable_log_timestamp: bool, pub disable_log_timestamp: bool,
pub max_log_size: u64, pub max_log_size: u64,
@ -64,6 +65,7 @@ impl Default for LoggerConfig {
debug_level: String::from("info"), debug_level: String::from("info"),
logfile_debug_level: String::from("debug"), logfile_debug_level: String::from("debug"),
log_format: None, log_format: None,
logfile_format: None,
log_color: false, log_color: false,
disable_log_timestamp: false, disable_log_timestamp: false,
max_log_size: 200, max_log_size: 200,
@ -252,7 +254,7 @@ impl<E: EthSpec> EnvironmentBuilder<E> {
let file_logger = FileLoggerBuilder::new(&path) let file_logger = FileLoggerBuilder::new(&path)
.level(logfile_level) .level(logfile_level)
.channel_size(LOG_CHANNEL_SIZE) .channel_size(LOG_CHANNEL_SIZE)
.format(match config.log_format.as_deref() { .format(match config.logfile_format.as_deref() {
Some("JSON") => Format::Json, Some("JSON") => Format::Json,
_ => Format::default(), _ => Format::default(),
}) })
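With the new field, the terminal and logfile formats can diverge. A small illustrative construction, assuming access to this crate's `LoggerConfig`; the remaining fields come from the `Default` impl shown above:

```rust
// Illustrative only: keep the default human-readable format on the terminal while
// writing JSON to the logfile. "JSON" is the value matched by the FileLoggerBuilder
// arm above; any other value falls through to Format::default().
fn json_logfile_config() -> LoggerConfig {
    LoggerConfig {
        log_format: None,
        logfile_format: Some("JSON".to_string()),
        ..LoggerConfig::default()
    }
}
```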

@@ -31,6 +31,14 @@ fn bls_library_name() -> &'static str {
    }
}

+fn allocator_name() -> &'static str {
+    if cfg!(feature = "jemalloc") {
+        "jemalloc"
+    } else {
+        "system"
+    }
+}
+
fn main() {
    // Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided.
    if std::env::var("RUST_BACKTRACE").is_err() {
@@ -51,10 +59,12 @@ fn main() {
                "{}\n\
                 BLS library: {}\n\
                 SHA256 hardware acceleration: {}\n\
+                Allocator: {}\n\
                 Specs: mainnet (true), minimal ({}), gnosis ({})",
                VERSION.replace("Lighthouse/", ""),
                bls_library_name(),
                have_sha_extensions(),
+               allocator_name(),
                cfg!(feature = "spec-minimal"),
                cfg!(feature = "gnosis"),
            ).as_str()
@@ -99,6 +109,15 @@ fn main() {
                .default_value("debug")
                .global(true),
        )
+       .arg(
+           Arg::with_name("logfile-format")
+               .long("logfile-format")
+               .value_name("FORMAT")
+               .help("Specifies the log format used when emitting logs to the logfile.")
+               .possible_values(&["DEFAULT", "JSON"])
+               .takes_value(true)
+               .global(true)
+       )
        .arg(
            Arg::with_name("logfile-max-size")
                .long("logfile-max-size")
@@ -402,6 +421,11 @@ fn run<E: EthSpec>(
        .value_of("logfile-debug-level")
        .ok_or("Expected --logfile-debug-level flag")?;

+   let logfile_format = matches
+       .value_of("logfile-format")
+       // Ensure that `logfile-format` defaults to the value of `log-format`.
+       .or_else(|| matches.value_of("log-format"));
+
    let logfile_max_size: u64 = matches
        .value_of("logfile-max-size")
        .ok_or("Expected --logfile-max-size flag")?
@@ -452,6 +476,7 @@ fn run<E: EthSpec>(
        debug_level: String::from(debug_level),
        logfile_debug_level: String::from(logfile_debug_level),
        log_format: log_format.map(String::from),
+       logfile_format: logfile_format.map(String::from),
        log_color,
        disable_log_timestamp,
        max_log_size: logfile_max_size * 1_024 * 1_024,

@@ -1662,7 +1662,24 @@ fn logfile_no_restricted_perms_flag() {
        assert!(config.logger_config.is_restricted == false);
    });
}
+#[test]
+fn logfile_format_default() {
+    CommandLineTest::new()
+        .run_with_zero_port()
+        .with_config(|config| assert_eq!(config.logger_config.logfile_format, None));
+}
+
+#[test]
+fn logfile_format_flag() {
+    CommandLineTest::new()
+        .flag("logfile-format", Some("JSON"))
+        .run_with_zero_port()
+        .with_config(|config| {
+            assert_eq!(
+                config.logger_config.logfile_format,
+                Some("JSON".to_string())
+            )
+        });
+}
#[test]
fn sync_eth1_chain_default() {
    CommandLineTest::new()

@@ -1,11 +1,9 @@
-FROM rust:1.62.1-bullseye AS builder
+FROM rust:1.66.1-bullseye AS builder

-RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev
+RUN apt-get update && apt-get -y upgrade && apt-get install -y cmake libclang-dev protobuf-compiler

COPY . lighthouse

# Build lighthouse directly with a cargo build command, bypassing the Makefile.
-# We have to use nightly in order to disable the new LLVM pass manager.
-RUN rustup default nightly-2022-07-26 && cd lighthouse && LD_LIBRARY_PATH=/lighthouse/testing/antithesis/libvoidstar/ RUSTFLAGS="-Znew-llvm-pass-manager=no -Cpasses=sancov -Cllvm-args=-sanitizer-coverage-level=3 -Cllvm-args=-sanitizer-coverage-trace-pc-guard -Ccodegen-units=1 -Cdebuginfo=2 -L/lighthouse/testing/antithesis/libvoidstar/ -lvoidstar" cargo build --release --manifest-path lighthouse/Cargo.toml --target x86_64-unknown-linux-gnu --features modern --verbose --bin lighthouse
+RUN cd lighthouse && LD_LIBRARY_PATH=/lighthouse/testing/antithesis/libvoidstar/ RUSTFLAGS="-Cpasses=sancov-module -Cllvm-args=-sanitizer-coverage-level=3 -Cllvm-args=-sanitizer-coverage-trace-pc-guard -Ccodegen-units=1 -Cdebuginfo=2 -L/lighthouse/testing/antithesis/libvoidstar/ -lvoidstar" cargo build --release --manifest-path lighthouse/Cargo.toml --target x86_64-unknown-linux-gnu --features modern --verbose --bin lighthouse

# build lcli binary directly with cargo install command, bypassing the makefile
RUN cargo install --path /lighthouse/lcli --force --locked

@@ -62,6 +62,7 @@ pub fn run_eth1_sim(matches: &ArgMatches) -> Result<(), String> {
        debug_level: String::from("debug"),
        logfile_debug_level: String::from("debug"),
        log_format: None,
+       logfile_format: None,
        log_color: false,
        disable_log_timestamp: false,
        max_log_size: 0,

@@ -47,6 +47,7 @@ pub fn run_no_eth1_sim(matches: &ArgMatches) -> Result<(), String> {
        debug_level: String::from("debug"),
        logfile_debug_level: String::from("debug"),
        log_format: None,
+       logfile_format: None,
        log_color: false,
        disable_log_timestamp: false,
        max_log_size: 0,

@@ -51,6 +51,7 @@ fn syncing_sim(
        debug_level: String::from(log_level),
        logfile_debug_level: String::from("debug"),
        log_format: log_format.map(String::from),
+       logfile_format: None,
        log_color: false,
        disable_log_timestamp: false,
        max_log_size: 0,

@@ -335,6 +335,11 @@ impl<T: SlotClock + 'static, E: EthSpec> BlockService<T, E> {
        let proposer_index = self.validator_store.validator_index(&validator_pubkey);
        let validator_pubkey_ref = &validator_pubkey;

+       info!(
+           log,
+           "Requesting unsigned block";
+           "slot" => slot.as_u64(),
+       );
        // Request block from first responsive beacon node.
        let block = self
            .beacon_nodes
@@ -385,6 +390,11 @@ impl<T: SlotClock + 'static, E: EthSpec> BlockService<T, E> {
            }
        };

+       info!(
+           log,
+           "Received unsigned block";
+           "slot" => slot.as_u64(),
+       );
        if proposer_index != Some(block.proposer_index()) {
            return Err(BlockError::Recoverable(
                "Proposer index does not match block proposer. Beacon chain re-orged"
@@ -403,6 +413,11 @@ impl<T: SlotClock + 'static, E: EthSpec> BlockService<T, E> {
            .await
            .map_err(|e| BlockError::Recoverable(format!("Unable to sign block: {:?}", e)))?;

+       info!(
+           log,
+           "Publishing signed block";
+           "slot" => slot.as_u64(),
+       );
        // Publish block with first available beacon node.
        self.beacon_nodes
            .first_success(

@@ -31,6 +31,7 @@ use crate::beacon_node_fallback::{
};
use crate::doppelganger_service::DoppelgangerService;
use crate::graffiti_file::GraffitiFile;
+use crate::initialized_validators::Error::UnableToOpenVotingKeystore;
use account_utils::validator_definitions::ValidatorDefinitions;
use attestation_service::{AttestationService, AttestationServiceBuilder};
use block_service::{BlockService, BlockServiceBuilder};
@@ -184,7 +185,16 @@ impl<T: EthSpec> ProductionValidatorClient<T> {
            log.clone(),
        )
        .await
-       .map_err(|e| format!("Unable to initialize validators: {:?}", e))?;
+       .map_err(|e| {
+           match e {
+               UnableToOpenVotingKeystore(err) => {
+                   format!("Unable to initialize validators: {:?}. If you have recently moved the location of your data directory \
+                       make sure to update the location of voting_keystore_path in your validator_definitions.yml", err)
+               },
+               err => {
+                   format!("Unable to initialize validators: {:?}", err)
+               }
+           }
+       })?;

        let voting_pubkeys: Vec<_> = validators.iter_voting_pubkeys().collect();