Add background file logging (#2762)

## Issue Addressed

Closes #1996 

## Proposed Changes

Run a second `Logger` via `sloggers` which logs to a file in the background with:
- separate `debug-level` for background and terminal logging
- the ability to limit log size
- rotation through a customizable number of log files
- an option to compress old log files (`.gz` format)

Add the following new CLI flags:
- `--logfile-debug-level`: The debug level of the log files
- `--logfile-max-size`: The maximum size of each log file
- `--logfile-max-number`: The number of old log files to store
- `--logfile-compress`: Whether to compress old log files

By default background logging uses the `debug` log level and saves logfiles to:
- Beacon Node:  `$HOME/.lighthouse/$network/beacon/logs/beacon.log`
- Validator Client:  `$HOME/.lighthouse/$network/validators/logs/validator.log`

Or, when using the `--datadir` flag:
`$datadir/beacon/logs/beacon.log` and `$datadir/validators/logs/validator.log`

Once rotated, old logs are stored like so: `beacon.log.1`, `beacon.log.2` etc. 
> Note: `beacon.log.1` is always newer than `beacon.log.2`.

## Additional Info

Currently the default value of `--logfile-max-size` is 200 (MB) and `--logfile-max-number` is 5.
This means that the maximum storage space that the logs will take up by default is 1.2GB. 
(200MB x 5 from old log files + <200MB the current logfile being written to)
Happy to adjust these default values to whatever people think is appropriate. 

It's also worth noting that when logging to a file, we lose our custom `slog` formatting. This means the logfile logs look like this:
```
Oct 27 16:02:50.305 INFO Lighthouse started, version: Lighthouse/v2.0.1-8edd9d4+, module: lighthouse:413
Oct 27 16:02:50.305 INFO Configured for network, name: prater, module: lighthouse:414
```
This commit is contained in:
Mac L 2021-11-30 03:25:32 +00:00
parent 6625aa4afe
commit fe75a0a9a1
16 changed files with 258 additions and 128 deletions

3
Cargo.lock generated
View File

@ -1383,7 +1383,6 @@ dependencies = [
"eth2_config", "eth2_config",
"eth2_network_config", "eth2_network_config",
"exit-future", "exit-future",
"filesystem",
"futures", "futures",
"logging", "logging",
"slog", "slog",
@ -3214,6 +3213,7 @@ dependencies = [
"boot_node", "boot_node",
"clap", "clap",
"clap_utils", "clap_utils",
"directory",
"env_logger 0.9.0", "env_logger 0.9.0",
"environment", "environment",
"eth2_hashing 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "eth2_hashing 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -5414,6 +5414,7 @@ dependencies = [
"serde", "serde",
"slog", "slog",
"slog-async", "slog-async",
"slog-json",
"slog-kvfilter", "slog-kvfilter",
"slog-scope", "slog-scope",
"slog-stdlog", "slog-stdlog",

View File

@ -26,7 +26,7 @@ rayon = "1.4.1"
serde = "1.0.116" serde = "1.0.116"
serde_derive = "1.0.116" serde_derive = "1.0.116"
slog = { version = "2.5.2", features = ["max_level_trace"] } slog = { version = "2.5.2", features = ["max_level_trace"] }
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }
slot_clock = { path = "../../common/slot_clock" } slot_clock = { path = "../../common/slot_clock" }
eth2_hashing = "0.2.0" eth2_hashing = "0.2.0"
eth2_ssz = "0.4.0" eth2_ssz = "0.4.0"

View File

@ -8,7 +8,7 @@ edition = "2018"
eth1_test_rig = { path = "../../testing/eth1_test_rig" } eth1_test_rig = { path = "../../testing/eth1_test_rig" }
toml = "0.5.6" toml = "0.5.6"
web3 = { version = "0.17.0", default-features = false, features = ["http-tls", "signing", "ws-tls-tokio"] } web3 = { version = "0.17.0", default-features = false, features = ["http-tls", "signing", "ws-tls-tokio"] }
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }
environment = { path = "../../lighthouse/environment" } environment = { path = "../../lighthouse/environment" }
[dependencies] [dependencies]

View File

@ -5,7 +5,7 @@ authors = ["Sigma Prime <contact@sigmaprime.io>"]
edition = "2018" edition = "2018"
[dev-dependencies] [dev-dependencies]
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }
genesis = { path = "../genesis" } genesis = { path = "../genesis" }
matches = "0.1.8" matches = "0.1.8"
exit-future = "0.2.0" exit-future = "0.2.0"

View File

@ -23,5 +23,5 @@ serde_derive = "1.0.116"
lazy_static = "1.4.0" lazy_static = "1.4.0"
lighthouse_metrics = { path = "../../common/lighthouse_metrics" } lighthouse_metrics = { path = "../../common/lighthouse_metrics" }
lru = "0.6.0" lru = "0.6.0"
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }
directory = { path = "../../common/directory" } directory = { path = "../../common/directory" }

View File

@ -40,6 +40,19 @@ pub fn ensure_dir_exists<P: AsRef<Path>>(path: P) -> Result<(), String> {
Ok(()) Ok(())
} }
/// If `arg` is in `matches`, parses the value as a path.
///
/// Otherwise, attempts to find the default directory for the `testnet` from the `matches`.
pub fn parse_path_or_default(matches: &ArgMatches, arg: &'static str) -> Result<PathBuf, String> {
clap_utils::parse_path_with_default_in_home_dir(
matches,
arg,
PathBuf::new()
.join(DEFAULT_ROOT_DIR)
.join(get_network_dir(matches)),
)
}
/// If `arg` is in `matches`, parses the value as a path. /// If `arg` is in `matches`, parses the value as a path.
/// ///
/// Otherwise, attempts to find the default directory for the `testnet` from the `matches` /// Otherwise, attempts to find the default directory for the `testnet` from the `matches`

View File

@ -12,4 +12,4 @@ slog = "2.5.2"
slog-term = "2.6.0" slog-term = "2.6.0"
lighthouse_metrics = { path = "../lighthouse_metrics" } lighthouse_metrics = { path = "../lighthouse_metrics" }
lazy_static = "1.4.0" lazy_static = "1.4.0"
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }

View File

@ -16,7 +16,7 @@ mod transition_blocks;
use clap::{App, Arg, ArgMatches, SubCommand}; use clap::{App, Arg, ArgMatches, SubCommand};
use clap_utils::parse_path_with_default_in_home_dir; use clap_utils::parse_path_with_default_in_home_dir;
use environment::EnvironmentBuilder; use environment::{EnvironmentBuilder, LoggerConfig};
use parse_ssz::run_parse_ssz; use parse_ssz::run_parse_ssz;
use std::path::PathBuf; use std::path::PathBuf;
use std::process; use std::process;
@ -584,8 +584,16 @@ fn run<T: EthSpec>(
let env = env_builder let env = env_builder
.multi_threaded_tokio_runtime() .multi_threaded_tokio_runtime()
.map_err(|e| format!("should start tokio runtime: {:?}", e))? .map_err(|e| format!("should start tokio runtime: {:?}", e))?
.async_logger("trace", None) .initialize_logger(LoggerConfig {
.map_err(|e| format!("should start null logger: {:?}", e))? path: None,
debug_level: "trace",
logfile_debug_level: "trace",
log_format: None,
max_log_size: 0,
max_log_number: 0,
compression: false,
})
.map_err(|e| format!("should start logger: {:?}", e))?
.build() .build()
.map_err(|e| format!("should build env: {:?}", e))?; .map_err(|e| format!("should build env: {:?}", e))?;

View File

@ -20,7 +20,7 @@ spec-minimal = []
[dependencies] [dependencies]
beacon_node = { "path" = "../beacon_node" } beacon_node = { "path" = "../beacon_node" }
slog = { version = "2.5.2", features = ["max_level_trace"] } slog = { version = "2.5.2", features = ["max_level_trace"] }
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }
types = { "path" = "../consensus/types" } types = { "path" = "../consensus/types" }
bls = { path = "../crypto/bls" } bls = { path = "../crypto/bls" }
eth2_hashing = "0.2.0" eth2_hashing = "0.2.0"
@ -41,6 +41,7 @@ serde = { version = "1.0.116", features = ["derive"] }
serde_json = "1.0.59" serde_json = "1.0.59"
task_executor = { path = "../common/task_executor" } task_executor = { path = "../common/task_executor" }
malloc_utils = { path = "../common/malloc_utils" } malloc_utils = { path = "../common/malloc_utils" }
directory = { path = "../common/directory" }
[dev-dependencies] [dev-dependencies]
tempfile = "3.1.0" tempfile = "3.1.0"

View File

@ -7,7 +7,7 @@ edition = "2018"
[dependencies] [dependencies]
tokio = { version = "1.14.0", features = ["macros", "rt", "rt-multi-thread", "signal" ] } tokio = { version = "1.14.0", features = ["macros", "rt", "rt-multi-thread", "signal" ] }
slog = { version = "2.5.2", features = ["max_level_trace"] } slog = { version = "2.5.2", features = ["max_level_trace"] }
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }
types = { path = "../../consensus/types" } types = { path = "../../consensus/types" }
eth2_config = { path = "../../common/eth2_config" } eth2_config = { path = "../../common/eth2_config" }
task_executor = { path = "../../common/task_executor" } task_executor = { path = "../../common/task_executor" }
@ -18,7 +18,6 @@ slog-async = "2.5.0"
futures = "0.3.7" futures = "0.3.7"
slog-json = "2.3.0" slog-json = "2.3.0"
exit-future = "0.2.0" exit-future = "0.2.0"
filesystem = {"path" = "../../common/filesystem"}
[target.'cfg(not(target_family = "unix"))'.dependencies] [target.'cfg(not(target_family = "unix"))'.dependencies]
ctrlc = { version = "3.1.6", features = ["termination"] } ctrlc = { version = "3.1.6", features = ["termination"] }

View File

@ -9,17 +9,16 @@
use eth2_config::Eth2Config; use eth2_config::Eth2Config;
use eth2_network_config::Eth2NetworkConfig; use eth2_network_config::Eth2NetworkConfig;
use filesystem::restrict_file_permissions;
use futures::channel::mpsc::{channel, Receiver, Sender}; use futures::channel::mpsc::{channel, Receiver, Sender};
use futures::{future, StreamExt}; use futures::{future, StreamExt};
use slog::{error, info, o, warn, Drain, Level, Logger}; use slog::{error, info, o, warn, Drain, Duplicate, Level, Logger};
use sloggers::{null::NullLoggerBuilder, Build}; use sloggers::{
use std::ffi::OsStr; file::FileLoggerBuilder, null::NullLoggerBuilder, types::Format, types::Severity, Build,
use std::fs::{rename as FsRename, OpenOptions}; };
use std::fs::create_dir_all;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::Arc; use std::sync::Arc;
use std::time::{SystemTime, UNIX_EPOCH};
use task_executor::{ShutdownReason, TaskExecutor}; use task_executor::{ShutdownReason, TaskExecutor};
use tokio::runtime::{Builder as RuntimeBuilder, Runtime}; use tokio::runtime::{Builder as RuntimeBuilder, Runtime};
use types::{EthSpec, MainnetEthSpec, MinimalEthSpec}; use types::{EthSpec, MainnetEthSpec, MinimalEthSpec};
@ -38,6 +37,21 @@ const LOG_CHANNEL_SIZE: usize = 2048;
/// The maximum time in seconds the client will wait for all internal tasks to shutdown. /// The maximum time in seconds the client will wait for all internal tasks to shutdown.
const MAXIMUM_SHUTDOWN_TIME: u64 = 15; const MAXIMUM_SHUTDOWN_TIME: u64 = 15;
/// Configuration for logging.
/// Background file logging is disabled if one of:
/// - `path` == None,
/// - `max_log_size` == 0,
/// - `max_log_number` == 0,
pub struct LoggerConfig<'a> {
pub path: Option<PathBuf>,
pub debug_level: &'a str,
pub logfile_debug_level: &'a str,
pub log_format: Option<&'a str>,
pub max_log_size: u64,
pub max_log_number: usize,
pub compression: bool,
}
/// Builds an `Environment`. /// Builds an `Environment`.
pub struct EnvironmentBuilder<E: EthSpec> { pub struct EnvironmentBuilder<E: EthSpec> {
runtime: Option<Arc<Runtime>>, runtime: Option<Arc<Runtime>>,
@ -93,118 +107,98 @@ impl<E: EthSpec> EnvironmentBuilder<E> {
Ok(self) Ok(self)
} }
/// Specifies that the `slog` asynchronous logger should be used. Ideal for production. /// Initializes the logger using the specified configuration.
///
/// The logger is "async" because it has a dedicated thread that accepts logs and then /// The logger is "async" because it has a dedicated thread that accepts logs and then
/// asynchronously flushes them to stdout/files/etc. This means the thread that raised the log /// asynchronously flushes them to stdout/files/etc. This means the thread that raised the log
/// does not have to wait for the logs to be flushed. /// does not have to wait for the logs to be flushed.
pub fn async_logger( /// The logger can be duplicated and more detailed logs can be output to `logfile`.
mut self, /// Note that background file logging will spawn a new thread.
debug_level: &str, pub fn initialize_logger(mut self, config: LoggerConfig) -> Result<Self, String> {
log_format: Option<&str>, // Setting up the initial logger format and build it.
) -> Result<Self, String> { let stdout_drain = if let Some(format) = config.log_format {
// Setting up the initial logger format and building it.
let drain = if let Some(format) = log_format {
match format.to_uppercase().as_str() { match format.to_uppercase().as_str() {
"JSON" => { "JSON" => {
let drain = slog_json::Json::default(std::io::stdout()).fuse(); let stdout_drain = slog_json::Json::default(std::io::stdout()).fuse();
slog_async::Async::new(drain) slog_async::Async::new(stdout_drain)
.chan_size(LOG_CHANNEL_SIZE) .chan_size(LOG_CHANNEL_SIZE)
.build() .build()
} }
_ => return Err("Logging format provided is not supported".to_string()), _ => return Err("Logging format provided is not supported".to_string()),
} }
} else { } else {
let decorator = slog_term::TermDecorator::new().build(); let stdout_decorator = slog_term::TermDecorator::new().build();
let decorator = let stdout_decorator =
logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH); logging::AlignedTermDecorator::new(stdout_decorator, logging::MAX_MESSAGE_WIDTH);
let drain = slog_term::FullFormat::new(decorator).build().fuse(); let stdout_drain = slog_term::FullFormat::new(stdout_decorator).build().fuse();
slog_async::Async::new(drain) slog_async::Async::new(stdout_drain)
.chan_size(LOG_CHANNEL_SIZE) .chan_size(LOG_CHANNEL_SIZE)
.build() .build()
}; };
let drain = match debug_level { let stdout_drain = match config.debug_level {
"info" => drain.filter_level(Level::Info), "info" => stdout_drain.filter_level(Level::Info),
"debug" => drain.filter_level(Level::Debug), "debug" => stdout_drain.filter_level(Level::Debug),
"trace" => drain.filter_level(Level::Trace), "trace" => stdout_drain.filter_level(Level::Trace),
"warn" => drain.filter_level(Level::Warning), "warn" => stdout_drain.filter_level(Level::Warning),
"error" => drain.filter_level(Level::Error), "error" => stdout_drain.filter_level(Level::Error),
"crit" => drain.filter_level(Level::Critical), "crit" => stdout_drain.filter_level(Level::Critical),
unknown => return Err(format!("Unknown debug-level: {}", unknown)), unknown => return Err(format!("Unknown debug-level: {}", unknown)),
}; };
self.log = Some(Logger::root(drain.fuse(), o!())); let stdout_logger = Logger::root(stdout_drain.fuse(), o!());
Ok(self)
}
/// Sets the logger (and all child loggers) to log to a file. // Disable file logging if values set to 0.
pub fn log_to_file( if config.max_log_size == 0 || config.max_log_number == 0 {
mut self, self.log = Some(stdout_logger);
path: PathBuf, return Ok(self);
debug_level: &str,
log_format: Option<&str>,
) -> Result<Self, String> {
// Creating a backup if the logfile already exists.
if path.exists() {
let start = SystemTime::now();
let timestamp = start
.duration_since(UNIX_EPOCH)
.map_err(|e| e.to_string())?
.as_secs();
let file_stem = path
.file_stem()
.ok_or("Invalid file name")?
.to_str()
.ok_or("Failed to create str from filename")?;
let file_ext = path.extension().unwrap_or_else(|| OsStr::new(""));
let backup_name = format!("{}_backup_{}", file_stem, timestamp);
let backup_path = path.with_file_name(backup_name).with_extension(file_ext);
FsRename(&path, &backup_path).map_err(|e| e.to_string())?;
} }
let file = OpenOptions::new() // Disable file logging if no path is specified.
.create(true) let path = match config.path {
.write(true) Some(path) => path,
.truncate(true) None => {
.open(&path) self.log = Some(stdout_logger);
.map_err(|e| format!("Unable to open logfile: {:?}", e))?; return Ok(self);
restrict_file_permissions(&path)
.map_err(|e| format!("Unable to set file permissions for {:?}: {:?}", path, e))?;
// Setting up the initial logger format and building it.
let drain = if let Some(format) = log_format {
match format.to_uppercase().as_str() {
"JSON" => {
let drain = slog_json::Json::default(file).fuse();
slog_async::Async::new(drain)
.chan_size(LOG_CHANNEL_SIZE)
.build()
}
_ => return Err("Logging format provided is not supported".to_string()),
} }
} else {
let decorator = slog_term::PlainDecorator::new(file);
let decorator =
logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH);
let drain = slog_term::FullFormat::new(decorator).build().fuse();
slog_async::Async::new(drain)
.chan_size(LOG_CHANNEL_SIZE)
.build()
}; };
let drain = match debug_level { // Ensure directories are created before the logfile.
"info" => drain.filter_level(Level::Info), if !path.exists() {
"debug" => drain.filter_level(Level::Debug), let mut dir = path.clone();
"trace" => drain.filter_level(Level::Trace), dir.pop();
"warn" => drain.filter_level(Level::Warning),
"error" => drain.filter_level(Level::Error), // Create the necessary directories for the correct service and network.
"crit" => drain.filter_level(Level::Critical), if !dir.exists() {
unknown => return Err(format!("Unknown debug-level: {}", unknown)), create_dir_all(dir).map_err(|e| format!("Unable to create directory: {:?}", e))?;
}
}
let logfile_level = match config.logfile_debug_level {
"info" => Severity::Info,
"debug" => Severity::Debug,
"trace" => Severity::Trace,
"warn" => Severity::Warning,
"error" => Severity::Error,
"crit" => Severity::Critical,
unknown => return Err(format!("Unknown loglevel-debug-level: {}", unknown)),
}; };
let log = Logger::root(drain.fuse(), o!()); let file_logger = FileLoggerBuilder::new(&path)
.level(logfile_level)
.channel_size(LOG_CHANNEL_SIZE)
.format(match config.log_format {
Some("JSON") => Format::Json,
_ => Format::default(),
})
.rotate_size(config.max_log_size)
.rotate_keep(config.max_log_number)
.rotate_compress(config.compression)
.restrict_permissions(true)
.build()
.map_err(|e| format!("Unable to build file logger: {}", e))?;
let log = Logger::root(Duplicate::new(stdout_logger, file_logger).fuse(), o!());
info!( info!(
log, log,
"Logging to file"; "Logging to file";

View File

@ -4,9 +4,10 @@ mod metrics;
use beacon_node::{get_eth2_network_config, ProductionBeaconNode}; use beacon_node::{get_eth2_network_config, ProductionBeaconNode};
use clap::{App, Arg, ArgMatches}; use clap::{App, Arg, ArgMatches};
use clap_utils::flags::DISABLE_MALLOC_TUNING_FLAG; use clap_utils::{flags::DISABLE_MALLOC_TUNING_FLAG, parse_optional};
use directory::{parse_path_or_default, DEFAULT_BEACON_NODE_DIR, DEFAULT_VALIDATOR_DIR};
use env_logger::{Builder, Env}; use env_logger::{Builder, Env};
use environment::EnvironmentBuilder; use environment::{EnvironmentBuilder, LoggerConfig};
use eth2_hashing::have_sha_extensions; use eth2_hashing::have_sha_extensions;
use eth2_network_config::{Eth2NetworkConfig, DEFAULT_HARDCODED_NETWORK, HARDCODED_NET_NAMES}; use eth2_network_config::{Eth2NetworkConfig, DEFAULT_HARDCODED_NETWORK, HARDCODED_NET_NAMES};
use lighthouse_version::VERSION; use lighthouse_version::VERSION;
@ -80,23 +81,68 @@ fn main() {
.long("logfile") .long("logfile")
.value_name("FILE") .value_name("FILE")
.help( .help(
"File path where output will be written.", "File path where the log file will be stored. Once it grows to the \
) value specified in `--logfile-max-size` a new log file is generated where \
.takes_value(true), future logs are stored. \
Once the number of log files exceeds the value specified in \
`--logfile-max-number` the oldest log file will be overwritten.")
.takes_value(true)
.global(true),
)
.arg(
Arg::with_name("logfile-debug-level")
.long("logfile-debug-level")
.value_name("LEVEL")
.help("The verbosity level used when emitting logs to the log file.")
.takes_value(true)
.possible_values(&["info", "debug", "trace", "warn", "error", "crit"])
.default_value("debug")
.global(true),
)
.arg(
Arg::with_name("logfile-max-size")
.long("logfile-max-size")
.value_name("SIZE")
.help(
"The maximum size (in MB) each log file can grow to before rotating. If set \
to 0, background file logging is disabled.")
.takes_value(true)
.default_value("200")
.global(true),
)
.arg(
Arg::with_name("logfile-max-number")
.long("logfile-max-number")
.value_name("COUNT")
.help(
"The maximum number of log files that will be stored. If set to 0, \
background file logging is disabled.")
.takes_value(true)
.default_value("5")
.global(true),
)
.arg(
Arg::with_name("logfile-compress")
.long("logfile-compress")
.help(
"If present, compress old log files. This can help reduce the space needed \
to store old logs.")
.global(true),
) )
.arg( .arg(
Arg::with_name("log-format") Arg::with_name("log-format")
.long("log-format") .long("log-format")
.value_name("FORMAT") .value_name("FORMAT")
.help("Specifies the format used for logging.") .help("Specifies the log format used when emitting logs to the terminal.")
.possible_values(&["JSON"]) .possible_values(&["JSON"])
.takes_value(true), .takes_value(true)
.global(true),
) )
.arg( .arg(
Arg::with_name("debug-level") Arg::with_name("debug-level")
.long("debug-level") .long("debug-level")
.value_name("LEVEL") .value_name("LEVEL")
.help("The verbosity level for emitting logs.") .help("Specifies the verbosity level used when emitting logs to the terminal.")
.takes_value(true) .takes_value(true)
.possible_values(&["info", "debug", "trace", "warn", "error", "crit"]) .possible_values(&["info", "debug", "trace", "warn", "error", "crit"])
.global(true) .global(true)
@ -257,15 +303,58 @@ fn run<E: EthSpec>(
let log_format = matches.value_of("log-format"); let log_format = matches.value_of("log-format");
let builder = if let Some(log_path) = matches.value_of("logfile") { let logfile_debug_level = matches
let path = log_path .value_of("logfile-debug-level")
.parse::<PathBuf>() .ok_or("Expected --logfile-debug-level flag")?;
.map_err(|e| format!("Failed to parse log path: {:?}", e))?;
environment_builder.log_to_file(path, debug_level, log_format)? let logfile_max_size: u64 = matches
} else { .value_of("logfile-max-size")
environment_builder.async_logger(debug_level, log_format)? .ok_or("Expected --logfile-max-size flag")?
.parse()
.map_err(|e| format!("Failed to parse `logfile-max-size`: {:?}", e))?;
let logfile_max_number: usize = matches
.value_of("logfile-max-number")
.ok_or("Expected --logfile-max-number flag")?
.parse()
.map_err(|e| format!("Failed to parse `logfile-max-number`: {:?}", e))?;
let logfile_compress = matches.is_present("logfile-compress");
// Construct the path to the log file.
let mut log_path: Option<PathBuf> = parse_optional(matches, "logfile")?;
if log_path.is_none() {
log_path = match matches.subcommand_name() {
Some("beacon_node") => Some(
parse_path_or_default(matches, "datadir")?
.join(DEFAULT_BEACON_NODE_DIR)
.join("logs")
.join("beacon")
.with_extension("log"),
),
Some("validator_client") => Some(
parse_path_or_default(matches, "datadir")?
.join(DEFAULT_VALIDATOR_DIR)
.join("logs")
.join("validator")
.with_extension("log"),
),
_ => None,
};
}
let logger_config = LoggerConfig {
path: log_path,
debug_level,
logfile_debug_level,
log_format,
max_log_size: logfile_max_size * 1_024 * 1_024,
max_log_number: logfile_max_number,
compression: logfile_compress,
}; };
let builder = environment_builder.initialize_logger(logger_config)?;
let mut environment = builder let mut environment = builder
.multi_threaded_tokio_runtime()? .multi_threaded_tokio_runtime()?
.optional_eth2_network_config(Some(testnet_config))? .optional_eth2_network_config(Some(testnet_config))?

View File

@ -21,7 +21,7 @@ safe_arith = { path = "../consensus/safe_arith" }
serde = "1.0" serde = "1.0"
serde_derive = "1.0" serde_derive = "1.0"
slog = "2.5.2" slog = "2.5.2"
sloggers = "2.0.2" sloggers = { version = "2.1.1", features = ["json"] }
tree_hash = "0.4.0" tree_hash = "0.4.0"
tree_hash_derive = "0.4.0" tree_hash_derive = "0.4.0"
types = { path = "../consensus/types" } types = { path = "../consensus/types" }

View File

@ -6,8 +6,8 @@ use eth1::{DEFAULT_CHAIN_ID, DEFAULT_NETWORK_ID};
use eth1_test_rig::GanacheEth1Instance; use eth1_test_rig::GanacheEth1Instance;
use futures::prelude::*; use futures::prelude::*;
use node_test_rig::{ use node_test_rig::{
environment::EnvironmentBuilder, testing_client_config, testing_validator_config, environment::{EnvironmentBuilder, LoggerConfig},
ClientGenesis, ValidatorFiles, testing_client_config, testing_validator_config, ClientGenesis, ValidatorFiles,
}; };
use rayon::prelude::*; use rayon::prelude::*;
use sensitive_url::SensitiveUrl; use sensitive_url::SensitiveUrl;
@ -53,7 +53,15 @@ pub fn run_eth1_sim(matches: &ArgMatches) -> Result<(), String> {
let log_format = None; let log_format = None;
let mut env = EnvironmentBuilder::minimal() let mut env = EnvironmentBuilder::minimal()
.async_logger(log_level, log_format)? .initialize_logger(LoggerConfig {
path: None,
debug_level: log_level,
logfile_debug_level: "debug",
log_format,
max_log_size: 0,
max_log_number: 0,
compression: false,
})?
.multi_threaded_tokio_runtime()? .multi_threaded_tokio_runtime()?
.build()?; .build()?;

View File

@ -2,8 +2,8 @@ use crate::{checks, LocalNetwork};
use clap::ArgMatches; use clap::ArgMatches;
use futures::prelude::*; use futures::prelude::*;
use node_test_rig::{ use node_test_rig::{
environment::EnvironmentBuilder, testing_client_config, testing_validator_config, environment::{EnvironmentBuilder, LoggerConfig},
ClientGenesis, ValidatorFiles, testing_client_config, testing_validator_config, ClientGenesis, ValidatorFiles,
}; };
use rayon::prelude::*; use rayon::prelude::*;
use std::cmp::max; use std::cmp::max;
@ -45,7 +45,15 @@ pub fn run_no_eth1_sim(matches: &ArgMatches) -> Result<(), String> {
let log_format = None; let log_format = None;
let mut env = EnvironmentBuilder::mainnet() let mut env = EnvironmentBuilder::mainnet()
.async_logger(log_level, log_format)? .initialize_logger(LoggerConfig {
path: None,
debug_level: log_level,
logfile_debug_level: "debug",
log_format,
max_log_size: 0,
max_log_number: 0,
compression: false,
})?
.multi_threaded_tokio_runtime()? .multi_threaded_tokio_runtime()?
.build()?; .build()?;

View File

@ -3,7 +3,8 @@ use crate::local_network::LocalNetwork;
use clap::ArgMatches; use clap::ArgMatches;
use futures::prelude::*; use futures::prelude::*;
use node_test_rig::{ use node_test_rig::{
environment::EnvironmentBuilder, testing_client_config, ClientGenesis, ValidatorFiles, environment::{EnvironmentBuilder, LoggerConfig},
testing_client_config, ClientGenesis, ValidatorFiles,
}; };
use node_test_rig::{testing_validator_config, ClientConfig}; use node_test_rig::{testing_validator_config, ClientConfig};
use std::cmp::max; use std::cmp::max;
@ -45,7 +46,15 @@ fn syncing_sim(
log_format: Option<&str>, log_format: Option<&str>,
) -> Result<(), String> { ) -> Result<(), String> {
let mut env = EnvironmentBuilder::minimal() let mut env = EnvironmentBuilder::minimal()
.async_logger(log_level, log_format)? .initialize_logger(LoggerConfig {
path: None,
debug_level: log_level,
logfile_debug_level: "debug",
log_format,
max_log_size: 0,
max_log_number: 0,
compression: false,
})?
.multi_threaded_tokio_runtime()? .multi_threaded_tokio_runtime()?
.build()?; .build()?;