Add background file logging (#2762)
## Issue Addressed Closes #1996 ## Proposed Changes Run a second `Logger` via `sloggers` which logs to a file in the background with: - separate `debug-level` for background and terminal logging - the ability to limit log size - rotation through a customizable number of log files - an option to compress old log files (`.gz` format) Add the following new CLI flags: - `--logfile-debug-level`: The debug level of the log files - `--logfile-max-size`: The maximum size of each log file - `--logfile-max-number`: The number of old log files to store - `--logfile-compress`: Whether to compress old log files By default background logging uses the `debug` log level and saves logfiles to: - Beacon Node: `$HOME/.lighthouse/$network/beacon/logs/beacon.log` - Validator Client: `$HOME/.lighthouse/$network/validators/logs/validator.log` Or, when using the `--datadir` flag: `$datadir/beacon/logs/beacon.log` and `$datadir/validators/logs/validator.log` Once rotated, old logs are stored like so: `beacon.log.1`, `beacon.log.2` etc. > Note: `beacon.log.1` is always newer than `beacon.log.2`. ## Additional Info Currently the default value of `--logfile-max-size` is 200 (MB) and `--logfile-max-number` is 5. This means that the maximum storage space that the logs will take up by default is 1.2GB. (200MB x 5 from old log files + <200MB the current logfile being written to) Happy to adjust these default values to whatever people think is appropriate. It's also worth noting that when logging to a file, we lose our custom `slog` formatting. This means the logfile logs look like this: ``` Oct 27 16:02:50.305 INFO Lighthouse started, version: Lighthouse/v2.0.1-8edd9d4+, module: lighthouse:413 Oct 27 16:02:50.305 INFO Configured for network, name: prater, module: lighthouse:414 ```
This commit is contained in:
parent
6625aa4afe
commit
fe75a0a9a1
3
Cargo.lock
generated
3
Cargo.lock
generated
@ -1383,7 +1383,6 @@ dependencies = [
|
||||
"eth2_config",
|
||||
"eth2_network_config",
|
||||
"exit-future",
|
||||
"filesystem",
|
||||
"futures",
|
||||
"logging",
|
||||
"slog",
|
||||
@ -3214,6 +3213,7 @@ dependencies = [
|
||||
"boot_node",
|
||||
"clap",
|
||||
"clap_utils",
|
||||
"directory",
|
||||
"env_logger 0.9.0",
|
||||
"environment",
|
||||
"eth2_hashing 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -5414,6 +5414,7 @@ dependencies = [
|
||||
"serde",
|
||||
"slog",
|
||||
"slog-async",
|
||||
"slog-json",
|
||||
"slog-kvfilter",
|
||||
"slog-scope",
|
||||
"slog-stdlog",
|
||||
|
@ -26,7 +26,7 @@ rayon = "1.4.1"
|
||||
serde = "1.0.116"
|
||||
serde_derive = "1.0.116"
|
||||
slog = { version = "2.5.2", features = ["max_level_trace"] }
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
slot_clock = { path = "../../common/slot_clock" }
|
||||
eth2_hashing = "0.2.0"
|
||||
eth2_ssz = "0.4.0"
|
||||
|
@ -8,7 +8,7 @@ edition = "2018"
|
||||
eth1_test_rig = { path = "../../testing/eth1_test_rig" }
|
||||
toml = "0.5.6"
|
||||
web3 = { version = "0.17.0", default-features = false, features = ["http-tls", "signing", "ws-tls-tokio"] }
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
environment = { path = "../../lighthouse/environment" }
|
||||
|
||||
[dependencies]
|
||||
|
@ -5,7 +5,7 @@ authors = ["Sigma Prime <contact@sigmaprime.io>"]
|
||||
edition = "2018"
|
||||
|
||||
[dev-dependencies]
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
genesis = { path = "../genesis" }
|
||||
matches = "0.1.8"
|
||||
exit-future = "0.2.0"
|
||||
|
@ -23,5 +23,5 @@ serde_derive = "1.0.116"
|
||||
lazy_static = "1.4.0"
|
||||
lighthouse_metrics = { path = "../../common/lighthouse_metrics" }
|
||||
lru = "0.6.0"
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
directory = { path = "../../common/directory" }
|
||||
|
@ -40,6 +40,19 @@ pub fn ensure_dir_exists<P: AsRef<Path>>(path: P) -> Result<(), String> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// If `arg` is in `matches`, parses the value as a path.
|
||||
///
|
||||
/// Otherwise, attempts to find the default directory for the `testnet` from the `matches`.
|
||||
pub fn parse_path_or_default(matches: &ArgMatches, arg: &'static str) -> Result<PathBuf, String> {
|
||||
clap_utils::parse_path_with_default_in_home_dir(
|
||||
matches,
|
||||
arg,
|
||||
PathBuf::new()
|
||||
.join(DEFAULT_ROOT_DIR)
|
||||
.join(get_network_dir(matches)),
|
||||
)
|
||||
}
|
||||
|
||||
/// If `arg` is in `matches`, parses the value as a path.
|
||||
///
|
||||
/// Otherwise, attempts to find the default directory for the `testnet` from the `matches`
|
||||
|
@ -12,4 +12,4 @@ slog = "2.5.2"
|
||||
slog-term = "2.6.0"
|
||||
lighthouse_metrics = { path = "../lighthouse_metrics" }
|
||||
lazy_static = "1.4.0"
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
|
@ -16,7 +16,7 @@ mod transition_blocks;
|
||||
|
||||
use clap::{App, Arg, ArgMatches, SubCommand};
|
||||
use clap_utils::parse_path_with_default_in_home_dir;
|
||||
use environment::EnvironmentBuilder;
|
||||
use environment::{EnvironmentBuilder, LoggerConfig};
|
||||
use parse_ssz::run_parse_ssz;
|
||||
use std::path::PathBuf;
|
||||
use std::process;
|
||||
@ -584,8 +584,16 @@ fn run<T: EthSpec>(
|
||||
let env = env_builder
|
||||
.multi_threaded_tokio_runtime()
|
||||
.map_err(|e| format!("should start tokio runtime: {:?}", e))?
|
||||
.async_logger("trace", None)
|
||||
.map_err(|e| format!("should start null logger: {:?}", e))?
|
||||
.initialize_logger(LoggerConfig {
|
||||
path: None,
|
||||
debug_level: "trace",
|
||||
logfile_debug_level: "trace",
|
||||
log_format: None,
|
||||
max_log_size: 0,
|
||||
max_log_number: 0,
|
||||
compression: false,
|
||||
})
|
||||
.map_err(|e| format!("should start logger: {:?}", e))?
|
||||
.build()
|
||||
.map_err(|e| format!("should build env: {:?}", e))?;
|
||||
|
||||
|
@ -20,7 +20,7 @@ spec-minimal = []
|
||||
[dependencies]
|
||||
beacon_node = { "path" = "../beacon_node" }
|
||||
slog = { version = "2.5.2", features = ["max_level_trace"] }
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
types = { "path" = "../consensus/types" }
|
||||
bls = { path = "../crypto/bls" }
|
||||
eth2_hashing = "0.2.0"
|
||||
@ -41,6 +41,7 @@ serde = { version = "1.0.116", features = ["derive"] }
|
||||
serde_json = "1.0.59"
|
||||
task_executor = { path = "../common/task_executor" }
|
||||
malloc_utils = { path = "../common/malloc_utils" }
|
||||
directory = { path = "../common/directory" }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.1.0"
|
||||
|
@ -7,7 +7,7 @@ edition = "2018"
|
||||
[dependencies]
|
||||
tokio = { version = "1.14.0", features = ["macros", "rt", "rt-multi-thread", "signal" ] }
|
||||
slog = { version = "2.5.2", features = ["max_level_trace"] }
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
types = { path = "../../consensus/types" }
|
||||
eth2_config = { path = "../../common/eth2_config" }
|
||||
task_executor = { path = "../../common/task_executor" }
|
||||
@ -18,7 +18,6 @@ slog-async = "2.5.0"
|
||||
futures = "0.3.7"
|
||||
slog-json = "2.3.0"
|
||||
exit-future = "0.2.0"
|
||||
filesystem = {"path" = "../../common/filesystem"}
|
||||
|
||||
[target.'cfg(not(target_family = "unix"))'.dependencies]
|
||||
ctrlc = { version = "3.1.6", features = ["termination"] }
|
||||
|
@ -9,17 +9,16 @@
|
||||
|
||||
use eth2_config::Eth2Config;
|
||||
use eth2_network_config::Eth2NetworkConfig;
|
||||
use filesystem::restrict_file_permissions;
|
||||
use futures::channel::mpsc::{channel, Receiver, Sender};
|
||||
use futures::{future, StreamExt};
|
||||
|
||||
use slog::{error, info, o, warn, Drain, Level, Logger};
|
||||
use sloggers::{null::NullLoggerBuilder, Build};
|
||||
use std::ffi::OsStr;
|
||||
use std::fs::{rename as FsRename, OpenOptions};
|
||||
use slog::{error, info, o, warn, Drain, Duplicate, Level, Logger};
|
||||
use sloggers::{
|
||||
file::FileLoggerBuilder, null::NullLoggerBuilder, types::Format, types::Severity, Build,
|
||||
};
|
||||
use std::fs::create_dir_all;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use task_executor::{ShutdownReason, TaskExecutor};
|
||||
use tokio::runtime::{Builder as RuntimeBuilder, Runtime};
|
||||
use types::{EthSpec, MainnetEthSpec, MinimalEthSpec};
|
||||
@ -38,6 +37,21 @@ const LOG_CHANNEL_SIZE: usize = 2048;
|
||||
/// The maximum time in seconds the client will wait for all internal tasks to shutdown.
|
||||
const MAXIMUM_SHUTDOWN_TIME: u64 = 15;
|
||||
|
||||
/// Configuration for logging.
|
||||
/// Background file logging is disabled if one of:
|
||||
/// - `path` == None,
|
||||
/// - `max_log_size` == 0,
|
||||
/// - `max_log_number` == 0,
|
||||
pub struct LoggerConfig<'a> {
|
||||
pub path: Option<PathBuf>,
|
||||
pub debug_level: &'a str,
|
||||
pub logfile_debug_level: &'a str,
|
||||
pub log_format: Option<&'a str>,
|
||||
pub max_log_size: u64,
|
||||
pub max_log_number: usize,
|
||||
pub compression: bool,
|
||||
}
|
||||
|
||||
/// Builds an `Environment`.
|
||||
pub struct EnvironmentBuilder<E: EthSpec> {
|
||||
runtime: Option<Arc<Runtime>>,
|
||||
@ -93,118 +107,98 @@ impl<E: EthSpec> EnvironmentBuilder<E> {
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
/// Specifies that the `slog` asynchronous logger should be used. Ideal for production.
|
||||
///
|
||||
/// Initializes the logger using the specified configuration.
|
||||
/// The logger is "async" because it has a dedicated thread that accepts logs and then
|
||||
/// asynchronously flushes them to stdout/files/etc. This means the thread that raised the log
|
||||
/// does not have to wait for the logs to be flushed.
|
||||
pub fn async_logger(
|
||||
mut self,
|
||||
debug_level: &str,
|
||||
log_format: Option<&str>,
|
||||
) -> Result<Self, String> {
|
||||
// Setting up the initial logger format and building it.
|
||||
let drain = if let Some(format) = log_format {
|
||||
/// The logger can be duplicated and more detailed logs can be output to `logfile`.
|
||||
/// Note that background file logging will spawn a new thread.
|
||||
pub fn initialize_logger(mut self, config: LoggerConfig) -> Result<Self, String> {
|
||||
// Setting up the initial logger format and build it.
|
||||
let stdout_drain = if let Some(format) = config.log_format {
|
||||
match format.to_uppercase().as_str() {
|
||||
"JSON" => {
|
||||
let drain = slog_json::Json::default(std::io::stdout()).fuse();
|
||||
slog_async::Async::new(drain)
|
||||
let stdout_drain = slog_json::Json::default(std::io::stdout()).fuse();
|
||||
slog_async::Async::new(stdout_drain)
|
||||
.chan_size(LOG_CHANNEL_SIZE)
|
||||
.build()
|
||||
}
|
||||
_ => return Err("Logging format provided is not supported".to_string()),
|
||||
}
|
||||
} else {
|
||||
let decorator = slog_term::TermDecorator::new().build();
|
||||
let decorator =
|
||||
logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH);
|
||||
let drain = slog_term::FullFormat::new(decorator).build().fuse();
|
||||
slog_async::Async::new(drain)
|
||||
let stdout_decorator = slog_term::TermDecorator::new().build();
|
||||
let stdout_decorator =
|
||||
logging::AlignedTermDecorator::new(stdout_decorator, logging::MAX_MESSAGE_WIDTH);
|
||||
let stdout_drain = slog_term::FullFormat::new(stdout_decorator).build().fuse();
|
||||
slog_async::Async::new(stdout_drain)
|
||||
.chan_size(LOG_CHANNEL_SIZE)
|
||||
.build()
|
||||
};
|
||||
|
||||
let drain = match debug_level {
|
||||
"info" => drain.filter_level(Level::Info),
|
||||
"debug" => drain.filter_level(Level::Debug),
|
||||
"trace" => drain.filter_level(Level::Trace),
|
||||
"warn" => drain.filter_level(Level::Warning),
|
||||
"error" => drain.filter_level(Level::Error),
|
||||
"crit" => drain.filter_level(Level::Critical),
|
||||
let stdout_drain = match config.debug_level {
|
||||
"info" => stdout_drain.filter_level(Level::Info),
|
||||
"debug" => stdout_drain.filter_level(Level::Debug),
|
||||
"trace" => stdout_drain.filter_level(Level::Trace),
|
||||
"warn" => stdout_drain.filter_level(Level::Warning),
|
||||
"error" => stdout_drain.filter_level(Level::Error),
|
||||
"crit" => stdout_drain.filter_level(Level::Critical),
|
||||
unknown => return Err(format!("Unknown debug-level: {}", unknown)),
|
||||
};
|
||||
|
||||
self.log = Some(Logger::root(drain.fuse(), o!()));
|
||||
Ok(self)
|
||||
let stdout_logger = Logger::root(stdout_drain.fuse(), o!());
|
||||
|
||||
// Disable file logging if values set to 0.
|
||||
if config.max_log_size == 0 || config.max_log_number == 0 {
|
||||
self.log = Some(stdout_logger);
|
||||
return Ok(self);
|
||||
}
|
||||
|
||||
/// Sets the logger (and all child loggers) to log to a file.
|
||||
pub fn log_to_file(
|
||||
mut self,
|
||||
path: PathBuf,
|
||||
debug_level: &str,
|
||||
log_format: Option<&str>,
|
||||
) -> Result<Self, String> {
|
||||
// Creating a backup if the logfile already exists.
|
||||
if path.exists() {
|
||||
let start = SystemTime::now();
|
||||
let timestamp = start
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.map_err(|e| e.to_string())?
|
||||
.as_secs();
|
||||
let file_stem = path
|
||||
.file_stem()
|
||||
.ok_or("Invalid file name")?
|
||||
.to_str()
|
||||
.ok_or("Failed to create str from filename")?;
|
||||
let file_ext = path.extension().unwrap_or_else(|| OsStr::new(""));
|
||||
let backup_name = format!("{}_backup_{}", file_stem, timestamp);
|
||||
let backup_path = path.with_file_name(backup_name).with_extension(file_ext);
|
||||
FsRename(&path, &backup_path).map_err(|e| e.to_string())?;
|
||||
// Disable file logging if no path is specified.
|
||||
let path = match config.path {
|
||||
Some(path) => path,
|
||||
None => {
|
||||
self.log = Some(stdout_logger);
|
||||
return Ok(self);
|
||||
}
|
||||
|
||||
let file = OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(true)
|
||||
.open(&path)
|
||||
.map_err(|e| format!("Unable to open logfile: {:?}", e))?;
|
||||
|
||||
restrict_file_permissions(&path)
|
||||
.map_err(|e| format!("Unable to set file permissions for {:?}: {:?}", path, e))?;
|
||||
|
||||
// Setting up the initial logger format and building it.
|
||||
let drain = if let Some(format) = log_format {
|
||||
match format.to_uppercase().as_str() {
|
||||
"JSON" => {
|
||||
let drain = slog_json::Json::default(file).fuse();
|
||||
slog_async::Async::new(drain)
|
||||
.chan_size(LOG_CHANNEL_SIZE)
|
||||
.build()
|
||||
}
|
||||
_ => return Err("Logging format provided is not supported".to_string()),
|
||||
}
|
||||
} else {
|
||||
let decorator = slog_term::PlainDecorator::new(file);
|
||||
let decorator =
|
||||
logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH);
|
||||
let drain = slog_term::FullFormat::new(decorator).build().fuse();
|
||||
slog_async::Async::new(drain)
|
||||
.chan_size(LOG_CHANNEL_SIZE)
|
||||
.build()
|
||||
};
|
||||
|
||||
let drain = match debug_level {
|
||||
"info" => drain.filter_level(Level::Info),
|
||||
"debug" => drain.filter_level(Level::Debug),
|
||||
"trace" => drain.filter_level(Level::Trace),
|
||||
"warn" => drain.filter_level(Level::Warning),
|
||||
"error" => drain.filter_level(Level::Error),
|
||||
"crit" => drain.filter_level(Level::Critical),
|
||||
unknown => return Err(format!("Unknown debug-level: {}", unknown)),
|
||||
// Ensure directories are created before the logfile.
|
||||
if !path.exists() {
|
||||
let mut dir = path.clone();
|
||||
dir.pop();
|
||||
|
||||
// Create the necessary directories for the correct service and network.
|
||||
if !dir.exists() {
|
||||
create_dir_all(dir).map_err(|e| format!("Unable to create directory: {:?}", e))?;
|
||||
}
|
||||
}
|
||||
|
||||
let logfile_level = match config.logfile_debug_level {
|
||||
"info" => Severity::Info,
|
||||
"debug" => Severity::Debug,
|
||||
"trace" => Severity::Trace,
|
||||
"warn" => Severity::Warning,
|
||||
"error" => Severity::Error,
|
||||
"crit" => Severity::Critical,
|
||||
unknown => return Err(format!("Unknown loglevel-debug-level: {}", unknown)),
|
||||
};
|
||||
|
||||
let log = Logger::root(drain.fuse(), o!());
|
||||
let file_logger = FileLoggerBuilder::new(&path)
|
||||
.level(logfile_level)
|
||||
.channel_size(LOG_CHANNEL_SIZE)
|
||||
.format(match config.log_format {
|
||||
Some("JSON") => Format::Json,
|
||||
_ => Format::default(),
|
||||
})
|
||||
.rotate_size(config.max_log_size)
|
||||
.rotate_keep(config.max_log_number)
|
||||
.rotate_compress(config.compression)
|
||||
.restrict_permissions(true)
|
||||
.build()
|
||||
.map_err(|e| format!("Unable to build file logger: {}", e))?;
|
||||
|
||||
let log = Logger::root(Duplicate::new(stdout_logger, file_logger).fuse(), o!());
|
||||
|
||||
info!(
|
||||
log,
|
||||
"Logging to file";
|
||||
|
@ -4,9 +4,10 @@ mod metrics;
|
||||
|
||||
use beacon_node::{get_eth2_network_config, ProductionBeaconNode};
|
||||
use clap::{App, Arg, ArgMatches};
|
||||
use clap_utils::flags::DISABLE_MALLOC_TUNING_FLAG;
|
||||
use clap_utils::{flags::DISABLE_MALLOC_TUNING_FLAG, parse_optional};
|
||||
use directory::{parse_path_or_default, DEFAULT_BEACON_NODE_DIR, DEFAULT_VALIDATOR_DIR};
|
||||
use env_logger::{Builder, Env};
|
||||
use environment::EnvironmentBuilder;
|
||||
use environment::{EnvironmentBuilder, LoggerConfig};
|
||||
use eth2_hashing::have_sha_extensions;
|
||||
use eth2_network_config::{Eth2NetworkConfig, DEFAULT_HARDCODED_NETWORK, HARDCODED_NET_NAMES};
|
||||
use lighthouse_version::VERSION;
|
||||
@ -80,23 +81,68 @@ fn main() {
|
||||
.long("logfile")
|
||||
.value_name("FILE")
|
||||
.help(
|
||||
"File path where output will be written.",
|
||||
"File path where the log file will be stored. Once it grows to the \
|
||||
value specified in `--logfile-max-size` a new log file is generated where \
|
||||
future logs are stored. \
|
||||
Once the number of log files exceeds the value specified in \
|
||||
`--logfile-max-number` the oldest log file will be overwritten.")
|
||||
.takes_value(true)
|
||||
.global(true),
|
||||
)
|
||||
.takes_value(true),
|
||||
.arg(
|
||||
Arg::with_name("logfile-debug-level")
|
||||
.long("logfile-debug-level")
|
||||
.value_name("LEVEL")
|
||||
.help("The verbosity level used when emitting logs to the log file.")
|
||||
.takes_value(true)
|
||||
.possible_values(&["info", "debug", "trace", "warn", "error", "crit"])
|
||||
.default_value("debug")
|
||||
.global(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("logfile-max-size")
|
||||
.long("logfile-max-size")
|
||||
.value_name("SIZE")
|
||||
.help(
|
||||
"The maximum size (in MB) each log file can grow to before rotating. If set \
|
||||
to 0, background file logging is disabled.")
|
||||
.takes_value(true)
|
||||
.default_value("200")
|
||||
.global(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("logfile-max-number")
|
||||
.long("logfile-max-number")
|
||||
.value_name("COUNT")
|
||||
.help(
|
||||
"The maximum number of log files that will be stored. If set to 0, \
|
||||
background file logging is disabled.")
|
||||
.takes_value(true)
|
||||
.default_value("5")
|
||||
.global(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("logfile-compress")
|
||||
.long("logfile-compress")
|
||||
.help(
|
||||
"If present, compress old log files. This can help reduce the space needed \
|
||||
to store old logs.")
|
||||
.global(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("log-format")
|
||||
.long("log-format")
|
||||
.value_name("FORMAT")
|
||||
.help("Specifies the format used for logging.")
|
||||
.help("Specifies the log format used when emitting logs to the terminal.")
|
||||
.possible_values(&["JSON"])
|
||||
.takes_value(true),
|
||||
.takes_value(true)
|
||||
.global(true),
|
||||
)
|
||||
.arg(
|
||||
Arg::with_name("debug-level")
|
||||
.long("debug-level")
|
||||
.value_name("LEVEL")
|
||||
.help("The verbosity level for emitting logs.")
|
||||
.help("Specifies the verbosity level used when emitting logs to the terminal.")
|
||||
.takes_value(true)
|
||||
.possible_values(&["info", "debug", "trace", "warn", "error", "crit"])
|
||||
.global(true)
|
||||
@ -257,14 +303,57 @@ fn run<E: EthSpec>(
|
||||
|
||||
let log_format = matches.value_of("log-format");
|
||||
|
||||
let builder = if let Some(log_path) = matches.value_of("logfile") {
|
||||
let path = log_path
|
||||
.parse::<PathBuf>()
|
||||
.map_err(|e| format!("Failed to parse log path: {:?}", e))?;
|
||||
environment_builder.log_to_file(path, debug_level, log_format)?
|
||||
} else {
|
||||
environment_builder.async_logger(debug_level, log_format)?
|
||||
let logfile_debug_level = matches
|
||||
.value_of("logfile-debug-level")
|
||||
.ok_or("Expected --logfile-debug-level flag")?;
|
||||
|
||||
let logfile_max_size: u64 = matches
|
||||
.value_of("logfile-max-size")
|
||||
.ok_or("Expected --logfile-max-size flag")?
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse `logfile-max-size`: {:?}", e))?;
|
||||
|
||||
let logfile_max_number: usize = matches
|
||||
.value_of("logfile-max-number")
|
||||
.ok_or("Expected --logfile-max-number flag")?
|
||||
.parse()
|
||||
.map_err(|e| format!("Failed to parse `logfile-max-number`: {:?}", e))?;
|
||||
|
||||
let logfile_compress = matches.is_present("logfile-compress");
|
||||
|
||||
// Construct the path to the log file.
|
||||
let mut log_path: Option<PathBuf> = parse_optional(matches, "logfile")?;
|
||||
if log_path.is_none() {
|
||||
log_path = match matches.subcommand_name() {
|
||||
Some("beacon_node") => Some(
|
||||
parse_path_or_default(matches, "datadir")?
|
||||
.join(DEFAULT_BEACON_NODE_DIR)
|
||||
.join("logs")
|
||||
.join("beacon")
|
||||
.with_extension("log"),
|
||||
),
|
||||
Some("validator_client") => Some(
|
||||
parse_path_or_default(matches, "datadir")?
|
||||
.join(DEFAULT_VALIDATOR_DIR)
|
||||
.join("logs")
|
||||
.join("validator")
|
||||
.with_extension("log"),
|
||||
),
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
|
||||
let logger_config = LoggerConfig {
|
||||
path: log_path,
|
||||
debug_level,
|
||||
logfile_debug_level,
|
||||
log_format,
|
||||
max_log_size: logfile_max_size * 1_024 * 1_024,
|
||||
max_log_number: logfile_max_number,
|
||||
compression: logfile_compress,
|
||||
};
|
||||
|
||||
let builder = environment_builder.initialize_logger(logger_config)?;
|
||||
|
||||
let mut environment = builder
|
||||
.multi_threaded_tokio_runtime()?
|
||||
|
@ -21,7 +21,7 @@ safe_arith = { path = "../consensus/safe_arith" }
|
||||
serde = "1.0"
|
||||
serde_derive = "1.0"
|
||||
slog = "2.5.2"
|
||||
sloggers = "2.0.2"
|
||||
sloggers = { version = "2.1.1", features = ["json"] }
|
||||
tree_hash = "0.4.0"
|
||||
tree_hash_derive = "0.4.0"
|
||||
types = { path = "../consensus/types" }
|
||||
|
@ -6,8 +6,8 @@ use eth1::{DEFAULT_CHAIN_ID, DEFAULT_NETWORK_ID};
|
||||
use eth1_test_rig::GanacheEth1Instance;
|
||||
use futures::prelude::*;
|
||||
use node_test_rig::{
|
||||
environment::EnvironmentBuilder, testing_client_config, testing_validator_config,
|
||||
ClientGenesis, ValidatorFiles,
|
||||
environment::{EnvironmentBuilder, LoggerConfig},
|
||||
testing_client_config, testing_validator_config, ClientGenesis, ValidatorFiles,
|
||||
};
|
||||
use rayon::prelude::*;
|
||||
use sensitive_url::SensitiveUrl;
|
||||
@ -53,7 +53,15 @@ pub fn run_eth1_sim(matches: &ArgMatches) -> Result<(), String> {
|
||||
let log_format = None;
|
||||
|
||||
let mut env = EnvironmentBuilder::minimal()
|
||||
.async_logger(log_level, log_format)?
|
||||
.initialize_logger(LoggerConfig {
|
||||
path: None,
|
||||
debug_level: log_level,
|
||||
logfile_debug_level: "debug",
|
||||
log_format,
|
||||
max_log_size: 0,
|
||||
max_log_number: 0,
|
||||
compression: false,
|
||||
})?
|
||||
.multi_threaded_tokio_runtime()?
|
||||
.build()?;
|
||||
|
||||
|
@ -2,8 +2,8 @@ use crate::{checks, LocalNetwork};
|
||||
use clap::ArgMatches;
|
||||
use futures::prelude::*;
|
||||
use node_test_rig::{
|
||||
environment::EnvironmentBuilder, testing_client_config, testing_validator_config,
|
||||
ClientGenesis, ValidatorFiles,
|
||||
environment::{EnvironmentBuilder, LoggerConfig},
|
||||
testing_client_config, testing_validator_config, ClientGenesis, ValidatorFiles,
|
||||
};
|
||||
use rayon::prelude::*;
|
||||
use std::cmp::max;
|
||||
@ -45,7 +45,15 @@ pub fn run_no_eth1_sim(matches: &ArgMatches) -> Result<(), String> {
|
||||
let log_format = None;
|
||||
|
||||
let mut env = EnvironmentBuilder::mainnet()
|
||||
.async_logger(log_level, log_format)?
|
||||
.initialize_logger(LoggerConfig {
|
||||
path: None,
|
||||
debug_level: log_level,
|
||||
logfile_debug_level: "debug",
|
||||
log_format,
|
||||
max_log_size: 0,
|
||||
max_log_number: 0,
|
||||
compression: false,
|
||||
})?
|
||||
.multi_threaded_tokio_runtime()?
|
||||
.build()?;
|
||||
|
||||
|
@ -3,7 +3,8 @@ use crate::local_network::LocalNetwork;
|
||||
use clap::ArgMatches;
|
||||
use futures::prelude::*;
|
||||
use node_test_rig::{
|
||||
environment::EnvironmentBuilder, testing_client_config, ClientGenesis, ValidatorFiles,
|
||||
environment::{EnvironmentBuilder, LoggerConfig},
|
||||
testing_client_config, ClientGenesis, ValidatorFiles,
|
||||
};
|
||||
use node_test_rig::{testing_validator_config, ClientConfig};
|
||||
use std::cmp::max;
|
||||
@ -45,7 +46,15 @@ fn syncing_sim(
|
||||
log_format: Option<&str>,
|
||||
) -> Result<(), String> {
|
||||
let mut env = EnvironmentBuilder::minimal()
|
||||
.async_logger(log_level, log_format)?
|
||||
.initialize_logger(LoggerConfig {
|
||||
path: None,
|
||||
debug_level: log_level,
|
||||
logfile_debug_level: "debug",
|
||||
log_format,
|
||||
max_log_size: 0,
|
||||
max_log_number: 0,
|
||||
compression: false,
|
||||
})?
|
||||
.multi_threaded_tokio_runtime()?
|
||||
.build()?;
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user