Log to file without json format (#1485)
## Issue Addressed

N/A

## Proposed Changes

Previously, the only ways to log to a file were to redirect stdout/stderr or to use JSON logging. Redirecting works, but it is easy to accidentally overwrite the file instead of appending to it, which has cost me valuable logs on several occasions. JSON logging creates a timestamped backup of an existing logfile, but the JSON format itself is awkward to read. This PR changes the `--logfile` option to write the same human-readable format used in the terminal to the logfile, while keeping the timestamped backup of any existing file.
Parent: 05a8399769 · Commit: e3d45eda1e
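For context on the approach: the new file-logging path relies on a standard slog pattern in which the same human-readable `FullFormat` drain can write to any `io::Write`, not just the terminal. The following is a minimal, standalone sketch of that pattern, not Lighthouse code; it assumes only the published `slog`, `slog-term` and `slog-async` crates, and `example.log` is a placeholder path.

```rust
use slog::{info, o, Drain, Logger};
use std::fs::OpenOptions;

fn main() -> Result<(), String> {
    // Placeholder path for this sketch only.
    let file = OpenOptions::new()
        .create(true)
        .write(true)
        .truncate(true)
        .open("example.log")
        .map_err(|e| format!("Unable to open logfile: {:?}", e))?;

    // PlainDecorator emits the usual terminal-style lines, minus ANSI colours.
    let decorator = slog_term::PlainDecorator::new(file);
    let drain = slog_term::FullFormat::new(decorator).build().fuse();
    let drain = slog_async::Async::new(drain).build().fuse();

    let log = Logger::root(drain, o!());
    info!(log, "Logging to file"; "path" => "example.log");
    Ok(())
}
```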
```diff
@@ -4,6 +4,7 @@ extern crate lazy_static;
 use lighthouse_metrics::{
     inc_counter, try_create_int_counter, IntCounter, Result as MetricsResult,
 };
+use slog_term::Decorator;
 use std::io::{Result, Write};
 
 pub const MAX_MESSAGE_WIDTH: usize = 40;
@@ -19,13 +20,13 @@ lazy_static! {
         try_create_int_counter("crit_total", "Count of crits logged");
 }
 
-pub struct AlignedTermDecorator {
-    wrapped: slog_term::TermDecorator,
+pub struct AlignedTermDecorator<D: Decorator> {
+    wrapped: D,
     message_width: usize,
 }
 
-impl AlignedTermDecorator {
-    pub fn new(decorator: slog_term::TermDecorator, message_width: usize) -> AlignedTermDecorator {
+impl<D: Decorator> AlignedTermDecorator<D> {
+    pub fn new(decorator: D, message_width: usize) -> Self {
         AlignedTermDecorator {
             wrapped: decorator,
             message_width,
@@ -33,7 +34,7 @@ impl AlignedTermDecorator {
     }
 }
 
-impl slog_term::Decorator for AlignedTermDecorator {
+impl<D: Decorator> Decorator for AlignedTermDecorator<D> {
     fn with_record<F>(
         &self,
         record: &slog::Record,
```
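The effect of the change above is that `AlignedTermDecorator` is no longer tied to `slog_term::TermDecorator`: any `D: Decorator` can be wrapped, including a `PlainDecorator` over a `File`, which is what the new `--logfile` path needs. The sketch below illustrates why the generic bound is enough to build the same drain over either backend; `full_format_drain` is a made-up helper name, not part of this PR.

```rust
use slog::Drain;
use slog_term::Decorator;

// Any Decorator backend can feed the same FullFormat drain once the
// wrapper is generic, which is the point of the change above.
fn full_format_drain<D: Decorator + Send + 'static>(decorator: D) -> slog_async::Async {
    let drain = slog_term::FullFormat::new(decorator).build().fuse();
    slog_async::Async::new(drain).build()
}

fn main() {
    // Terminal backend (coloured output), as before.
    let _terminal = full_format_drain(slog_term::TermDecorator::new().build());

    // File backend (plain text), which the generic parameter now permits.
    let file = std::fs::File::create("example.log").expect("create logfile");
    let _file = full_format_drain(slog_term::PlainDecorator::new(file));
}
```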
```diff
@@ -151,6 +151,77 @@ impl<E: EthSpec> EnvironmentBuilder<E> {
         Ok(self)
     }
 
+    /// Sets the logger (and all child loggers) to log to a file.
+    pub fn log_to_file(
+        mut self,
+        path: PathBuf,
+        debug_level: &str,
+        log_format: Option<&str>,
+    ) -> Result<Self, String> {
+        // Creating a backup if the logfile already exists.
+        if path.exists() {
+            let start = SystemTime::now();
+            let timestamp = start
+                .duration_since(UNIX_EPOCH)
+                .map_err(|e| e.to_string())?
+                .as_secs();
+            let file_stem = path
+                .file_stem()
+                .ok_or_else(|| "Invalid file name".to_string())?
+                .to_str()
+                .ok_or_else(|| "Failed to create str from filename".to_string())?;
+            let file_ext = path.extension().unwrap_or_else(|| OsStr::new(""));
+            let backup_name = format!("{}_backup_{}", file_stem, timestamp);
+            let backup_path = path.with_file_name(backup_name).with_extension(file_ext);
+            FsRename(&path, &backup_path).map_err(|e| e.to_string())?;
+        }
+
+        let file = OpenOptions::new()
+            .create(true)
+            .write(true)
+            .truncate(true)
+            .open(&path)
+            .map_err(|e| format!("Unable to open logfile: {:?}", e))?;
+
+        // Setting up the initial logger format and building it.
+        let drain = if let Some(format) = log_format {
+            match format.to_uppercase().as_str() {
+                "JSON" => {
+                    let drain = slog_json::Json::default(file).fuse();
+                    slog_async::Async::new(drain).build()
+                }
+                _ => return Err("Logging format provided is not supported".to_string()),
+            }
+        } else {
+            let decorator = slog_term::PlainDecorator::new(file);
+            let decorator =
+                logging::AlignedTermDecorator::new(decorator, logging::MAX_MESSAGE_WIDTH);
+            let drain = slog_term::FullFormat::new(decorator).build().fuse();
+            slog_async::Async::new(drain).build()
+        };
+
+        let drain = match debug_level {
+            "info" => drain.filter_level(Level::Info),
+            "debug" => drain.filter_level(Level::Debug),
+            "trace" => drain.filter_level(Level::Trace),
+            "warn" => drain.filter_level(Level::Warning),
+            "error" => drain.filter_level(Level::Error),
+            "crit" => drain.filter_level(Level::Critical),
+            unknown => return Err(format!("Unknown debug-level: {}", unknown)),
+        };
+
+        let log = Logger::root(drain.fuse(), o!());
+        info!(
+            log,
+            "Logging to file";
+            "path" => format!("{:?}", path)
+        );
+
+        self.log = Some(log);
+
+        Ok(self)
+    }
+
     /// Adds a testnet configuration to the environment.
     pub fn eth2_testnet_config(
         mut self,
```
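The backup step in `log_to_file` uses only the standard library: it splits the existing path into stem and extension, inserts `_backup_<unix-seconds>`, and renames the old file before the new one is truncated. Below is a standalone sketch of just that naming logic; the `backup_path` function name and the example path are illustrative, not taken from the PR.

```rust
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::time::{SystemTime, UNIX_EPOCH};

// Build "<stem>_backup_<unix-seconds>.<ext>" next to the original path,
// mirroring the backup naming used by `log_to_file` above.
fn backup_path(path: &Path) -> Result<PathBuf, String> {
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_err(|e| e.to_string())?
        .as_secs();
    let file_stem = path
        .file_stem()
        .ok_or_else(|| "Invalid file name".to_string())?
        .to_str()
        .ok_or_else(|| "Failed to create str from filename".to_string())?;
    let file_ext = path.extension().unwrap_or_else(|| OsStr::new(""));
    let backup_name = format!("{}_backup_{}", file_stem, timestamp);
    Ok(path.with_file_name(backup_name).with_extension(file_ext))
}

fn main() {
    // e.g. "beacon.log" -> "beacon_backup_<unix-seconds>.log"
    println!("{:?}", backup_path(Path::new("beacon.log")));
}
```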
```diff
@@ -320,68 +391,6 @@ impl<E: EthSpec> Environment<E> {
         }
     }
 
-    /// Sets the logger (and all child loggers) to log to a file.
-    pub fn log_to_json_file(
-        &mut self,
-        path: PathBuf,
-        debug_level: &str,
-        log_format: Option<&str>,
-    ) -> Result<(), String> {
-        // Creating a backup if the logfile already exists.
-        if path.exists() {
-            let start = SystemTime::now();
-            let timestamp = start
-                .duration_since(UNIX_EPOCH)
-                .map_err(|e| e.to_string())?
-                .as_secs();
-            let file_stem = path
-                .file_stem()
-                .ok_or_else(|| "Invalid file name".to_string())?
-                .to_str()
-                .ok_or_else(|| "Failed to create str from filename".to_string())?;
-            let file_ext = path.extension().unwrap_or_else(|| OsStr::new(""));
-            let backup_name = format!("{}_backup_{}", file_stem, timestamp);
-            let backup_path = path.with_file_name(backup_name).with_extension(file_ext);
-            FsRename(&path, &backup_path).map_err(|e| e.to_string())?;
-        }
-
-        let file = OpenOptions::new()
-            .create(true)
-            .write(true)
-            .truncate(true)
-            .open(&path)
-            .map_err(|e| format!("Unable to open logfile: {:?}", e))?;
-
-        let log_format = log_format.unwrap_or("JSON");
-        let drain = match log_format.to_uppercase().as_str() {
-            "JSON" => {
-                let drain = slog_json::Json::default(file).fuse();
-                slog_async::Async::new(drain).build()
-            }
-            _ => return Err("Logging format provided is not supported".to_string()),
-        };
-
-        let drain = match debug_level {
-            "info" => drain.filter_level(Level::Info),
-            "debug" => drain.filter_level(Level::Debug),
-            "trace" => drain.filter_level(Level::Trace),
-            "warn" => drain.filter_level(Level::Warning),
-            "error" => drain.filter_level(Level::Error),
-            "crit" => drain.filter_level(Level::Critical),
-            unknown => return Err(format!("Unknown debug-level: {}", unknown)),
-        };
-
-        self.log = Logger::root(drain.fuse(), o!());
-
-        info!(
-            self.log,
-            "Logging to JSON file";
-            "path" => format!("{:?}", path)
-        );
-
-        Ok(())
-    }
-
     pub fn eth_spec_instance(&self) -> &E {
         &self.eth_spec_instance
     }
```
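With `log_to_json_file` removed, the string-to-`Level` match now appears only in the builder methods. If one wanted to deduplicate it further, a helper along these lines would work; `parse_debug_level` is a hypothetical name and is not something this PR introduces.

```rust
use slog::Level;

// Hypothetical helper: map the CLI debug-level string to a slog Level,
// mirroring the match used in `log_to_file`.
fn parse_debug_level(debug_level: &str) -> Result<Level, String> {
    match debug_level {
        "info" => Ok(Level::Info),
        "debug" => Ok(Level::Debug),
        "trace" => Ok(Level::Trace),
        "warn" => Ok(Level::Warning),
        "error" => Ok(Level::Error),
        "crit" => Ok(Level::Critical),
        unknown => Err(format!("Unknown debug-level: {}", unknown)),
    }
}

fn main() {
    assert_eq!(parse_debug_level("info"), Ok(Level::Info));
    assert!(parse_debug_level("loud").is_err());
}
```

The caller would then apply it with `drain.filter_level(parse_debug_level(debug_level)?)`.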
```diff
@@ -63,7 +63,7 @@ fn main() {
                 .long("logfile")
                 .value_name("FILE")
                 .help(
-                    "File path where output will be written. Default file logging format is JSON.",
+                    "File path where output will be written.",
                 )
                 .takes_value(true),
         )
@@ -197,21 +197,22 @@ fn run<E: EthSpec>(
         optional_testnet_config = clap_utils::parse_testnet_dir(matches, "testnet-dir")?;
     };
 
-    let mut environment = environment_builder
-        .async_logger(debug_level, log_format)?
+    let builder = if let Some(log_path) = matches.value_of("logfile") {
+        let path = log_path
+            .parse::<PathBuf>()
+            .map_err(|e| format!("Failed to parse log path: {:?}", e))?;
+        environment_builder.log_to_file(path, debug_level, log_format)?
+    } else {
+        environment_builder.async_logger(debug_level, log_format)?
+    };
+
+    let mut environment = builder
         .multi_threaded_tokio_runtime()?
         .optional_eth2_testnet_config(optional_testnet_config)?
         .build()?;
 
     let log = environment.core_context().log().clone();
 
-    if let Some(log_path) = matches.value_of("logfile") {
-        let path = log_path
-            .parse::<PathBuf>()
-            .map_err(|e| format!("Failed to parse log path: {:?}", e))?;
-        environment.log_to_json_file(path, debug_level, log_format)?;
-    }
-
     // Note: the current code technically allows for starting a beacon node _and_ a validator
     // client at the same time.
     //
```