Skip to content

Commit

Permalink
Replace logging backend with log4rs and add log rotation (mimblewimble#…
Browse files Browse the repository at this point in the history
…1789)

* Replace logging backend with flexi-logger and add log rotation
* Changed flexi_logger to log4rs
* Disable logging level filtering in Root logger
* Support different logging levels for file and stdout
* Don't log messages from modules other than Grin-related
* Fix formatting
* Place backed up compressed log copies into log file directory
* Increase default log file size to 16 MiB
* Add comment to config file on log_max_size option
  • Loading branch information
eupn authored and ignopeverell committed Oct 21, 2018
1 parent 0852b0c commit 1195071
Show file tree
Hide file tree
Showing 83 changed files with 580 additions and 895 deletions.
2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ humansize = "1.1.0"
daemonize = "0.3"
serde = "1"
serde_json = "1"
slog = { version = "~2.3", features = ["max_level_trace", "release_max_level_trace"] }
log = "0.4"
term = "0.5"

grin_api = { path = "./api" }
Expand Down
2 changes: 1 addition & 1 deletion api/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ ring = "0.13"
serde = "1"
serde_derive = "1"
serde_json = "1"
slog = { version = "~2.3", features = ["max_level_trace", "release_max_level_trace"] }
log = "0.4"
tokio = "0.1.7"
tokio-core = "0.1.17"
tokio-tcp = "0.1"
Expand Down
12 changes: 3 additions & 9 deletions api/src/handlers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@ use types::*;
use url::form_urlencoded;
use util;
use util::secp::pedersen::Commitment;
use util::LOGGER;
use web::*;

// All handlers use `Weak` references instead of `Arc` to avoid cycles that
Expand Down Expand Up @@ -206,12 +205,8 @@ impl OutputHandler {
}

debug!(
LOGGER,
"outputs_block_batch: {}-{}, {:?}, {:?}",
start_height,
end_height,
commitments,
include_rp,
start_height, end_height, commitments, include_rp,
);

let mut return_vec = vec![];
Expand Down Expand Up @@ -745,7 +740,6 @@ impl PoolPushHandler {
identifier: "?.?.?.?".to_string(),
};
info!(
LOGGER,
"Pushing transaction {} to pool (inputs: {}, outputs: {}, kernels: {})",
tx.hash(),
tx.inputs().len(),
Expand All @@ -759,7 +753,7 @@ impl PoolPushHandler {
tx_pool
.add_to_pool(source, tx, !fluff, &header)
.map_err(|e| {
error!(LOGGER, "update_pool: failed with error: {:?}", e);
error!("update_pool: failed with error: {:?}", e);
ErrorKind::Internal(format!("Failed to update pool: {:?}", e)).into()
})
}),
Expand Down Expand Up @@ -808,7 +802,7 @@ pub fn start_rest_apis(
router.add_middleware(basic_auth_middleware);
}

info!(LOGGER, "Starting HTTP API server at {}.", addr);
info!("Starting HTTP API server at {}.", addr);
let socket_addr: SocketAddr = addr.parse().expect("unable to parse socket address");
apis.start(socket_addr, router, tls_config).is_ok()
}
Expand Down
2 changes: 1 addition & 1 deletion api/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ extern crate serde;
extern crate serde_derive;
extern crate serde_json;
#[macro_use]
extern crate slog;
extern crate log;
extern crate futures;
extern crate http;
extern crate hyper_rustls;
Expand Down
10 changes: 3 additions & 7 deletions api/src/rest.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ use std::sync::Arc;
use std::{io, thread};
use tokio_rustls::ServerConfigExt;
use tokio_tcp;
use util::LOGGER;

/// Errors that can be returned by an ApiEndpoint implementation.
#[derive(Debug)]
Expand Down Expand Up @@ -243,13 +242,10 @@ impl ApiServer {
// TODO re-enable stop after investigation
//let tx = mem::replace(&mut self.shutdown_sender, None).unwrap();
//tx.send(()).expect("Failed to stop API server");
info!(LOGGER, "API server has been stoped");
info!("API server has been stoped");
true
} else {
error!(
LOGGER,
"Can't stop API server, it's not running or doesn't spport stop operation"
);
error!("Can't stop API server, it's not running or doesn't spport stop operation");
false
}
}
Expand All @@ -263,7 +259,7 @@ impl Handler for LoggingMiddleware {
req: Request<Body>,
mut handlers: Box<Iterator<Item = HandlerObj>>,
) -> ResponseFuture {
debug!(LOGGER, "REST call: {} {}", req.method(), req.uri().path());
debug!("REST call: {} {}", req.method(), req.uri().path());
handlers.next().unwrap().call(req, handlers)
}
}
2 changes: 1 addition & 1 deletion chain/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ lmdb-zero = "0.4.4"
failure = "0.1"
failure_derive = "0.1"
croaring = "0.3"
slog = { version = "~2.3", features = ["max_level_trace", "release_max_level_trace"] }
log = "0.4"
serde = "1"
serde_derive = "1"
chrono = "0.4.4"
Expand Down
49 changes: 11 additions & 38 deletions chain/src/chain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ use store;
use txhashset;
use types::{ChainAdapter, NoStatus, Options, Tip, TxHashSetRoots, TxHashsetWriteStatus};
use util::secp::pedersen::{Commitment, RangeProof};
use util::LOGGER;

/// Orphan pool size is limited by MAX_ORPHAN_SIZE
pub const MAX_ORPHAN_SIZE: usize = 200;
Expand Down Expand Up @@ -184,7 +183,6 @@ impl Chain {

let head = store.head()?;
debug!(
LOGGER,
"Chain init: {} @ {} [{}]",
head.total_difficulty.to_num(),
head.height,
Expand Down Expand Up @@ -261,7 +259,6 @@ impl Chain {
&self.orphans.add(orphan);

debug!(
LOGGER,
"process_block: orphan: {:?}, # orphans {}{}",
block_hash,
self.orphans.len(),
Expand All @@ -275,7 +272,6 @@ impl Chain {
}
ErrorKind::Unfit(ref msg) => {
debug!(
LOGGER,
"Block {} at {} is unfit at this time: {}",
b.hash(),
b.header.height,
Expand All @@ -285,7 +281,6 @@ impl Chain {
}
_ => {
info!(
LOGGER,
"Rejected block {} at {}: {:?}",
b.hash(),
b.header.height,
Expand Down Expand Up @@ -360,7 +355,6 @@ impl Chain {
// Is there an orphan in our orphans that we can now process?
loop {
trace!(
LOGGER,
"check_orphans: at {}, # orphans {}",
height,
self.orphans.len(),
Expand All @@ -373,7 +367,6 @@ impl Chain {
let orphans_len = orphans.len();
for (i, orphan) in orphans.into_iter().enumerate() {
debug!(
LOGGER,
"check_orphans: get block {} at {}{}",
orphan.block.hash(),
height,
Expand Down Expand Up @@ -402,7 +395,6 @@ impl Chain {

if initial_height != height {
debug!(
LOGGER,
"check_orphans: {} blocks accepted since height {}, remaining # orphans {}",
height - initial_height,
initial_height,
Expand Down Expand Up @@ -589,7 +581,6 @@ impl Chain {
txhashset: &txhashset::TxHashSet,
) -> Result<(), Error> {
debug!(
LOGGER,
"chain: validate_kernel_history: rewinding and validating kernel history (readonly)"
);

Expand All @@ -606,8 +597,8 @@ impl Chain {
})?;

debug!(
LOGGER,
"chain: validate_kernel_history: validated kernel root on {} headers", count,
"chain: validate_kernel_history: validated kernel root on {} headers",
count,
);

Ok(())
Expand Down Expand Up @@ -682,10 +673,7 @@ impl Chain {
self.validate_kernel_history(&header, &txhashset)?;

// all good, prepare a new batch and update all the required records
debug!(
LOGGER,
"chain: txhashset_write: rewinding a 2nd time (writeable)"
);
debug!("chain: txhashset_write: rewinding a 2nd time (writeable)");

let mut batch = self.store.batch()?;

Expand All @@ -709,10 +697,7 @@ impl Chain {
Ok(())
})?;

debug!(
LOGGER,
"chain: txhashset_write: finished validating and rebuilding"
);
debug!("chain: txhashset_write: finished validating and rebuilding");

status.on_save();

Expand All @@ -727,21 +712,15 @@ impl Chain {
// Commit all the changes to the db.
batch.commit()?;

debug!(
LOGGER,
"chain: txhashset_write: finished committing the batch (head etc.)"
);
debug!("chain: txhashset_write: finished committing the batch (head etc.)");

// Replace the chain txhashset with the newly built one.
{
let mut txhashset_ref = self.txhashset.write();
*txhashset_ref = txhashset;
}

debug!(
LOGGER,
"chain: txhashset_write: replaced our txhashset with the new one"
);
debug!("chain: txhashset_write: replaced our txhashset with the new one");

// Check for any orphan blocks and process them based on the new chain state.
self.check_orphans(header.height + 1);
Expand All @@ -763,14 +742,11 @@ impl Chain {
/// therefore be called judiciously.
pub fn compact(&self) -> Result<(), Error> {
if self.archive_mode {
debug!(
LOGGER,
"Blockchain compaction disabled, node running in archive mode."
);
debug!("Blockchain compaction disabled, node running in archive mode.");
return Ok(());
}

debug!(LOGGER, "Starting blockchain compaction.");
debug!("Starting blockchain compaction.");
// Compact the txhashset via the extension.
{
let mut txhashset = self.txhashset.write();
Expand All @@ -785,7 +761,7 @@ impl Chain {

// Now check we can still successfully validate the chain state after
// compacting, shouldn't be necessary once all of this is well-oiled
debug!(LOGGER, "Validating state after compaction.");
debug!("Validating state after compaction.");
self.validate(true)?;

// we need to be careful here in testing as 20 blocks is not that long
Expand All @@ -798,7 +774,6 @@ impl Chain {
}

debug!(
LOGGER,
"Compaction remove blocks older than {}.",
head.height - horizon
);
Expand Down Expand Up @@ -831,7 +806,7 @@ impl Chain {
}
}
batch.commit()?;
debug!(LOGGER, "Compaction removed {} blocks, done.", count);
debug!("Compaction removed {} blocks, done.", count);
Ok(())
}

Expand Down Expand Up @@ -1052,7 +1027,6 @@ fn setup_head(
if header.height > 0 && extension.batch.get_block_sums(&header.hash()).is_err()
{
debug!(
LOGGER,
"chain: init: building (missing) block sums for {} @ {}",
header.height,
header.hash()
Expand All @@ -1073,7 +1047,6 @@ fn setup_head(
}

debug!(
LOGGER,
"chain: init: rewinding and validating before we start... {} at {}",
header.hash(),
header.height,
Expand Down Expand Up @@ -1110,7 +1083,7 @@ fn setup_head(
// Save the block_sums to the db for use later.
batch.save_block_sums(&genesis.hash(), &BlockSums::default())?;

info!(LOGGER, "chain: init: saved genesis: {:?}", genesis.hash());
info!("chain: init: saved genesis: {:?}", genesis.hash());
}
Err(e) => return Err(ErrorKind::StoreErr(e, "chain init load head".to_owned()))?,
};
Expand Down
2 changes: 1 addition & 1 deletion chain/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate slog;
extern crate log;
extern crate chrono;
extern crate failure;
#[macro_use]
Expand Down
Loading

0 comments on commit 1195071

Please sign in to comment.