diff --git a/bindings/ldk_node.udl b/bindings/ldk_node.udl
index 805010356..d2425d6e5 100644
--- a/bindings/ldk_node.udl
+++ b/bindings/ldk_node.udl
@@ -74,7 +74,7 @@ interface Builder {
     void set_liquidity_source_lsps2(PublicKey node_id, SocketAddress address, string? token);
     void set_storage_dir_path(string storage_dir_path);
     void set_filesystem_logger(string? log_file_path, LogLevel? max_log_level);
-    void set_log_facade_logger(LogLevel? max_log_level);
+    void set_log_facade_logger();
     void set_custom_logger(LogWriter log_writer);
     void set_network(Network network);
     [Throws=BuildError]
diff --git a/src/builder.rs b/src/builder.rs
index 97ff1ea21..07606eb57 100644
--- a/src/builder.rs
+++ b/src/builder.rs
@@ -113,7 +113,7 @@ struct LiquiditySourceConfig {
 #[derive(Clone)]
 enum LogWriterConfig {
     File { log_file_path: Option<String>, max_log_level: Option<LogLevel> },
-    Log { max_log_level: Option<LogLevel> },
+    Log,
     Custom(Arc<dyn LogWriter>),
 }

@@ -125,9 +125,7 @@ impl std::fmt::Debug for LogWriterConfig {
                 .field("max_log_level", max_log_level)
                 .field("log_file_path", log_file_path)
                 .finish(),
-            LogWriterConfig::Log { max_log_level } => {
-                f.debug_tuple("Log").field(max_log_level).finish()
-            },
+            LogWriterConfig::Log => write!(f, "LogWriterConfig::Log"),
             LogWriterConfig::Custom(_) => {
                 f.debug_tuple("Custom").field(&"").finish()
             },
@@ -385,11 +383,8 @@ impl NodeBuilder {
     }

     /// Configures the [`Node`] instance to write logs to the [`log`](https://crates.io/crates/log) facade.
-    ///
-    /// If set, the `max_log_level` sets the maximum log level. Otherwise, the latter defaults to
-    /// [`DEFAULT_LOG_LEVEL`].
-    pub fn set_log_facade_logger(&mut self, max_log_level: Option<LogLevel>) -> &mut Self {
-        self.log_writer_config = Some(LogWriterConfig::Log { max_log_level });
+    pub fn set_log_facade_logger(&mut self) -> &mut Self {
+        self.log_writer_config = Some(LogWriterConfig::Log);
         self
     }

@@ -750,11 +745,8 @@ impl ArcedNodeBuilder {
     }

     /// Configures the [`Node`] instance to write logs to the [`log`](https://crates.io/crates/log) facade.
-    ///
-    /// If set, the `max_log_level` sets the maximum log level. Otherwise, the latter defaults to
-    /// [`DEFAULT_LOG_LEVEL`].
-    pub fn set_log_facade_logger(&self, log_level: Option<LogLevel>) {
-        self.inner.write().unwrap().set_log_facade_logger(log_level);
+    pub fn set_log_facade_logger(&self) {
+        self.inner.write().unwrap().set_log_facade_logger();
     }

     /// Configures the [`Node`] instance to write logs to the provided custom [`LogWriter`].
@@ -1421,10 +1413,7 @@ fn setup_logger(
             Logger::new_fs_writer(log_file_path, max_log_level)
                 .map_err(|_| BuildError::LoggerSetupFailed)?
         },
-        Some(LogWriterConfig::Log { max_log_level }) => {
-            let max_log_level = max_log_level.unwrap_or_else(|| DEFAULT_LOG_LEVEL);
-            Logger::new_log_facade(max_log_level)
-        },
+        Some(LogWriterConfig::Log) => Logger::new_log_facade(),

         Some(LogWriterConfig::Custom(custom_log_writer)) => {
             Logger::new_custom_writer(Arc::clone(&custom_log_writer))
diff --git a/src/logger.rs b/src/logger.rs
index 7a329fae6..073aa92bc 100644
--- a/src/logger.rs
+++ b/src/logger.rs
@@ -108,7 +108,7 @@ pub(crate) enum Writer {
     /// Writes logs to the file system.
     FileWriter { file_path: String, max_log_level: LogLevel },
     /// Forwards logs to the `log` facade.
-    LogFacadeWriter { max_log_level: LogLevel },
+    LogFacadeWriter,
     /// Forwards logs to a custom writer.
     CustomWriter(Arc<dyn LogWriter>),
 }
@@ -138,10 +138,7 @@ impl LogWriter for Writer {
                 .write_all(log.as_bytes())
                 .expect("Failed to write to log file")
             },
-            Writer::LogFacadeWriter { max_log_level } => {
-                if record.level < *max_log_level {
-                    return;
-                }
+            Writer::LogFacadeWriter => {
                 macro_rules! log_with_level {
                     ($log_level:expr, $target: expr, $($args:tt)*) => {
                         match $log_level {
@@ -186,8 +183,8 @@ impl Logger {
         Ok(Self { writer: Writer::FileWriter { file_path, max_log_level } })
     }

-    pub fn new_log_facade(max_log_level: LogLevel) -> Self {
-        Self { writer: Writer::LogFacadeWriter { max_log_level } }
+    pub fn new_log_facade() -> Self {
+        Self { writer: Writer::LogFacadeWriter }
     }

     pub fn new_custom_writer(log_writer: Arc<dyn LogWriter>) -> Self {
@@ -204,10 +201,7 @@ impl LdkLogger for Logger {
                 }
                 self.writer.log(record.into());
             },
-            Writer::LogFacadeWriter { max_log_level } => {
-                if record.level < *max_log_level {
-                    return;
-                }
+            Writer::LogFacadeWriter => {
                 self.writer.log(record.into());
             },
             Writer::CustomWriter(_arc) => {
diff --git a/tests/common/logging.rs b/tests/common/logging.rs
new file mode 100644
index 000000000..5d89474da
--- /dev/null
+++ b/tests/common/logging.rs
@@ -0,0 +1,146 @@
+use chrono::Utc;
+#[cfg(not(feature = "uniffi"))]
+use ldk_node::logger::LogRecord;
+use ldk_node::logger::{LogLevel, LogWriter};
+#[cfg(not(feature = "uniffi"))]
+use log::Record as LogFacadeRecord;
+use log::{Level as LogFacadeLevel, LevelFilter as LogFacadeLevelFilter, Log as LogFacadeLog};
+use std::sync::{Arc, Mutex};
+
+#[derive(Clone)]
+pub(crate) enum TestLogWriter {
+    FileWriter,
+    LogFacade,
+    Custom(Arc<dyn LogWriter>),
+}
+
+impl Default for TestLogWriter {
+    fn default() -> Self {
+        TestLogWriter::FileWriter
+    }
+}
+
+pub(crate) struct MockLogFacadeLogger {
+    logs: Arc<Mutex<Vec<String>>>,
+}
+
+impl MockLogFacadeLogger {
+    pub fn new() -> Self {
+        Self { logs: Arc::new(Mutex::new(Vec::new())) }
+    }
+
+    pub fn retrieve_logs(&self) -> Vec<String> {
+        self.logs.lock().unwrap().to_vec()
+    }
+}
+
+impl LogFacadeLog for MockLogFacadeLogger {
+    fn enabled(&self, _metadata: &log::Metadata) -> bool {
+        true
+    }
+
+    fn log(&self, record: &log::Record) {
+        let message = format!(
+            "{} {:<5} [{}:{}] {}",
+            Utc::now().format("%Y-%m-%d %H:%M:%S"),
+            record.level().to_string(),
+            record.module_path().unwrap(),
+            record.line().unwrap(),
+            record.args()
+        );
+        println!("{message}");
+        self.logs.lock().unwrap().push(message);
+    }
+
+    fn flush(&self) {}
+}
+
+#[cfg(not(feature = "uniffi"))]
+impl LogWriter for MockLogFacadeLogger {
+    fn log<'a>(&self, record: LogRecord) {
+        let record = MockLogRecord(record).into();
+        LogFacadeLog::log(self, &record);
+    }
+}
+
+#[cfg(not(feature = "uniffi"))]
+struct MockLogRecord<'a>(LogRecord<'a>);
+struct MockLogLevel(LogLevel);
+
+impl From<MockLogLevel> for LogFacadeLevel {
+    fn from(level: MockLogLevel) -> Self {
+        match level.0 {
+            LogLevel::Gossip | LogLevel::Trace => LogFacadeLevel::Trace,
+            LogLevel::Debug => LogFacadeLevel::Debug,
+            LogLevel::Info => LogFacadeLevel::Info,
+            LogLevel::Warn => LogFacadeLevel::Warn,
+            LogLevel::Error => LogFacadeLevel::Error,
+        }
+    }
+}
+
+#[cfg(not(feature = "uniffi"))]
+impl<'a> From<MockLogRecord<'a>> for LogFacadeRecord<'a> {
+    fn from(log_record: MockLogRecord<'a>) -> Self {
+        let log_record = log_record.0;
+        let level = MockLogLevel(log_record.level).into();
+
+        let mut record_builder = LogFacadeRecord::builder();
+        let record = record_builder
+            .level(level)
+            .module_path(Some(&log_record.module_path))
+            .line(Some(log_record.line))
+            .args(log_record.args);
+
+        record.build()
+    }
+}
+
+pub(crate) fn init_log_logger(level: LogFacadeLevelFilter) -> Arc<MockLogFacadeLogger> {
+    let logger = Arc::new(MockLogFacadeLogger::new());
+    log::set_boxed_logger(Box::new(logger.clone())).unwrap();
+    log::set_max_level(level);
+
+    logger
+}
+
+pub(crate) fn validate_log_entry(entry: &String) {
+    let parts = entry.splitn(4, ' ').collect::<Vec<&str>>();
+    assert_eq!(parts.len(), 4);
+    let (day, time, level, path_and_msg) = (parts[0], parts[1], parts[2], parts[3]);
+
+    let day_parts = day.split('-').collect::<Vec<&str>>();
+    assert_eq!(day_parts.len(), 3);
+    let (year, month, day) = (day_parts[0], day_parts[1], day_parts[2]);
+    assert!(year.len() == 4 && month.len() == 2 && day.len() == 2);
+    assert!(
+        year.chars().all(|c| c.is_digit(10))
+            && month.chars().all(|c| c.is_digit(10))
+            && day.chars().all(|c| c.is_digit(10))
+    );
+
+    let time_parts = time.split(':').collect::<Vec<&str>>();
+    assert_eq!(time_parts.len(), 3);
+    let (hour, minute, second) = (time_parts[0], time_parts[1], time_parts[2]);
+    assert!(hour.len() == 2 && minute.len() == 2 && second.len() == 2);
+    assert!(
+        hour.chars().all(|c| c.is_digit(10))
+            && minute.chars().all(|c| c.is_digit(10))
+            && second.chars().all(|c| c.is_digit(10))
+    );
+
+    assert!(["GOSSIP", "TRACE", "DEBUG", "INFO", "WARN", "ERROR"].contains(&level));
+
+    let path = path_and_msg.split_whitespace().next().unwrap();
+    assert!(path.contains('[') && path.contains(']'));
+    let module_path = &path[1..path.len() - 1];
+    let path_parts = module_path.rsplitn(2, ':').collect::<Vec<&str>>();
+    assert_eq!(path_parts.len(), 2);
+    let (line_number, module_name) = (path_parts[0], path_parts[1]);
+    assert!(module_name.chars().all(|c| c.is_alphanumeric() || c == '_' || c == ':'));
+    assert!(line_number.chars().all(|c| c.is_digit(10)));
+
+    let msg_start_index = path_and_msg.find(']').unwrap() + 1;
+    let msg = &path_and_msg[msg_start_index..];
+    assert!(!msg.is_empty());
+}
diff --git a/tests/common/mod.rs b/tests/common/mod.rs
index 2ad736823..9687bc3df 100644
--- a/tests/common/mod.rs
+++ b/tests/common/mod.rs
@@ -8,9 +8,12 @@
 #![cfg(any(test, cln_test, vss_test))]
 #![allow(dead_code)]

+pub(crate) mod logging;
+
+use logging::TestLogWriter;
+
 use ldk_node::config::{Config, EsploraSyncConfig};
 use ldk_node::io::sqlite_store::SqliteStore;
-use ldk_node::logger::LogLevel;
 use ldk_node::payment::{PaymentDirection, PaymentKind, PaymentStatus};
 use ldk_node::{
     Builder, CustomTlvRecord, Event, LightningBalance, Node, NodeError, PendingSweepBalance,
@@ -221,29 +224,29 @@ pub(crate) fn random_node_alias() -> Option<NodeAlias> {
     Some(NodeAlias(bytes))
 }

-pub(crate) fn random_config(anchor_channels: bool) -> Config {
-    let mut config = Config::default();
+pub(crate) fn random_config(anchor_channels: bool) -> TestConfig {
+    let mut node_config = Config::default();

     if !anchor_channels {
-        config.anchor_channels_config = None;
+        node_config.anchor_channels_config = None;
     }

-    config.network = Network::Regtest;
-    println!("Setting network: {}", config.network);
+    node_config.network = Network::Regtest;
+    println!("Setting network: {}", node_config.network);

     let rand_dir = random_storage_path();
     println!("Setting random LDK storage dir: {}", rand_dir.display());
-    config.storage_dir_path = rand_dir.to_str().unwrap().to_owned();
+    node_config.storage_dir_path = rand_dir.to_str().unwrap().to_owned();

     let rand_listening_addresses = random_listening_addresses();
     println!("Setting random LDK listening addresses: {:?}", rand_listening_addresses);
-    config.listening_addresses = Some(rand_listening_addresses);
+    node_config.listening_addresses = Some(rand_listening_addresses);

     let alias = random_node_alias();
     println!("Setting random LDK node alias: {:?}", alias);
-    config.node_alias = alias;
+    node_config.node_alias = alias;

-    config
+    TestConfig { node_config, ..Default::default() }
 }

 #[cfg(feature = "uniffi")]
@@ -257,6 +260,12 @@ pub(crate) enum TestChainSource<'a> {
     BitcoindRpc(&'a BitcoinD),
 }

+#[derive(Clone, Default)]
+pub(crate) struct TestConfig {
+    pub node_config: Config,
+    pub log_writer: TestLogWriter,
+}
+
 macro_rules! setup_builder {
     ($builder: ident, $config: expr) => {
         #[cfg(feature = "uniffi")]
@@ -279,10 +288,11 @@ pub(crate) fn setup_two_nodes(
     println!("\n== Node B ==");
     let mut config_b = random_config(anchor_channels);
     if allow_0conf {
-        config_b.trusted_peers_0conf.push(node_a.node_id());
+        config_b.node_config.trusted_peers_0conf.push(node_a.node_id());
     }
     if anchor_channels && anchors_trusted_no_reserve {
         config_b
+            .node_config
             .anchor_channels_config
             .as_mut()
             .unwrap()
@@ -294,9 +304,9 @@
 pub(crate) fn setup_node(
-    chain_source: &TestChainSource, config: Config, seed_bytes: Option<Vec<u8>>,
+    chain_source: &TestChainSource, config: TestConfig, seed_bytes: Option<Vec<u8>>,
 ) -> TestNode {
-    setup_builder!(builder, config);
+    setup_builder!(builder, config.node_config);
     match chain_source {
         TestChainSource::Esplora(electrsd) => {
             let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap());
@@ -315,14 +325,23 @@ pub(crate) fn setup_node(
         },
     }

-    let log_file_path = format!("{}/{}", config.storage_dir_path, "ldk_node.log");
-    builder.set_filesystem_logger(Some(log_file_path), Some(LogLevel::Gossip));
+    match &config.log_writer {
+        TestLogWriter::FileWriter => {
+            builder.set_filesystem_logger(None, None);
+        },
+        TestLogWriter::LogFacade => {
+            builder.set_log_facade_logger();
+        },
+        TestLogWriter::Custom(custom_log_writer) => {
+            builder.set_custom_logger(Arc::clone(custom_log_writer));
+        },
+    }

     if let Some(seed) = seed_bytes {
         builder.set_entropy_seed_bytes(seed).unwrap();
     }

-    let test_sync_store = Arc::new(TestSyncStore::new(config.storage_dir_path.into()));
+    let test_sync_store = Arc::new(TestSyncStore::new(config.node_config.storage_dir_path.into()));
     let node = builder.build_with_store(test_sync_store).unwrap();
     node.start().unwrap();
     assert!(node.status().is_running);
diff --git a/tests/integration_tests_cln.rs b/tests/integration_tests_cln.rs
index 02d091215..875922ce2 100644
--- a/tests/integration_tests_cln.rs
+++ b/tests/integration_tests_cln.rs
@@ -45,7 +45,7 @@ fn test_cln() {

     // Setup LDK Node
     let config = common::random_config(true);
-    let mut builder = Builder::from_config(config);
+    let mut builder = Builder::from_config(config.node_config);
     builder.set_chain_source_esplora("http://127.0.0.1:3002".to_string(), None);
     let node = builder.build().unwrap();

diff --git a/tests/integration_tests_rust.rs b/tests/integration_tests_rust.rs
index 56f5a0fba..8dc0ca50a 100644
--- a/tests/integration_tests_rust.rs
+++ b/tests/integration_tests_rust.rs
@@ -10,6 +10,7 @@ mod common;
 use common::{
     do_channel_full_cycle, expect_channel_pending_event, expect_channel_ready_event, expect_event,
     expect_payment_received_event, expect_payment_successful_event, generate_blocks_and_wait,
+    logging::{init_log_logger, validate_log_entry, TestLogWriter},
     open_channel, premine_and_distribute_funds, random_config, setup_bitcoind_and_electrsd,
     setup_builder, setup_node, setup_two_nodes, wait_for_tx, TestChainSource, TestSyncStore,
 };
@@ -30,6 +31,7 @@
 use bitcoincore_rpc::RpcApi;
 use bitcoin::hashes::Hash;
 use bitcoin::Amount;
 use lightning_invoice::{Bolt11InvoiceDescription, Description};
+use log::LevelFilter;

 use std::sync::Arc;
@@ -128,7 +130,7 @@ fn multi_hop_sending() {
     let mut sync_config = EsploraSyncConfig::default();
     sync_config.onchain_wallet_sync_interval_secs = 100000;
     sync_config.lightning_wallet_sync_interval_secs = 100000;
-    setup_builder!(builder, config);
+    setup_builder!(builder, config.node_config);
     builder.set_chain_source_esplora(esplora_url.clone(), Some(sync_config));
     let node = builder.build().unwrap();
     node.start().unwrap();
@@ -223,12 +225,12 @@ fn start_stop_reinit() {
     let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap());

     let test_sync_store: Arc<TestSyncStore> =
-        Arc::new(TestSyncStore::new(config.storage_dir_path.clone().into()));
+        Arc::new(TestSyncStore::new(config.node_config.storage_dir_path.clone().into()));

     let mut sync_config = EsploraSyncConfig::default();
     sync_config.onchain_wallet_sync_interval_secs = 100000;
     sync_config.lightning_wallet_sync_interval_secs = 100000;
-    setup_builder!(builder, config);
+    setup_builder!(builder, config.node_config);
     builder.set_chain_source_esplora(esplora_url.clone(), Some(sync_config));

     let node = builder.build_with_store(Arc::clone(&test_sync_store)).unwrap();
@@ -252,7 +254,7 @@ fn start_stop_reinit() {
     node.sync_wallets().unwrap();
     assert_eq!(node.list_balances().spendable_onchain_balance_sats, expected_amount.to_sat());

-    let log_file = format!("{}/ldk_node.log", config.clone().storage_dir_path);
+    let log_file = format!("{}/ldk_node.log", config.node_config.clone().storage_dir_path);
     assert!(std::path::Path::new(&log_file).exists());

     node.stop().unwrap();
@@ -265,7 +267,7 @@ fn start_stop_reinit() {
     assert_eq!(node.stop(), Err(NodeError::NotRunning));
     drop(node);

-    setup_builder!(builder, config);
+    setup_builder!(builder, config.node_config);
     builder.set_chain_source_esplora(esplora_url.clone(), Some(sync_config));

     let reinitialized_node = builder.build_with_store(Arc::clone(&test_sync_store)).unwrap();
@@ -1066,7 +1068,7 @@ fn lsps2_client_service_integration() {
     };

     let service_config = random_config(true);
-    setup_builder!(service_builder, service_config);
+    setup_builder!(service_builder, service_config.node_config);
     service_builder.set_chain_source_esplora(esplora_url.clone(), Some(sync_config));
     service_builder.set_liquidity_provider_lsps2(lsps2_service_config);
     let service_node = service_builder.build().unwrap();
@@ -1076,14 +1078,14 @@ fn lsps2_client_service_integration() {
     let service_addr = service_node.listening_addresses().unwrap().first().unwrap().clone();

     let client_config = random_config(true);
-    setup_builder!(client_builder, client_config);
+    setup_builder!(client_builder, client_config.node_config);
     client_builder.set_chain_source_esplora(esplora_url.clone(), Some(sync_config));
     client_builder.set_liquidity_source_lsps2(service_node_id, service_addr, None);
     let client_node = client_builder.build().unwrap();
     client_node.start().unwrap();

     let payer_config = random_config(true);
-    setup_builder!(payer_builder, payer_config);
+    setup_builder!(payer_builder, payer_config.node_config);
     payer_builder.set_chain_source_esplora(esplora_url.clone(), Some(sync_config));
     let payer_node = payer_builder.build().unwrap();
     payer_node.start().unwrap();
@@ -1144,3 +1146,21 @@
     let channel_value_sats = client_node.list_channels().first().unwrap().channel_value_sats;
     assert_eq!(channel_value_sats, expected_channel_size_sat);
 }
+
+#[test]
+fn facade_logging() {
+    let (_bitcoind, electrsd) = setup_bitcoind_and_electrsd();
+    let chain_source = TestChainSource::Esplora(&electrsd);
+
+    let logger = init_log_logger(LevelFilter::Trace);
+    let mut config = random_config(false);
+    config.log_writer = TestLogWriter::LogFacade;
+
+    println!("== Facade logging starts ==");
+    let _node = setup_node(&chain_source, config, None);
+
+    assert!(!logger.retrieve_logs().is_empty());
+    for entry in logger.retrieve_logs().iter() {
+        validate_log_entry(entry);
+    }
+}
diff --git a/tests/integration_tests_vss.rs b/tests/integration_tests_vss.rs
index 525c1f1f1..9d6ec158c 100644
--- a/tests/integration_tests_vss.rs
+++ b/tests/integration_tests_vss.rs
@@ -18,7 +18,7 @@ fn channel_full_cycle_with_vss_store() {
     println!("== Node A ==");
     let esplora_url = format!("http://{}", electrsd.esplora_url.as_ref().unwrap());
     let config_a = common::random_config(true);
-    let mut builder_a = Builder::from_config(config_a);
+    let mut builder_a = Builder::from_config(config_a.node_config);
     builder_a.set_chain_source_esplora(esplora_url.clone(), None);
     let vss_base_url = std::env::var("TEST_VSS_BASE_URL").unwrap();
     let node_a = builder_a
@@ -32,7 +32,7 @@

     println!("\n== Node B ==");
     let config_b = common::random_config(true);
-    let mut builder_b = Builder::from_config(config_b);
+    let mut builder_b = Builder::from_config(config_b.node_config);
     builder_b.set_chain_source_esplora(esplora_url.clone(), None);
     let node_b = builder_b
         .build_with_vss_store_and_fixed_headers(
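
A minimal usage sketch of the builder API after this change, for illustration only (it is not part of the diff above). It assumes the application installs its own `log`-compatible logger first; `env_logger` is used purely as an example and is not a dependency introduced by this patch. Since `set_log_facade_logger()` no longer takes a `max_log_level`, filtering is controlled entirely through the `log` facade, which is also how the new `tests/common/logging.rs` helper drives the `facade_logging` test via `log::set_boxed_logger` and `log::set_max_level`.

    use ldk_node::Builder;
    use log::LevelFilter;

    fn main() {
        // Any `log`-compatible logger works; env_logger is only an example here
        // (assumed dependency, not part of this patch). The maximum level is now
        // chosen via the `log` facade rather than passed to the builder.
        env_logger::builder().filter_level(LevelFilter::Debug).init();

        let mut builder = Builder::new();
        // No level argument anymore; the facade's filter applies.
        builder.set_log_facade_logger();

        // ... configure network, chain source, etc., then build and start the node.
    }

The design choice this reflects: when logs are forwarded to the `log` facade, level filtering is the facade's responsibility, so the per-writer `max_log_level` only remains where ldk-node itself owns the writer (the filesystem logger).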