diff --git a/Cargo.lock b/Cargo.lock
index 1d7d401..6331e76 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -189,6 +189,8 @@ dependencies = [
  "lexarg",
  "lexarg-error",
  "libtest-lexarg",
+ "serde",
+ "serde_json",
 ]
 
 [[package]]
diff --git a/crates/libtest2-harness/Cargo.toml b/crates/libtest2-harness/Cargo.toml
index 7b7253f..43052fd 100644
--- a/crates/libtest2-harness/Cargo.toml
+++ b/crates/libtest2-harness/Cargo.toml
@@ -24,6 +24,7 @@ pre-release-replacements = [
 
 [features]
 default = []
+json = ["dep:serde", "dep:serde_json"]
 
 [dependencies]
 anstream = "0.3.1"
@@ -31,5 +32,7 @@ anstyle = "1.0.0"
 lexarg = { version = "0.1.0", path = "../lexarg" }
 lexarg-error = { version = "0.1.0", path = "../lexarg-error" }
 libtest-lexarg = { version = "0.1.0", path = "../libtest-lexarg" }
+serde = { version = "1.0.160", features = ["derive"], optional = true }
+serde_json = { version = "1.0.96", optional = true }
 
 [dev-dependencies]
diff --git a/crates/libtest2-harness/src/case.rs b/crates/libtest2-harness/src/case.rs
index 7c94930..33cc4a4 100644
--- a/crates/libtest2-harness/src/case.rs
+++ b/crates/libtest2-harness/src/case.rs
@@ -50,84 +50,62 @@ pub enum Source {
 pub type RunResult = Result<(), RunError>;
 
 #[derive(Debug)]
-pub struct RunError(pub(crate) RunErrorInner);
+pub struct RunError {
+    status: notify::RunStatus,
+    cause: Option<Box<dyn std::error::Error + Send + Sync>>,
+}
 
 impl RunError {
-    pub fn cause(cause: impl Into<Fail>) -> Self {
-        Self(RunErrorInner::Failed(cause.into()))
+    pub fn with_cause(cause: impl std::error::Error + Send + Sync + 'static) -> Self {
+        Self {
+            status: notify::RunStatus::Failed,
+            cause: Some(Box::new(cause)),
+        }
     }
 
-    pub fn msg(cause: impl std::fmt::Display) -> Self {
-        Self::cause(FailMessage(cause.to_string()))
+    pub fn fail(cause: impl std::fmt::Display) -> Self {
+        Self::with_cause(Message(cause.to_string()))
     }
 
     pub(crate) fn ignore() -> Self {
-        Self(RunErrorInner::Ignored(Ignore { reason: None }))
+        Self {
+            status: notify::RunStatus::Ignored,
+            cause: None,
+        }
     }
 
     pub(crate) fn ignore_for(reason: String) -> Self {
-        Self(RunErrorInner::Ignored(Ignore {
-            reason: Some(reason),
-        }))
+        Self {
+            status: notify::RunStatus::Ignored,
+            cause: Some(Box::new(Message(reason))),
+        }
     }
-}
 
-impl<E> From<E> for RunError
-where
-    E: std::error::Error + Send + Sync + 'static,
-{
-    fn from(error: E) -> Self {
-        Self::cause(error)
+    pub(crate) fn status(&self) -> notify::RunStatus {
+        self.status
     }
-}
-
-#[derive(Debug)]
-pub(crate) enum RunErrorInner {
-    Failed(Fail),
-    Ignored(Ignore),
-}
 
-#[derive(Debug)]
-pub struct Fail {
-    inner: Box<dyn std::error::Error + Send + Sync>,
+    pub(crate) fn cause(&self) -> Option<&(dyn std::error::Error + Send + Sync)> {
+        self.cause.as_ref().map(|b| b.as_ref())
+    }
 }
 
-impl<E> From<E> for Fail
+impl<E> From<E> for RunError
 where
     E: std::error::Error + Send + Sync + 'static,
 {
-    #[cold]
     fn from(error: E) -> Self {
-        Fail {
-            inner: Box::new(error),
-        }
-    }
-}
-
-impl std::fmt::Display for Fail {
-    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        self.inner.fmt(formatter)
+        Self::with_cause(error)
     }
 }
 
 #[derive(Debug)]
-pub struct FailMessage(String);
+struct Message(String);
 
-impl std::fmt::Display for FailMessage {
+impl std::fmt::Display for Message {
     fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         self.0.fmt(formatter)
     }
 }
 
-impl std::error::Error for FailMessage {}
-
-#[derive(Debug)]
-pub struct Ignore {
-    reason: Option<String>,
-}
-
-impl Ignore {
-    pub fn reason(&self) -> Option<&str> {
-        self.reason.as_deref()
-    }
-}
+impl std::error::Error for Message {}
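Note on the case.rs change above: the `Fail`/`Ignore` plumbing is folded into one `RunError` that carries a `notify::RunStatus` plus an optional boxed cause. A rough usage sketch, written against the signatures shown above and the style of `examples/simple.rs` (the `check_port` helper, its values, and the `libtest2_mimic` re-exports are assumptions for illustration, not part of the patch):

    use libtest2_mimic::{RunError, RunResult, State};

    // Illustrative test body exercising the three ways a case can end.
    fn check_port(state: &State) -> RunResult {
        state.ignore_for("network")?; // may end the case as ignored (RunError::ignore_for under the hood)
        let port: u16 = "8080"
            .parse()
            .map_err(|e| RunError::with_cause(e))?; // keep the real std::error::Error as the cause
        if port == 0 {
            return Err(RunError::fail("port must be non-zero")); // display-only failure message
        }
        Ok(())
    }
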
diff --git a/crates/libtest2-harness/src/harness.rs b/crates/libtest2-harness/src/harness.rs
index 469541a..87f9f30 100644
--- a/crates/libtest2-harness/src/harness.rs
+++ b/crates/libtest2-harness/src/harness.rs
@@ -1,3 +1,5 @@
+use libtest_lexarg::OutputFormat;
+
 use crate::*;
 
 pub struct Harness {
@@ -30,13 +32,10 @@ impl Harness {
     pub fn main(mut self) -> ! {
         let mut parser = cli::Parser::new(&self.raw);
 
-        let opts = match parse(&mut parser) {
-            Ok(opts) => opts,
-            Err(err) => {
-                eprintln!("{}", err);
-                std::process::exit(1);
-            }
-        };
+        let opts = parse(&mut parser).unwrap_or_else(|err| {
+            eprintln!("{}", err);
+            std::process::exit(1)
+        });
 
         match opts.color {
             libtest_lexarg::ColorConfig::AutoColor => anstream::ColorChoice::Auto,
@@ -45,45 +44,27 @@
         }
         .write_global();
 
-        let total = self.cases.len();
-        let matches_filter = |case: &dyn Case, filter: &str| {
-            let test_name = case.name();
+        let mut notifier = notifier(&opts).unwrap_or_else(|err| {
+            eprintln!("{}", err);
+            std::process::exit(1)
+        });
+        discover(&opts, &mut self.cases, notifier.as_mut()).unwrap_or_else(|err| {
+            eprintln!("{}", err);
+            std::process::exit(1)
+        });
 
-            match opts.filter_exact {
-                true => test_name == filter,
-                false => test_name.contains(filter),
+        if !opts.list {
+            match run(&opts, &self.cases, notifier.as_mut()) {
+                Ok(true) => {}
+                Ok(false) => std::process::exit(ERROR_EXIT_CODE),
+                Err(e) => {
+                    eprintln!("error: io error when listing tests: {e:?}");
+                    std::process::exit(ERROR_EXIT_CODE)
+                }
             }
-        };
-        // Remove tests that don't match the test filter
-        if !opts.filters.is_empty() {
-            self.cases.retain(|case| {
-                opts.filters
-                    .iter()
-                    .any(|filter| matches_filter(case.as_ref(), filter))
-            });
-        }
-        // Skip tests that match any of the skip filters
-        if !opts.skip.is_empty() {
-            self.cases
-                .retain(|case| !opts.skip.iter().any(|sf| matches_filter(case.as_ref(), sf)));
         }
-        let num_filtered_out = total - self.cases.len();
 
-        self.cases
-            .sort_unstable_by_key(|case| case.name().to_owned());
-        let seed = crate::shuffle::get_shuffle_seed(&opts);
-        if let Some(seed) = seed {
-            crate::shuffle::shuffle_tests(seed, &mut self.cases);
-        }
-
-        match run(&opts, &self.cases, num_filtered_out) {
-            Ok(true) => std::process::exit(0),
-            Ok(false) => std::process::exit(ERROR_EXIT_CODE),
-            Err(e) => {
-                eprintln!("error: io error when listing tests: {e:?}");
-                std::process::exit(ERROR_EXIT_CODE)
-            }
-        }
+        std::process::exit(0)
     }
 }
 
@@ -138,12 +119,81 @@ fn parse(parser: &mut cli::Parser) -> cli::Result<libtest_lexarg::TestOpts> {
     test_opts.finish()
 }
 
+fn notifier(opts: &libtest_lexarg::TestOpts) -> std::io::Result<Box<dyn notify::Notifier>> {
+    let stdout = anstream::stdout();
+    let notifier: Box<dyn notify::Notifier> = match opts.format {
+        #[cfg(feature = "json")]
+        OutputFormat::Json => Box::new(notify::JsonNotifier::new(stdout)),
+        #[cfg(not(feature = "json"))]
+        OutputFormat::Json => {
+            return Err(std::io::Error::new(std::io::ErrorKind::Other, ""));
+        }
+        _ if opts.list => Box::new(notify::TerseListNotifier::new(stdout)),
+        OutputFormat::Pretty => Box::new(notify::PrettyRunNotifier::new(stdout)),
+        OutputFormat::Terse => Box::new(notify::TerseRunNotifier::new(stdout)),
+        OutputFormat::Junit => todo!(),
+    };
+    Ok(notifier)
+}
+
+fn discover(
+    opts: &libtest_lexarg::TestOpts,
+    cases: &mut Vec<Box<dyn Case>>,
+    notifier: &mut dyn notify::Notifier,
+) -> std::io::Result<()> {
+    notifier.notify(notify::Event::DiscoverStart)?;
+    let timer = std::time::Instant::now();
+
+    // Do this first so it applies to both discover and running
+    cases.sort_unstable_by_key(|case| case.name().to_owned());
+    let seed = shuffle::get_shuffle_seed(&opts);
+    if let Some(seed) = seed {
+        shuffle::shuffle_tests(seed, cases);
+    }
+
+    let matches_filter = |case: &dyn Case, filter: &str| {
+        let test_name = case.name();
+
+        match opts.filter_exact {
+            true => test_name == filter,
+            false => test_name.contains(filter),
+        }
+    };
+    let mut retain_cases = Vec::with_capacity(cases.len());
+    for case in cases.iter() {
+        let filtered_in = opts.filters.is_empty()
+            || opts
+                .filters
+                .iter()
+                .any(|filter| matches_filter(case.as_ref(), filter));
+        let filtered_out =
+            !opts.skip.is_empty() && opts.skip.iter().any(|sf| matches_filter(case.as_ref(), sf));
+        let retain_case = filtered_in && !filtered_out;
+        retain_cases.push(retain_case);
+        notifier.notify(notify::Event::DiscoverCase {
+            name: case.name().to_owned(),
+            mode: notify::CaseMode::Test,
+            run: retain_case,
+        })?;
+    }
+    let mut retain_cases = retain_cases.into_iter();
+    cases.retain(|_| retain_cases.next().unwrap());
+
+    notifier.notify(notify::Event::DiscoverComplete {
+        elapsed_s: notify::Elapsed(timer.elapsed()),
+        seed,
+    })?;
+
+    Ok(())
+}
+
 fn run(
     opts: &libtest_lexarg::TestOpts,
     cases: &[Box<dyn Case>],
-    num_filtered_out: usize,
+    notifier: &mut dyn notify::Notifier,
 ) -> std::io::Result<bool> {
-    let mut outcomes = Outcomes::new(&opts, cases, num_filtered_out)?;
+    notifier.notify(notify::Event::SuiteStart)?;
+    let timer = std::time::Instant::now();
 
     if opts.force_run_in_process {
         todo!("`--force-run-in-process` is not yet supported");
@@ -166,28 +216,43 @@ fn run(
     if opts.options.panic_abort {
         todo!("panic-abort is not yet supported");
    }
+    if opts.logfile.is_some() {
+        todo!("`--logfile` is not yet supported");
+    }
 
-    if opts.list {
-        outcomes.list(cases)?;
-        Ok(true)
-    } else {
-        outcomes.start_suite()?;
+    let mut state = State::new();
+    let run_ignored = match opts.run_ignored {
+        libtest_lexarg::RunIgnored::Yes | libtest_lexarg::RunIgnored::Only => true,
+        libtest_lexarg::RunIgnored::No => false,
+    };
+    state.run_ignored(run_ignored);
 
-        let mut state = State::new();
-        let run_ignored = match opts.run_ignored {
-            libtest_lexarg::RunIgnored::Yes | libtest_lexarg::RunIgnored::Only => true,
-            libtest_lexarg::RunIgnored::No => false,
-        };
-        state.run_ignored(run_ignored);
+    let mut success = true;
+    for case in cases {
+        notifier.notify(notify::Event::CaseStart {
+            name: case.name().to_owned(),
+        })?;
+        let timer = std::time::Instant::now();
 
-        for case in cases {
-            outcomes.start_case(case.as_ref())?;
-            let outcome = case.run(&state);
-            outcomes.finish_case(case.as_ref(), outcome)?;
-        }
+        let outcome = case.run(&state);
 
-        outcomes.finish_suite()?;
+        let err = outcome.as_ref().err();
+        let status = err.map(|e| e.status());
+        let message = err.and_then(|e| e.cause().map(|c| c.to_string()));
+        notifier.notify(notify::Event::CaseComplete {
+            name: case.name().to_owned(),
+            mode: notify::CaseMode::Test,
+            status,
+            message,
+            elapsed_s: Some(notify::Elapsed(timer.elapsed())),
+        })?;
 
-        Ok(!outcomes.has_failed())
+        success &= status != Some(notify::RunStatus::Failed);
     }
+
+    notifier.notify(notify::Event::SuiteComplete {
+        elapsed_s: notify::Elapsed(timer.elapsed()),
+    })?;
+
+    Ok(success)
 }
diff --git a/crates/libtest2-harness/src/lib.rs b/crates/libtest2-harness/src/lib.rs
index 252e5b5..e779a64 100644
--- a/crates/libtest2-harness/src/lib.rs
+++ b/crates/libtest2-harness/src/lib.rs
@@ -22,7 +22,7 @@
 
 mod case;
 mod harness;
-mod outcomes;
+mod notify;
 mod shuffle;
 mod state;
@@ -30,5 +30,4 @@ pub mod cli;
 
 pub use case::*;
 pub use harness::*;
-pub(crate) use outcomes::*;
 pub use state::*;
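A side note on the `discover` function added in harness.rs above: it pre-computes a boolean mask so that one `DiscoverCase` event can be emitted per case before the vector is mutated, then applies the mask through `retain`. The trick in isolation (the values are illustrative, not taken from the test suite):

    fn main() {
        let mut cases = vec!["bear", "bunny", "cat"];
        let mask = vec![true, false, true]; // one entry per case, as computed in discover()
        let mut mask = mask.into_iter();
        // Vec::retain visits elements in their original order, so pulling from the
        // mask iterator keeps exactly the flagged cases.
        cases.retain(|_| mask.next().unwrap());
        assert_eq!(cases, ["bear", "cat"]);
    }
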
diff --git a/crates/libtest2-harness/src/notify/json.rs b/crates/libtest2-harness/src/notify/json.rs
new file mode 100644
index 0000000..3597933
--- /dev/null
+++ b/crates/libtest2-harness/src/notify/json.rs
@@ -0,0 +1,20 @@
+use super::Event;
+
+#[derive(Debug)]
+pub(crate) struct JsonNotifier<W: std::io::Write> {
+    writer: W,
+}
+
+impl<W: std::io::Write> JsonNotifier<W> {
+    pub(crate) fn new(writer: W) -> Self {
+        Self { writer }
+    }
+}
+
+impl<W: std::io::Write> super::Notifier for JsonNotifier<W> {
+    fn notify(&mut self, event: Event) -> std::io::Result<()> {
+        let event = serde_json::to_string(&event)?;
+        writeln!(self.writer, "{}", event)?;
+        Ok(())
+    }
+}
diff --git a/crates/libtest2-harness/src/notify/mod.rs b/crates/libtest2-harness/src/notify/mod.rs
new file mode 100644
index 0000000..8c69997
--- /dev/null
+++ b/crates/libtest2-harness/src/notify/mod.rs
@@ -0,0 +1,90 @@
+#[cfg(feature = "json")]
+mod json;
+mod pretty;
+mod summary;
+mod terse;
+
+#[cfg(feature = "json")]
+pub(crate) use json::*;
+pub(crate) use pretty::*;
+pub(crate) use summary::*;
+pub(crate) use terse::*;
+
+pub(crate) trait Notifier {
+    fn notify(&mut self, event: Event) -> std::io::Result<()>;
+}
+
+#[derive(Clone, Debug)]
+#[cfg_attr(feature = "json", derive(serde::Serialize))]
+#[cfg_attr(feature = "json", serde(rename_all = "kebab-case"))]
+#[cfg_attr(feature = "json", serde(tag = "event"))]
+pub(crate) enum Event {
+    DiscoverStart,
+    DiscoverCase {
+        name: String,
+        mode: CaseMode,
+        run: bool,
+    },
+    DiscoverComplete {
+        #[allow(dead_code)]
+        elapsed_s: Elapsed,
+        seed: Option<u64>,
+    },
+    SuiteStart,
+    CaseStart {
+        name: String,
+    },
+    CaseComplete {
+        name: String,
+        #[allow(dead_code)]
+        mode: CaseMode,
+        status: Option<RunStatus>,
+        message: Option<String>,
+        #[allow(dead_code)]
+        elapsed_s: Option<Elapsed>,
+    },
+    SuiteComplete {
+        elapsed_s: Elapsed,
+    },
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[cfg_attr(feature = "json", derive(serde::Serialize))]
+#[cfg_attr(feature = "json", serde(rename_all = "kebab-case"))]
+pub(crate) enum CaseMode {
+    Test,
+    #[allow(dead_code)]
+    Bench,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+#[cfg_attr(feature = "json", derive(serde::Serialize))]
+#[cfg_attr(feature = "json", serde(rename_all = "kebab-case"))]
+pub(crate) enum RunStatus {
+    Ignored,
+    Failed,
+}
+
+#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]
+#[cfg_attr(feature = "json", derive(serde::Serialize))]
+#[cfg_attr(feature = "json", serde(into = "String"))]
+pub(crate) struct Elapsed(pub std::time::Duration);
+
+impl std::fmt::Display for Elapsed {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{:.3}s", self.0.as_secs_f64())
+    }
+}
+
+impl From<Elapsed> for String {
+    fn from(elapsed: Elapsed) -> Self {
+        elapsed.0.as_secs_f64().to_string()
+    }
+}
+
+const FAILED: anstyle::Style =
+    anstyle::Style::new().fg_color(Some(anstyle::Color::Ansi(anstyle::AnsiColor::Red)));
+const OK: anstyle::Style =
+    anstyle::Style::new().fg_color(Some(anstyle::Color::Ansi(anstyle::AnsiColor::Green)));
+const IGNORED: anstyle::Style =
+    anstyle::Style::new().fg_color(Some(anstyle::Color::Ansi(anstyle::AnsiColor::Yellow)));
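To make the shape of the `Notifier` trait introduced above concrete, here is a minimal sketch of an extra in-crate implementation (illustrative only; `FailureCounter` is not part of the patch and would live as another sibling module under src/notify/):

    use super::{Event, Notifier, RunStatus};

    // Illustrative Notifier that only counts failed cases and stays silent otherwise.
    #[derive(Default)]
    struct FailureCounter {
        failed: usize,
    }

    impl Notifier for FailureCounter {
        fn notify(&mut self, event: Event) -> std::io::Result<()> {
            if let Event::CaseComplete { status, .. } = event {
                if status == Some(RunStatus::Failed) {
                    self.failed += 1;
                }
            }
            Ok(())
        }
    }
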
diff --git a/crates/libtest2-harness/src/notify/pretty.rs b/crates/libtest2-harness/src/notify/pretty.rs
new file mode 100644
index 0000000..7d73a72
--- /dev/null
+++ b/crates/libtest2-harness/src/notify/pretty.rs
@@ -0,0 +1,57 @@
+use super::Event;
+use super::RunStatus;
+use super::FAILED;
+use super::IGNORED;
+use super::OK;
+
+#[derive(Debug)]
+pub(crate) struct PrettyRunNotifier<W: std::io::Write> {
+    writer: W,
+    summary: super::Summary,
+    name_width: usize,
+}
+
+impl<W: std::io::Write> PrettyRunNotifier<W> {
+    pub(crate) fn new(writer: W) -> Self {
+        Self {
+            writer,
+            summary: Default::default(),
+            name_width: 0,
+        }
+    }
+}
+
+impl<W: std::io::Write> super::Notifier for PrettyRunNotifier<W> {
+    fn notify(&mut self, event: Event) -> std::io::Result<()> {
+        self.summary.notify(event.clone())?;
+        match event {
+            Event::DiscoverStart => {}
+            Event::DiscoverCase { name, run, .. } => {
+                if run {
+                    self.name_width = name.len().max(self.name_width);
+                }
+            }
+            Event::DiscoverComplete { .. } => {}
+            Event::SuiteStart => {
+                self.summary.write_start(&mut self.writer)?;
+            }
+            Event::CaseStart { name, .. } => {
+                write!(self.writer, "test {: <1$} ... ", name, self.name_width)?;
+                self.writer.flush()?;
+            }
+            Event::CaseComplete { status, .. } => {
+                let (s, style) = match status {
+                    Some(RunStatus::Ignored) => ("ignored", IGNORED),
+                    Some(RunStatus::Failed) => ("FAILED", FAILED),
+                    None => ("ok", OK),
+                };
+
+                writeln!(self.writer, "{}{s}{}", style.render(), style.render_reset())?;
+            }
+            Event::SuiteComplete { .. } => {
+                self.summary.write_complete(&mut self.writer)?;
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/crates/libtest2-harness/src/notify/summary.rs b/crates/libtest2-harness/src/notify/summary.rs
new file mode 100644
index 0000000..b8fc70a
--- /dev/null
+++ b/crates/libtest2-harness/src/notify/summary.rs
@@ -0,0 +1,128 @@
+use super::Event;
+use super::RunStatus;
+use super::FAILED;
+use super::OK;
+
+#[derive(Default, Clone, Debug)]
+pub(crate) struct Summary {
+    pub(crate) seed: Option<u64>,
+    pub(crate) failures: std::collections::BTreeMap<String, Option<String>>,
+    pub(crate) elapsed_s: super::Elapsed,
+
+    pub(crate) num_run: usize,
+    /// Number of tests and benchmarks that were filtered out (either by the
+    /// filter-in pattern or by `--skip` arguments).
+    pub(crate) num_filtered_out: usize,
+
+    /// Number of passed tests.
+    pub(crate) num_passed: usize,
+    /// Number of failed tests and benchmarks.
+    pub(crate) num_failed: usize,
+    /// Number of ignored tests and benchmarks.
+    pub(crate) num_ignored: usize,
+}
+
+impl Summary {
+    pub(crate) fn has_failed(&self) -> bool {
+        0 < self.num_failed
+    }
+
+    pub(crate) fn write_start(&self, writer: &mut dyn std::io::Write) -> std::io::Result<()> {
+        let s = if self.num_run == 1 { "" } else { "s" };
+        let seed = self
+            .seed
+            .map(|s| format!(" (shuffle seed: {s})"))
+            .unwrap_or_default();
+
+        writeln!(writer)?;
+        writeln!(writer, "running {} test{s}{seed}", self.num_run)?;
+        Ok(())
+    }
+
+    pub(crate) fn write_complete(&self, writer: &mut dyn ::std::io::Write) -> std::io::Result<()> {
+        let (summary, summary_style) = if self.has_failed() {
+            ("FAILED", FAILED)
+        } else {
+            ("ok", OK)
+        };
+        let num_passed = self.num_passed;
+        let num_failed = self.num_failed;
+        let num_ignored = self.num_ignored;
+        let num_filtered_out = self.num_filtered_out;
+        let elapsed_s = self.elapsed_s;
+
+        if self.has_failed() {
+            writeln!(writer)?;
+            writeln!(writer, "failures:")?;
+            writeln!(writer)?;
+
+            // Print messages of all tests
+            for (name, msg) in &self.failures {
+                if let Some(msg) = msg {
+                    writeln!(writer, "---- {} ----", name)?;
+                    writeln!(writer, "{}", msg)?;
+                    writeln!(writer)?;
+                }
+            }
+
+            // Print summary list of failed tests
+            writeln!(writer)?;
+            writeln!(writer, "failures:")?;
+            for (name, _) in &self.failures {
+                writeln!(writer, "    {}", name)?;
+            }
+        }
+        writeln!(writer)?;
+        writeln!(
+            writer,
+            "test result: {}{summary}{}. {num_passed} passed; {num_failed} failed; {num_ignored} ignored; \
+             {num_filtered_out} filtered out; finished in {elapsed_s}",
+            summary_style.render(),
+            summary_style.render_reset()
+        )?;
+        writeln!(writer)?;
+
+        Ok(())
+    }
+}
+
+impl super::Notifier for Summary {
+    fn notify(&mut self, event: Event) -> std::io::Result<()> {
+        match event {
+            Event::DiscoverStart => {}
+            Event::DiscoverCase { run, .. } => {
+                if run {
+                    self.num_run += 1;
+                } else {
+                    self.num_filtered_out += 1;
+                }
+            }
+            Event::DiscoverComplete { seed, .. } => {
+                self.seed = seed;
+            }
+            Event::SuiteStart => {}
+            Event::CaseStart { .. } => {}
+            Event::CaseComplete {
+                name,
+                status,
+                message,
+                ..
+            } => match status {
+                Some(RunStatus::Ignored) => {
+                    self.num_ignored += 1;
+                }
+                Some(RunStatus::Failed) => {
+                    self.num_failed += 1;
+                    self.failures.insert(name, message);
+                }
+                None => {
+                    self.num_passed += 1;
+                }
+            },
+            Event::SuiteComplete { elapsed_s, .. } => {
+                self.elapsed_s = elapsed_s;
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/crates/libtest2-harness/src/notify/terse.rs b/crates/libtest2-harness/src/notify/terse.rs
new file mode 100644
index 0000000..5f8e766
--- /dev/null
+++ b/crates/libtest2-harness/src/notify/terse.rs
@@ -0,0 +1,89 @@
+use super::CaseMode;
+use super::Event;
+use super::RunStatus;
+use super::FAILED;
+use super::IGNORED;
+use super::OK;
+
+#[derive(Debug)]
+pub(crate) struct TerseListNotifier<W: std::io::Write> {
+    writer: W,
+    tests: usize,
+}
+
+impl<W: std::io::Write> TerseListNotifier<W> {
+    pub(crate) fn new(writer: W) -> Self {
+        Self { writer, tests: 0 }
+    }
+}
+
+impl<W: std::io::Write> super::Notifier for TerseListNotifier<W> {
+    fn notify(&mut self, event: Event) -> std::io::Result<()> {
+        match event {
+            Event::DiscoverStart => {}
+            Event::DiscoverCase { name, mode, run } => {
+                if run {
+                    let mode = match mode {
+                        CaseMode::Test => "test",
+                        CaseMode::Bench => "bench",
+                    };
+                    writeln!(self.writer, "{name}: {mode}")?;
+                    self.tests += 1;
+                }
+            }
+            Event::DiscoverComplete { .. } => {
+                writeln!(self.writer)?;
+                writeln!(self.writer, "{} tests", self.tests)?;
+                writeln!(self.writer)?;
+            }
+            Event::SuiteStart => {}
+            Event::CaseStart { .. } => {}
+            Event::CaseComplete { .. } => {}
+            Event::SuiteComplete { .. } => {}
+        }
+        Ok(())
+    }
+}
+
+#[derive(Debug)]
+pub(crate) struct TerseRunNotifier<W: std::io::Write> {
+    writer: W,
+    summary: super::Summary,
+}
+
+impl<W: std::io::Write> TerseRunNotifier<W> {
+    pub(crate) fn new(writer: W) -> Self {
+        Self {
+            writer,
+            summary: Default::default(),
+        }
+    }
+}
+
+impl<W: std::io::Write> super::Notifier for TerseRunNotifier<W> {
+    fn notify(&mut self, event: Event) -> std::io::Result<()> {
+        self.summary.notify(event.clone())?;
+        match event {
+            Event::DiscoverStart => {}
+            Event::DiscoverCase { .. } => {}
+            Event::DiscoverComplete { .. } => {}
+            Event::SuiteStart => {
+                self.summary.write_start(&mut self.writer)?;
+            }
+            Event::CaseStart { .. } => {}
+            Event::CaseComplete { status, .. } => {
+                let (c, style) = match status {
+                    Some(RunStatus::Ignored) => ('i', IGNORED),
+                    Some(RunStatus::Failed) => ('F', FAILED),
+                    None => ('.', OK),
+                };
+                write!(self.writer, "{}{c}{}", style.render(), style.render_reset())?;
+                self.writer.flush()?;
+            }
+            Event::SuiteComplete { .. } => {
+                self.summary.write_complete(&mut self.writer)?;
+            }
+        }
+        Ok(())
+    }
+}
diff --git a/crates/libtest2-harness/src/outcomes.rs b/crates/libtest2-harness/src/outcomes.rs
deleted file mode 100644
index 48440e5..0000000
--- a/crates/libtest2-harness/src/outcomes.rs
+++ /dev/null
@@ -1,233 +0,0 @@
-//! Definition of the `Outcomes`.
-//!
-//! This is just an abstraction for everything that is printed to the screen
-//! (or logfile, if specified). These parameters influence printing:
-//! - `color`
-//! - `format` (and `quiet`)
-//! - `logfile`
-
-use libtest_lexarg::OutputFormat;
-
-use crate::Case;
-use crate::RunError;
-use crate::RunErrorInner;
-use crate::RunResult;
-
-pub(crate) struct Outcomes {
-    out: Box<dyn std::io::Write>,
-    format: OutputFormat,
-    name_width: usize,
-
-    outcomes: std::collections::BTreeMap<String, RunResult>,
-    total_elapsed: std::time::Instant,
-    num_tests: usize,
-    /// Number of tests and benchmarks that were filtered out (either by the
-    /// filter-in pattern or by `--skip` arguments).
-    num_filtered_out: usize,
-    /// Number of passed tests.
-    num_passed: usize,
-    /// Number of failed tests and benchmarks.
-    num_failed: usize,
-    /// Number of ignored tests and benchmarks.
-    num_ignored: usize,
-}
-
-impl Outcomes {
-    /// Creates a new printer configured by the given arguments (`format`,
-    /// `quiet`, `color` and `logfile` options).
-    pub(crate) fn new(
-        args: &libtest_lexarg::TestOpts,
-        cases: &[Box<dyn Case>],
-        num_filtered_out: usize,
-    ) -> std::io::Result<Self> {
-        // Determine target of all output
-        let out: Box<dyn std::io::Write> = if let Some(logfile) = &args.logfile {
-            let f = std::fs::File::create(logfile)?;
-            if anstream::ColorChoice::global() == anstream::ColorChoice::Always {
-                Box::new(f)
-            } else {
-                Box::new(anstream::StripStream::new(f))
-            }
-        } else {
-            Box::new(anstream::stdout())
-        };
-
-        // Determine correct format
-        let format = args.format;
-
-        // Determine max test name length to do nice formatting later.
-        //
-        // Unicode is hard and there is no way we can properly align/pad the
-        // test names and outcomes. Counting the number of code points is just
-        // a cheap way that works in most cases. Usually, these names are
-        // ASCII.
-        let name_width = cases
-            .iter()
-            .map(|test| test.name().chars().count())
-            .max()
-            .unwrap_or(0);
-
-        Ok(Self {
-            out,
-            format,
-            name_width,
-            outcomes: Default::default(),
-            total_elapsed: std::time::Instant::now(),
-            num_tests: cases.len(),
-            num_filtered_out: num_filtered_out,
-            num_passed: 0,
-            num_failed: 0,
-            num_ignored: 0,
-        })
-    }
-
-    /// Prints the first line "running 3 tests".
-    pub(crate) fn start_suite(&mut self) -> std::io::Result<()> {
-        match self.format {
-            OutputFormat::Pretty | OutputFormat::Terse => {
-                let s = if self.num_tests == 1 { "" } else { "s" };
-
-                writeln!(self.out)?;
-                writeln!(self.out, "running {} test{s}", self.num_tests)?;
-            }
-            OutputFormat::Json | OutputFormat::Junit => todo!(),
-        }
-
-        Ok(())
-    }
-
-    /// Prints the text announcing the test (e.g. "test foo::bar ... "). Prints
-    /// nothing in terse mode.
-    pub(crate) fn start_case(&mut self, case: &dyn Case) -> std::io::Result<()> {
-        match self.format {
-            OutputFormat::Pretty => {
-                write!(self.out, "test {: <1$} ... ", case.name(), self.name_width)?;
-                self.out.flush()?;
-            }
-            OutputFormat::Terse => {
-                // In terse mode, nothing is printed before the job. Only
-                // `print_single_outcome` prints one character.
-            }
-            OutputFormat::Json | OutputFormat::Junit => todo!(),
-        }
-
-        Ok(())
-    }
-
-    /// Prints the outcome of a single tests. `ok` or `FAILED` in pretty mode
-    /// and `.` or `F` in terse mode.
-    pub(crate) fn finish_case(
-        &mut self,
-        case: &dyn Case,
-        outcome: RunResult,
-    ) -> std::io::Result<()> {
-        match self.format {
-            OutputFormat::Pretty => {
-                let (s, style) = match &outcome {
-                    Ok(()) => ("ok", OK),
-                    Err(RunError(RunErrorInner::Failed(_))) => ("FAILED", FAILED),
-                    Err(RunError(RunErrorInner::Ignored(_))) => ("ignored", IGNORED),
-                };
-
-                writeln!(self.out, "{}{s}{}", style.render(), style.render_reset())?;
-            }
-            OutputFormat::Terse => {
-                let (c, style) = match outcome {
-                    Ok(()) => ('.', OK),
-                    Err(RunError(RunErrorInner::Failed(_))) => ('F', FAILED),
-                    Err(RunError(RunErrorInner::Ignored(_))) => ('i', IGNORED),
-                };
-
-                write!(self.out, "{}{c}{}", style.render(), style.render_reset())?;
-            }
-            OutputFormat::Json | OutputFormat::Junit => todo!(),
-        }
-
-        match &outcome {
-            Ok(()) => self.num_passed += 1,
-            Err(RunError(RunErrorInner::Failed(_))) => self.num_failed += 1,
-            Err(RunError(RunErrorInner::Ignored(_))) => self.num_ignored += 1,
-        }
-        self.outcomes.insert(case.name().to_owned(), outcome);
-
-        Ok(())
-    }
-
-    /// Prints the summary line after all tests have been executed.
-    pub(crate) fn finish_suite(&mut self) -> std::io::Result<()> {
-        if self.has_failed() {
-            writeln!(self.out)?;
-            writeln!(self.out, "failures:")?;
-            writeln!(self.out)?;
-
-            // Print messages of all tests
-            for (name, outcome) in &self.outcomes {
-                if let Err(RunError(RunErrorInner::Failed(msg))) = outcome {
-                    writeln!(self.out, "---- {} ----", name)?;
-                    writeln!(self.out, "{}", msg)?;
-                    writeln!(self.out)?;
-                }
-            }
-
-            // Print summary list of failed tests
-            writeln!(self.out)?;
-            writeln!(self.out, "failures:")?;
-            for (name, outcome) in &self.outcomes {
-                if let Err(RunError(RunErrorInner::Failed(_))) = outcome {
-                    writeln!(self.out, "    {}", name)?;
-                }
-            }
-        }
-
-        match self.format {
-            OutputFormat::Pretty | OutputFormat::Terse => {
-                let (summary, summary_style) = if self.has_failed() {
-                    ("FAILED", FAILED)
-                } else {
-                    ("ok", OK)
-                };
-                let num_passed = self.num_passed;
-                let num_failed = self.num_failed;
-                let num_ignored = self.num_ignored;
-                let num_filtered_out = self.num_filtered_out;
-                let execution_time = self.total_elapsed.elapsed().as_secs_f64();
-
-                writeln!(self.out)?;
-                writeln!(
-                    self.out,
-                    "test result: {}{summary}{}. {num_passed} passed; {num_failed} failed; {num_ignored} ignored; \
-                     {num_filtered_out} filtered out; finished in {execution_time:.2}s",
-                    summary_style.render(),
-                    summary_style.render_reset()
-                )?;
-                writeln!(self.out)?;
-            }
-            OutputFormat::Json | OutputFormat::Junit => todo!(),
-        }
-        Ok(())
-    }
-
-    /// Prints a list of all tests. Used if `--list` is set.
-    pub(crate) fn list(&mut self, cases: &[Box<dyn Case>]) -> std::io::Result<()> {
-        for case in cases {
-            writeln!(self.out, "{}: test", case.name())?;
-        }
-
-        writeln!(self.out)?;
-        writeln!(self.out, "{} tests", cases.len())?;
-        writeln!(self.out)?;
-
-        Ok(())
-    }
-
-    pub(crate) fn has_failed(&self) -> bool {
-        0 < self.num_failed
-    }
-}
-
-const FAILED: anstyle::Style =
-    anstyle::Style::new().fg_color(Some(anstyle::Color::Ansi(anstyle::AnsiColor::Red)));
-const OK: anstyle::Style =
-    anstyle::Style::new().fg_color(Some(anstyle::Color::Ansi(anstyle::AnsiColor::Green)));
-const IGNORED: anstyle::Style =
-    anstyle::Style::new().fg_color(Some(anstyle::Color::Ansi(anstyle::AnsiColor::Yellow)));
diff --git a/crates/libtest2-mimic/Cargo.toml b/crates/libtest2-mimic/Cargo.toml
index 49fb398..ea35468 100644
--- a/crates/libtest2-mimic/Cargo.toml
+++ b/crates/libtest2-mimic/Cargo.toml
@@ -23,7 +23,8 @@ pre-release-replacements = [
 ]
 
 [features]
-default = []
+default = ["json"]
+json = ["libtest2-harness/json"]
 
 [dependencies]
 libtest2-harness = { version = "0.1.0", path = "../libtest2-harness" }
diff --git a/crates/libtest2-mimic/examples/simple.rs b/crates/libtest2-mimic/examples/simple.rs
index e790132..c5a1843 100644
--- a/crates/libtest2-mimic/examples/simple.rs
+++ b/crates/libtest2-mimic/examples/simple.rs
@@ -22,7 +22,7 @@ fn check_katara(_state: &State) -> RunResult {
     Ok(())
 }
 fn check_sokka(_state: &State) -> RunResult {
-    Err(RunError::msg("Sokka tripped and fell :("))
+    Err(RunError::fail("Sokka tripped and fell :("))
 }
 fn long_computation(state: &State) -> RunResult {
     state.ignore_for("slow")?;
diff --git a/crates/libtest2-mimic/examples/tidy.rs b/crates/libtest2-mimic/examples/tidy.rs
index 76885a9..71f50b4 100644
--- a/crates/libtest2-mimic/examples/tidy.rs
+++ b/crates/libtest2-mimic/examples/tidy.rs
@@ -54,29 +54,29 @@ fn collect_tests() -> std::io::Result<Vec<Trial>> {
 /// Performs a couple of tidy tests.
 fn check_file(path: &std::path::Path) -> RunResult {
     let content =
-        std::fs::read(path).map_err(|e| RunError::msg(format!("Cannot read file: {e}")))?;
+        std::fs::read(path).map_err(|e| RunError::fail(format_args!("Cannot read file: {e}")))?;
 
     // Check that the file is valid UTF-8
     let content = String::from_utf8(content)
-        .map_err(|_| RunError::msg("The file's contents are not a valid UTF-8 string!"))?;
+        .map_err(|_| RunError::fail("The file's contents are not a valid UTF-8 string!"))?;
 
     // Check for `\r`: we only want `\n` line breaks!
     if content.contains('\r') {
-        return Err(RunError::msg(
+        return Err(RunError::fail(
             "Contains '\\r' chars. Please use ' \\n' line breaks only!",
         ));
     }
 
     // Check for tab characters `\t`
     if content.contains('\t') {
-        return Err(RunError::msg(
+        return Err(RunError::fail(
             "Contains tab characters ('\\t'). Indent with four spaces!",
         ));
     }
 
     // Check for too long lines
     if content.lines().any(|line| line.chars().count() > 100) {
-        return Err(RunError::msg("Contains lines longer than 100 codepoints!"));
+        return Err(RunError::fail("Contains lines longer than 100 codepoints!"));
     }
 
     Ok(())
diff --git a/crates/libtest2-mimic/tests/testsuite/mixed_bag.rs b/crates/libtest2-mimic/tests/testsuite/mixed_bag.rs
index dd255d0..9dcf446 100644
--- a/crates/libtest2-mimic/tests/testsuite/mixed_bag.rs
+++ b/crates/libtest2-mimic/tests/testsuite/mixed_bag.rs
@@ -9,11 +9,11 @@ fn main() {
     libtest2_mimic::Harness::with_env()
         .cases(vec![
             Trial::test("cat", |_| Ok(())),
-            Trial::test("dog", |_| Err(RunError::msg("was not a good boy"))),
+            Trial::test("dog", |_| Err(RunError::fail("was not a good boy"))),
             Trial::test("fox", |_| Ok(())),
             Trial::test("bunny", |state| {
                 state.ignore_for("fails")?;
-                Err(RunError::msg("jumped too high"))
+                Err(RunError::fail("jumped too high"))
             }),
             Trial::test("frog", |state| {
                 state.ignore_for("slow")?;
@@ -21,7 +21,7 @@
             }),
             Trial::test("owl", |state| {
                 state.ignore_for("fails")?;
-                Err(RunError::msg("broke neck"))
+                Err(RunError::fail("broke neck"))
             }),
             Trial::test("fly", |state| {
                 state.ignore_for("fails")?;
@@ -29,7 +29,7 @@
             }),
             Trial::test("bear", |state| {
                 state.ignore_for("fails")?;
-                Err(RunError::msg("no honey"))
+                Err(RunError::fail("no honey"))
             }),
         ])
         .main();
@@ -136,7 +136,7 @@ was not a good boy
 failures:
     dog
 
-test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in [..]s
 
 "#,
         r#"
@@ -152,7 +152,7 @@ was not a good boy
 failures:
     dog
 
-test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in [..]s
 
 "#,
     )
@@ -183,7 +183,7 @@ was not a good boy
 failures:
     dog
 
-test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in [..]s
 
 "#,
         r#"
@@ -199,7 +199,7 @@ was not a good boy
 failures:
     dog
 
-test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 2 passed; 1 failed; 5 ignored; 0 filtered out; finished in [..]s
 
 "#,
     )
@@ -474,7 +474,7 @@ failures:
     dog
     owl
 
-test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in [..]s
 
 "#,
         r#"
@@ -502,7 +502,7 @@ failures:
     dog
     owl
 
-test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in [..]s
 
 "#,
     )
@@ -545,7 +545,7 @@ failures:
     dog
     owl
 
-test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in [..]s
 
 "#,
         r#"
@@ -573,7 +573,7 @@ failures:
     dog
     owl
 
-test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in 0.00s
+test result: FAILED. 4 passed; 4 failed; 0 ignored; 0 filtered out; finished in [..]s
 
 "#,
     )
@@ -620,6 +620,78 @@ test result: FAILED. 1 passed; 1 failed; 0 ignored; 6 filtered out; finished in
     )
 }
 
+#[test]
+fn list_json() {
+    check(
+        &["-Zunstable-options", "--format=json", "--list", "a"],
+        0,
+        r#"{"event":"discover-start"}
+{"event":"discover-case","name":"bear","mode":"test","run":true}
+{"event":"discover-case","name":"bunny","mode":"test","run":false}
+{"event":"discover-case","name":"cat","mode":"test","run":true}
+{"event":"discover-case","name":"dog","mode":"test","run":false}
+{"event":"discover-case","name":"fly","mode":"test","run":false}
+{"event":"discover-case","name":"fox","mode":"test","run":false}
+{"event":"discover-case","name":"frog","mode":"test","run":false}
+{"event":"discover-case","name":"owl","mode":"test","run":false}
+{"event":"discover-complete","elapsed_s":"[..]","seed":null}
+"#,
+        r#"{"event":"discover-start"}
+{"event":"discover-case","name":"bear","mode":"test","run":true}
+{"event":"discover-case","name":"bunny","mode":"test","run":false}
+{"event":"discover-case","name":"cat","mode":"test","run":true}
+{"event":"discover-case","name":"dog","mode":"test","run":false}
+{"event":"discover-case","name":"fly","mode":"test","run":false}
+{"event":"discover-case","name":"fox","mode":"test","run":false}
+{"event":"discover-case","name":"frog","mode":"test","run":false}
+{"event":"discover-case","name":"owl","mode":"test","run":false}
+{"event":"discover-complete","elapsed_s":"[..]","seed":null}
+"#,
+    )
+}
+
+#[test]
+fn test_json() {
+    check(
+        &["-Zunstable-options", "--format=json", "a"],
+        0,
+        r#"{"event":"discover-start"}
+{"event":"discover-case","name":"bear","mode":"test","run":true}
+{"event":"discover-case","name":"bunny","mode":"test","run":false}
+{"event":"discover-case","name":"cat","mode":"test","run":true}
+{"event":"discover-case","name":"dog","mode":"test","run":false}
+{"event":"discover-case","name":"fly","mode":"test","run":false}
+{"event":"discover-case","name":"fox","mode":"test","run":false}
+{"event":"discover-case","name":"frog","mode":"test","run":false}
+{"event":"discover-case","name":"owl","mode":"test","run":false}
+{"event":"discover-complete","elapsed_s":"[..]","seed":null}
+{"event":"suite-start"}
+{"event":"case-start","name":"bear"}
+{"event":"case-complete","name":"bear","mode":"test","status":"ignored","message":"fails","elapsed_s":"[..]"}
+{"event":"case-start","name":"cat"}
+{"event":"case-complete","name":"cat","mode":"test","status":null,"message":null,"elapsed_s":"[..]"}
+{"event":"suite-complete","elapsed_s":"[..]"}
+"#,
+        r#"{"event":"discover-start"}
+{"event":"discover-case","name":"bear","mode":"test","run":true}
+{"event":"discover-case","name":"bunny","mode":"test","run":false}
+{"event":"discover-case","name":"cat","mode":"test","run":true}
+{"event":"discover-case","name":"dog","mode":"test","run":false}
+{"event":"discover-case","name":"fly","mode":"test","run":false}
+{"event":"discover-case","name":"fox","mode":"test","run":false}
+{"event":"discover-case","name":"frog","mode":"test","run":false}
+{"event":"discover-case","name":"owl","mode":"test","run":false}
+{"event":"discover-complete","elapsed_s":"[..]","seed":null}
+{"event":"suite-start"}
+[..]
+[..]
+[..]
+[..]
+{"event":"suite-complete","elapsed_s":"[..]"}
+"#,
+    )
+}
+
 #[test]
 fn terse_output() {
     check(