diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml
index 41a7008efc..dde0deed0c 100644
--- a/.github/workflows/test-js-packages.yml
+++ b/.github/workflows/test-js-packages.yml
@@ -534,7 +534,7 @@ jobs:
       - name: Build list of libraries
         id: get_critical_libraries
         run: |
-          LIBRARIES=$(grep -Po "^https://github.com/\K.+" ./CRITICAL_NOIR_LIBRARIES | jq -R -s -c 'split("\n") | map(select(. != "")) | map({ repo: ., path: "./"})')
+          LIBRARIES=$(grep -Po "^https://github.com/\K.+" ./CRITICAL_NOIR_LIBRARIES | jq -R -s -c 'split("\n") | map(select(. != "")) | map({ repo: ., path: ""})')
           echo "libraries=$LIBRARIES"
           echo "libraries=$LIBRARIES" >> $GITHUB_OUTPUT
         env:
@@ -551,15 +551,15 @@ jobs:
       matrix:
         project: ${{ fromJson( needs.critical-library-list.outputs.libraries )}}
         include:
-          - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/aztec-nr }
-          - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-contracts }
-          - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/parity-lib }
-          - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/private-kernel-lib }
-          - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/reset-kernel-lib }
-          - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/rollup-lib }
-          - project: { repo: AztecProtocol/aztec-packages, path: ./noir-projects/noir-protocol-circuits/crates/types }
-
-    name: Check external repo - ${{ matrix.project.repo }}
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/aztec-nr }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-contracts }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/parity-lib }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/private-kernel-lib }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/reset-kernel-lib }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/rollup-lib }
+          - project: { repo: AztecProtocol/aztec-packages, path: noir-projects/noir-protocol-circuits/crates/types }
+
+    name: Check external repo - ${{ matrix.project.repo }}/${{ matrix.project.path }}
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -591,7 +591,7 @@ jobs:
 
       - name: Run nargo test
         working-directory: ./test-repo/${{ matrix.project.path }}
-        run: nargo test --silence-warnings
+        run: nargo test -q --silence-warnings
         env:
           NARGO_IGNORE_TEST_FAILURES_FROM_FOREIGN_CALLS: true
 
diff --git a/scripts/check-critical-libraries.sh b/scripts/check-critical-libraries.sh
index b492cf1d4b..f8e474d23d 100755
--- a/scripts/check-critical-libraries.sh
+++ b/scripts/check-critical-libraries.sh
@@ -31,7 +31,7 @@ for REPO in ${REPOS_TO_CHECK[@]}; do
  TAG=$(getLatestReleaseTagForRepo $REPO)
  git clone $REPO -c advice.detachedHead=false --depth 1 --branch $TAG $TMP_DIR
 
-  nargo test --program-dir $TMP_DIR
+  nargo test -q --program-dir $TMP_DIR
 
  rm -rf $TMP_DIR
done
diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs
index ecbc42ff38..32512eba5b 100644
--- a/tooling/nargo_cli/src/cli/test_cmd.rs
+++ b/tooling/nargo_cli/src/cli/test_cmd.rs
@@ -1,6 +1,6 @@
 use std::{
     collections::{BTreeMap, HashMap},
-    io::Write,
+    fmt::Display,
     panic::{catch_unwind, UnwindSafe},
     path::PathBuf,
     sync::{mpsc, Mutex},
@@ -12,6 +12,7 @@ use acvm::{BlackBoxFunctionSolver, FieldElement};
 use bn254_blackbox_solver::Bn254BlackBoxSolver;
 use clap::Args;
 use fm::FileManager;
+use formatters::{Formatter, PrettyFormatter, TerseFormatter};
 use nargo::{
     insert_all_files_for_workspace_into_file_manager, ops::TestStatus, package::Package,
     parse_all, prepare_package, workspace::Workspace, PrintOutput,
@@ -19,12 +20,13 @@ use nargo::{
 use nargo_toml::{get_package_manifest, resolve_workspace_from_toml};
 use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING};
 use noirc_frontend::hir::{FunctionNameMatch, ParsedFiles};
-use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, StandardStreamLock, WriteColor};
 
 use crate::{cli::check_cmd::check_crate_and_report_errors, errors::CliError};
 
 use super::{NargoConfig, PackageOptions};
 
+mod formatters;
+
 /// Run the tests for this program
 #[derive(Debug, Clone, Args)]
 #[clap(visible_alias = "t")]
@@ -53,6 +55,40 @@ pub(crate) struct TestCommand {
     /// Number of threads used for running tests in parallel
     #[clap(long, default_value_t = rayon::current_num_threads())]
     test_threads: usize,
+
+    /// Configure formatting of output
+    #[clap(long)]
+    format: Option<Format>,
+
+    /// Display one character per test instead of one line
+    #[clap(short = 'q', long = "quiet")]
+    quiet: bool,
+}
+
+#[derive(Debug, Copy, Clone, clap::ValueEnum)]
+enum Format {
+    /// Print verbose output
+    Pretty,
+    /// Display one character per test
+    Terse,
+}
+
+impl Format {
+    fn formatter(&self) -> Box<dyn Formatter> {
+        match self {
+            Format::Pretty => Box::new(PrettyFormatter),
+            Format::Terse => Box::new(TerseFormatter),
+        }
+    }
+}
+
+impl Display for Format {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Format::Pretty => write!(f, "pretty"),
+            Format::Terse => write!(f, "terse"),
+        }
+    }
 }
 
 struct Test<'a> {
@@ -95,6 +131,14 @@ pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError
         None => FunctionNameMatch::Anything,
     };
 
+    let formatter: Box<dyn Formatter> = if let Some(format) = args.format {
+        format.formatter()
+    } else if args.quiet {
+        Box::new(TerseFormatter)
+    } else {
+        Box::new(PrettyFormatter)
+    };
+
     let runner = TestRunner {
         file_manager: &file_manager,
         parsed_files: &parsed_files,
@@ -102,6 +146,7 @@ pub(crate) fn run(args: TestCommand, config: NargoConfig) -> Result<(), CliError
         args: &args,
         pattern,
         num_threads: args.test_threads,
+        formatter,
     };
     runner.run()
 }
@@ -113,6 +158,7 @@ struct TestRunner<'a> {
     args: &'a TestCommand,
     pattern: FunctionNameMatch<'a>,
     num_threads: usize,
+    formatter: Box<dyn Formatter>,
 }
 
 impl<'a> TestRunner<'a> {
@@ -222,27 +268,31 @@ impl<'a> TestRunner<'a> {
             // We'll go package by package, but we might get test results from packages ahead of us.
             // We'll buffer those here and show them all at once when we get to those packages.
             let mut buffer: HashMap<String, Vec<TestResult>> = HashMap::new();
-            for (package_name, test_count) in test_count_per_package {
-                let plural = if *test_count == 1 { "" } else { "s" };
-                println!("[{package_name}] Running {test_count} test function{plural}");
-
+            for (package_name, total_test_count) in test_count_per_package {
                 let mut test_report = Vec::new();
 
-                // How many tests are left to receive for this package
-                let mut remaining_test_count = *test_count;
+                let mut current_test_count = 0;
+                let total_test_count = *total_test_count;
+
+                self.formatter
+                    .package_start(package_name, total_test_count)
+                    .expect("Could not display package start");
 
                 // Check if we have buffered test results for this package
                 if let Some(buffered_tests) = buffer.remove(package_name) {
-                    remaining_test_count -= buffered_tests.len();
-
                     for test_result in buffered_tests {
-                        self.display_test_result(&test_result)
-                            .expect("Could not display test status");
+                        self.display_test_result(
+                            &test_result,
+                            current_test_count + 1,
+                            total_test_count,
+                        )
+                        .expect("Could not display test status");
                         test_report.push(test_result);
+                        current_test_count += 1;
                     }
                 }
 
-                if remaining_test_count > 0 {
+                if current_test_count < total_test_count {
                     while let Ok(test_result) = receiver.recv() {
                         if test_result.status.failed() {
                             all_passed = false;
@@ -257,17 +307,29 @@ impl<'a> TestRunner<'a> {
                             continue;
                         }
 
-                        self.display_test_result(&test_result)
-                            .expect("Could not display test status");
+                        self.display_test_result(
+                            &test_result,
+                            current_test_count + 1,
+                            total_test_count,
+                        )
+                        .expect("Could not display test status");
                         test_report.push(test_result);
-                        remaining_test_count -= 1;
-                        if remaining_test_count == 0 {
+                        current_test_count += 1;
+                        if current_test_count == total_test_count {
                             break;
                         }
                     }
                 }
 
-                display_test_report(package_name, &test_report)
+                self.formatter
+                    .package_end(
+                        package_name,
+                        &test_report,
+                        self.file_manager,
+                        self.args.show_output,
+                        self.args.compile_options.deny_warnings,
+                        self.args.compile_options.silence_warnings,
+                    )
                     .expect("Could not display test report");
             }
         });
@@ -417,116 +479,20 @@ impl<'a> TestRunner<'a> {
     }
 
     /// Display the status of a single test
-    fn display_test_result(&'a self, test_result: &'a TestResult) -> std::io::Result<()> {
-        let writer = StandardStream::stderr(ColorChoice::Always);
-        let mut writer = writer.lock();
-
-        let is_slow = test_result.time_to_run >= Duration::from_secs(30);
-        let show_time = |writer: &mut StandardStreamLock<'_>| {
-            if is_slow {
-                write!(writer, " <{:.3}s>", test_result.time_to_run.as_secs_f64())
-            } else {
-                Ok(())
-            }
-        };
-
-        write!(writer, "[{}] Testing {}... ", &test_result.package_name, &test_result.name)?;
-        writer.flush()?;
-
-        match &test_result.status {
-            TestStatus::Pass { .. } => {
-                writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
-                write!(writer, "ok")?;
-                writer.reset()?;
-                show_time(&mut writer)?;
-                writeln!(writer)?;
-            }
-            TestStatus::Fail { message, error_diagnostic } => {
-                writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
-                write!(writer, "FAIL\n{message}\n")?;
-                writer.reset()?;
-                show_time(&mut writer)?;
-                writeln!(writer)?;
-                if let Some(diag) = error_diagnostic {
-                    noirc_errors::reporter::report_all(
-                        self.file_manager.as_file_map(),
-                        &[diag.clone()],
-                        self.args.compile_options.deny_warnings,
-                        self.args.compile_options.silence_warnings,
-                    );
-                }
-            }
-            TestStatus::Skipped { .. } => {
-                writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
-                write!(writer, "skipped")?;
-                writer.reset()?;
-                show_time(&mut writer)?;
-                writeln!(writer)?;
-            }
-            TestStatus::CompileError(err) => {
-                noirc_errors::reporter::report_all(
-                    self.file_manager.as_file_map(),
-                    &[err.clone()],
-                    self.args.compile_options.deny_warnings,
-                    self.args.compile_options.silence_warnings,
-                );
-            }
-        }
-
-        if self.args.show_output && !test_result.output.is_empty() {
-            writeln!(writer, "--- {} stdout ---", test_result.name)?;
-            write!(writer, "{}", test_result.output)?;
-            let name_len = test_result.name.len();
-            writeln!(writer, "{}", "-".repeat(name_len + "--- stdout ---".len()))
-        } else {
-            Ok(())
-        }
-    }
-}
-
-/// Display a report for all tests in a package
-fn display_test_report(package_name: &String, test_report: &[TestResult]) -> std::io::Result<()> {
-    let writer = StandardStream::stderr(ColorChoice::Always);
-    let mut writer = writer.lock();
-
-    let failed_tests: Vec<_> = test_report
-        .iter()
-        .filter_map(|test_result| test_result.status.failed().then_some(&test_result.name))
-        .collect();
-
-    if !failed_tests.is_empty() {
-        writeln!(writer)?;
-        writeln!(writer, "[{}] Failures:", package_name)?;
-        for failed_test in failed_tests {
-            writeln!(writer, " {}", failed_test)?;
-        }
-        writeln!(writer)?;
-    }
-
-    write!(writer, "[{}] ", package_name)?;
-
-    let count_all = test_report.len();
-    let count_failed = test_report.iter().filter(|test_result| test_result.status.failed()).count();
-    let plural = if count_all == 1 { "" } else { "s" };
-    if count_failed == 0 {
-        writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
-        write!(writer, "{count_all} test{plural} passed")?;
-        writer.reset()?;
-        writeln!(writer)?;
-    } else {
-        let count_passed = count_all - count_failed;
-        let plural_failed = if count_failed == 1 { "" } else { "s" };
-        let plural_passed = if count_passed == 1 { "" } else { "s" };
-
-        if count_passed != 0 {
-            writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
-            write!(writer, "{count_passed} test{plural_passed} passed, ")?;
-        }
-
-        writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
-        writeln!(writer, "{count_failed} test{plural_failed} failed")?;
-        writer.reset()?;
+    fn display_test_result(
+        &'a self,
+        test_result: &'a TestResult,
+        current_test_count: usize,
+        total_test_count: usize,
+    ) -> std::io::Result<()> {
+        self.formatter.test_end(
+            test_result,
+            current_test_count,
+            total_test_count,
+            self.file_manager,
+            self.args.show_output,
+            self.args.compile_options.deny_warnings,
+            self.args.compile_options.silence_warnings,
+        )
     }
-
-    Ok(())
 }
diff --git a/tooling/nargo_cli/src/cli/test_cmd/formatters.rs b/tooling/nargo_cli/src/cli/test_cmd/formatters.rs
new file mode 100644
index 0000000000..2a791930f6
--- /dev/null
+++ b/tooling/nargo_cli/src/cli/test_cmd/formatters.rs
@@ -0,0 +1,324 @@
+use std::{io::Write, panic::RefUnwindSafe, time::Duration};
+
+use fm::FileManager;
+use nargo::ops::TestStatus;
+use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, StandardStreamLock, WriteColor};
+
+use super::TestResult;
+
+pub(super) trait Formatter: Send + Sync + RefUnwindSafe {
+    fn package_start(&self, package_name: &str, test_count: usize) -> std::io::Result<()>;
+
+    #[allow(clippy::too_many_arguments)]
+    fn test_end(
+        &self,
+        test_result: &TestResult,
+        current_test_count: usize,
+        total_test_count: usize,
+        file_manager: &FileManager,
+        show_output: bool,
+        deny_warnings: bool,
+        silence_warnings: bool,
+    ) -> std::io::Result<()>;
+
+    fn package_end(
+        &self,
+        package_name: &str,
+        test_results: &[TestResult],
+        file_manager: &FileManager,
+        show_output: bool,
+        deny_warnings: bool,
+        silence_warnings: bool,
+    ) -> std::io::Result<()>;
+}
+
+pub(super) struct PrettyFormatter;
+
+impl Formatter for PrettyFormatter {
+    fn package_start(&self, package_name: &str, test_count: usize) -> std::io::Result<()> {
+        package_start(package_name, test_count)
+    }
+
+    fn test_end(
+        &self,
+        test_result: &TestResult,
+        _current_test_count: usize,
+        _total_test_count: usize,
+        file_manager: &FileManager,
+        show_output: bool,
+        deny_warnings: bool,
+        silence_warnings: bool,
+    ) -> std::io::Result<()> {
+        let writer = StandardStream::stderr(ColorChoice::Always);
+        let mut writer = writer.lock();
+
+        let is_slow = test_result.time_to_run >= Duration::from_secs(30);
+        let show_time = |writer: &mut StandardStreamLock<'_>| {
+            if is_slow {
+                write!(writer, " <{:.3}s>", test_result.time_to_run.as_secs_f64())
+            } else {
+                Ok(())
+            }
+        };
+
+        write!(writer, "[{}] Testing {}... ", &test_result.package_name, &test_result.name)?;
+        writer.flush()?;
+
+        match &test_result.status {
+            TestStatus::Pass { .. } => {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
+                write!(writer, "ok")?;
+                writer.reset()?;
+                show_time(&mut writer)?;
+                writeln!(writer)?;
+            }
+            TestStatus::Fail { message, error_diagnostic } => {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
+                write!(writer, "FAIL\n{message}\n")?;
+                writer.reset()?;
+                show_time(&mut writer)?;
+                writeln!(writer)?;
+                if let Some(diag) = error_diagnostic {
+                    noirc_errors::reporter::report_all(
+                        file_manager.as_file_map(),
+                        &[diag.clone()],
+                        deny_warnings,
+                        silence_warnings,
+                    );
+                }
+            }
+            TestStatus::Skipped { .. } => {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
+                write!(writer, "skipped")?;
+                writer.reset()?;
+                show_time(&mut writer)?;
+                writeln!(writer)?;
+            }
+            TestStatus::CompileError(file_diagnostic) => {
+                noirc_errors::reporter::report_all(
+                    file_manager.as_file_map(),
+                    &[file_diagnostic.clone()],
+                    deny_warnings,
+                    silence_warnings,
+                );
+            }
+        }
+
+        if show_output && !test_result.output.is_empty() {
+            writeln!(writer, "--- {} stdout ---", test_result.name)?;
+            write!(writer, "{}", test_result.output)?;
+            let name_len = test_result.name.len();
+            writeln!(writer, "{}", "-".repeat(name_len + "--- stdout ---".len()))
+        } else {
+            Ok(())
+        }
+    }
+
+    fn package_end(
+        &self,
+        package_name: &str,
+        test_results: &[TestResult],
+        _file_manager: &FileManager,
+        _show_output: bool,
+        _deny_warnings: bool,
+        _silence_warnings: bool,
+    ) -> std::io::Result<()> {
+        let writer = StandardStream::stderr(ColorChoice::Always);
+        let mut writer = writer.lock();
+
+        let failed_tests: Vec<_> = test_results
+            .iter()
+            .filter_map(|test_result| test_result.status.failed().then_some(&test_result.name))
+            .collect();
+
+        if !failed_tests.is_empty() {
+            writeln!(writer)?;
+            writeln!(writer, "[{}] Failures:", package_name)?;
+            for failed_test in failed_tests {
+                writeln!(writer, " {}", failed_test)?;
+            }
+            writeln!(writer)?;
+        }
+
+        write!(writer, "[{}] ", package_name)?;
+
+        let count_all = test_results.len();
+        let count_failed =
+            test_results.iter().filter(|test_result| test_result.status.failed()).count();
+        let plural = if count_all == 1 { "" } else { "s" };
+        if count_failed == 0 {
+            writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
+            write!(writer, "{count_all} test{plural} passed")?;
+            writer.reset()?;
+            writeln!(writer)?;
+        } else {
+            let count_passed = count_all - count_failed;
+            let plural_failed = if count_failed == 1 { "" } else { "s" };
+            let plural_passed = if count_passed == 1 { "" } else { "s" };
+
+            if count_passed != 0 {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
+                write!(writer, "{count_passed} test{plural_passed} passed, ")?;
+            }
+
+            writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
+            writeln!(writer, "{count_failed} test{plural_failed} failed")?;
+            writer.reset()?;
+        }
+
+        Ok(())
+    }
+}
+
+pub(super) struct TerseFormatter;
+
+impl Formatter for TerseFormatter {
+    fn package_start(&self, package_name: &str, test_count: usize) -> std::io::Result<()> {
+        package_start(package_name, test_count)
+    }
+
+    fn test_end(
+        &self,
+        test_result: &TestResult,
+        current_test_count: usize,
+        total_test_count: usize,
+        _file_manager: &FileManager,
+        _show_output: bool,
+        _deny_warnings: bool,
+        _silence_warnings: bool,
+    ) -> std::io::Result<()> {
+        let writer = StandardStream::stderr(ColorChoice::Always);
+        let mut writer = writer.lock();
+
+        match &test_result.status {
+            TestStatus::Pass => {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
+                write!(writer, ".")?;
+                writer.reset()?;
+            }
+            TestStatus::Fail { .. } | TestStatus::CompileError(_) => {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
+                write!(writer, "F")?;
+                writer.reset()?;
+            }
+            TestStatus::Skipped => {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
+                write!(writer, "s")?;
+                writer.reset()?;
+            }
+        }
+
+        // How many tests ('.', 'F', etc.) to print per line.
+        // We use 88 which is a bit more than the traditional 80 columns (screens are larger these days)
+        // but we also want the output to be readable in case the terminal isn't maximized.
+        const MAX_TESTS_PER_LINE: usize = 88;
+
+        if current_test_count % MAX_TESTS_PER_LINE == 0 && current_test_count < total_test_count {
+            writeln!(writer, " {}/{}", current_test_count, total_test_count)?;
+        }
+
+        Ok(())
+    }
+
+    fn package_end(
+        &self,
+        package_name: &str,
+        test_results: &[TestResult],
+        file_manager: &FileManager,
+        show_output: bool,
+        deny_warnings: bool,
+        silence_warnings: bool,
+    ) -> std::io::Result<()> {
+        let writer = StandardStream::stderr(ColorChoice::Always);
+        let mut writer = writer.lock();
+
+        if !test_results.is_empty() {
+            writeln!(writer)?;
+        }
+
+        for test_result in test_results {
+            if (show_output && !test_result.output.is_empty()) || test_result.status.failed() {
+                writeln!(writer, "--- {} stdout ---", test_result.name)?;
+                if !test_result.output.is_empty() {
+                    write!(writer, "{}", test_result.output)?;
+                }
+
+                match &test_result.status {
+                    TestStatus::Pass | TestStatus::Skipped => (),
+                    TestStatus::Fail { message, error_diagnostic } => {
+                        writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
+                        writeln!(writer, "{message}")?;
+                        writer.reset()?;
+                        if let Some(diag) = error_diagnostic {
+                            noirc_errors::reporter::report_all(
+                                file_manager.as_file_map(),
+                                &[diag.clone()],
+                                deny_warnings,
+                                silence_warnings,
+                            );
+                        }
+                    }
+                    TestStatus::CompileError(file_diagnostic) => {
+                        noirc_errors::reporter::report_all(
+                            file_manager.as_file_map(),
+                            &[file_diagnostic.clone()],
+                            deny_warnings,
+                            silence_warnings,
+                        );
+                    }
+                }
+
+                let name_len = test_result.name.len();
+                writeln!(writer, "{}", "-".repeat(name_len + "--- stdout ---".len()))?;
+            }
+        }
+
+        let failed_tests: Vec<_> = test_results
+            .iter()
+            .filter_map(|test_result| test_result.status.failed().then_some(&test_result.name))
+            .collect();
+
+        if !failed_tests.is_empty() {
+            writeln!(writer)?;
+            writeln!(writer, "[{}] Failures:", package_name)?;
+            for failed_test in failed_tests {
+                writeln!(writer, " {}", failed_test)?;
+            }
+            writeln!(writer)?;
+        }
+
+        write!(writer, "[{}] ", package_name)?;
+
+        let count_all = test_results.len();
+        let count_failed =
+            test_results.iter().filter(|test_result| test_result.status.failed()).count();
+        let plural = if count_all == 1 { "" } else { "s" };
+        if count_failed == 0 {
+            writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
+            write!(writer, "{count_all} test{plural} passed")?;
+            writer.reset()?;
+            writeln!(writer)?;
+        } else {
+            let count_passed = count_all - count_failed;
+            let plural_failed = if count_failed == 1 { "" } else { "s" };
+            let plural_passed = if count_passed == 1 { "" } else { "s" };
+
+            if count_passed != 0 {
+                writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
+                write!(writer, "{count_passed} test{plural_passed} passed, ")?;
+            }
+
+            writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
+            writeln!(writer, "{count_failed} test{plural_failed} failed")?;
+            writer.reset()?;
+        }
+
+        Ok(())
+    }
+}
+
+fn package_start(package_name: &str, test_count: usize) -> std::io::Result<()> {
+    let plural = if test_count == 1 { "" } else { "s" };
+    println!("[{package_name}] Running {test_count} test function{plural}");
+    Ok(())
+}
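
Usage sketch (not part of the patch; behavior read off the clap definitions and the formatter selection in run() above): an explicit --format always takes precedence, -q/--quiet alone selects the terse formatter, and with neither flag the pretty formatter is used.

    nargo test                    # default: pretty, one line per test
    nargo test --format pretty    # same formatter, selected explicitly
    nargo test -q                 # terse: one character per test ('.', 'F', 's')
    nargo test --quiet            # long form of -q
    nargo test --format terse     # terse regardless of -q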