Can now test multiple files
jfkonecn committed Aug 4, 2024
1 parent 034d149 · commit d85b534
Showing 1 changed file with 123 additions and 109 deletions.
crates/cli/src/lib.rs (232 changes: 123 additions & 109 deletions)
@@ -264,6 +264,7 @@ pub fn build_app() -> Command {
Arg::new(ROC_FILE)
.help("The .roc file to test")
.value_parser(value_parser!(PathBuf))
.num_args(0..)
.required(false)
.default_value(DEFAULT_ROC_FILENAME)
)
@@ -498,18 +499,20 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
Some(n) => Threading::AtMost(*n),
};

let path = matches.get_one::<PathBuf>(ROC_FILE).unwrap();
// let path = matches.get_one::<PathBuf>(ROC_FILE).unwrap();
let paths: Vec<_> = matches.get_many::<PathBuf>(ROC_FILE).unwrap().collect();

// Spawn the root task
if !path.exists() {
let current_dir = env::current_dir().unwrap();
let expected_file_path = current_dir.join(path);
for path in paths.iter() {
// Spawn the root task
if !path.exists() {
let current_dir = env::current_dir().unwrap();
let expected_file_path = current_dir.join(path);

let current_dir_string = current_dir.display();
let expected_file_path_string = expected_file_path.display();
let current_dir_string = current_dir.display();
let expected_file_path_string = expected_file_path.display();

// TODO these should use roc_reporting to display nicer error messages.
match matches.value_source(ROC_FILE) {
// TODO these should use roc_reporting to display nicer error messages.
match matches.value_source(ROC_FILE) {
Some(ValueSource::DefaultValue) => {
eprintln!(
"\nThe current directory ({current_dir_string}) does not contain a {DEFAULT_ROC_FILENAME} file to use as a default.\n\nYou can run `roc help` for more information on how to provide a .roc file.\n"
@@ -518,115 +521,137 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
_ => eprintln!("\nThis file was not found: {expected_file_path_string}\n\nYou can run `roc help` for more information on how to provide a .roc file.\n"),
}

process::exit(1);
process::exit(1);
}
}

let arena = &arena;
let function_kind = FunctionKind::from_env();
let mut all_files_total_failed_count = 0;
let mut all_files_total_passed_count = 0;

let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);
for path in paths.iter() {
let arena = &arena;
let function_kind = FunctionKind::from_env();

// Step 1: compile the app and generate the .o file
let load_config = LoadConfig {
target,
function_kind,
// TODO: expose this from CLI?
render: roc_reporting::report::RenderTarget::ColorTerminal,
palette: roc_reporting::report::DEFAULT_PALETTE,
threading,
exec_mode: ExecutionMode::Test,
};
let load_result = roc_load::load_and_monomorphize(
arena,
path.to_path_buf(),
opt_main_path.cloned(),
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
load_config,
);
let opt_main_path = matches.get_one::<PathBuf>(FLAG_MAIN);

let mut loaded = match load_result {
Ok(loaded) => loaded,
Err(LoadMonomorphizedError::LoadingProblem(problem)) => {
return handle_loading_problem(problem);
}
Err(LoadMonomorphizedError::ErrorModule(module)) => {
return handle_error_module(module, start_time.elapsed(), path.as_os_str(), false);
}
};
let problems = report_problems_monomorphized(&mut loaded);
// Step 1: compile the app and generate the .o file
let load_config = LoadConfig {
target,
function_kind,
// TODO: expose this from CLI?
render: roc_reporting::report::RenderTarget::ColorTerminal,
palette: roc_reporting::report::DEFAULT_PALETTE,
threading,
exec_mode: ExecutionMode::Test,
};
let load_result = roc_load::load_and_monomorphize(
arena,
path.to_path_buf(),
opt_main_path.cloned(),
RocCacheDir::Persistent(cache::roc_cache_dir().as_path()),
load_config,
);

let mut expectations = std::mem::take(&mut loaded.expectations);
let mut loaded = match load_result {
Ok(loaded) => loaded,
Err(LoadMonomorphizedError::LoadingProblem(problem)) => {
return handle_loading_problem(problem);
}
Err(LoadMonomorphizedError::ErrorModule(module)) => {
return handle_error_module(module, start_time.elapsed(), path.as_os_str(), false);
}
};
let problems = report_problems_monomorphized(&mut loaded);

let interns = loaded.interns.clone();
let sources = loaded.sources.clone();
let mut expectations = std::mem::take(&mut loaded.expectations);

let (dyn_lib, expects_by_module, layout_interner) =
roc_repl_expect::run::expect_mono_module_to_dylib(
arena,
target,
loaded,
opt_level,
LlvmBackendMode::CliTest,
)
.unwrap();
let interns = loaded.interns.clone();
let sources = loaded.sources.clone();

// Print warnings before running tests.
{
debug_assert_eq!(
problems.errors, 0,
"if there were errors, we would have already exited."
);
if problems.warnings > 0 {
problems.print_error_warning_count(start_time.elapsed());
println!(".\n\nRunning tests…\n\n\x1B[36m{}\x1B[39m", "─".repeat(80));
}
}
let (dyn_lib, expects_by_module, layout_interner) =
roc_repl_expect::run::expect_mono_module_to_dylib(
arena,
target,
loaded,
opt_level,
LlvmBackendMode::CliTest,
)
.unwrap();

// Run the tests.
let arena = &bumpalo::Bump::new();
let interns = arena.alloc(interns);
// Print warnings before running tests.
{
debug_assert_eq!(
problems.errors, 0,
"if there were errors, we would have already exited."
);
if problems.warnings > 0 {
problems.print_error_warning_count(start_time.elapsed());
println!(".\n\nRunning tests…\n\n\x1B[36m{}\x1B[39m", "─".repeat(80));
}
}

let mut writer = std::io::stdout();
// Run the tests.
let arena = &bumpalo::Bump::new();
let interns = arena.alloc(interns);

let mut total_failed_count = 0;
let mut total_passed_count = 0;
let mut writer = std::io::stdout();

let mut results_by_module = Vec::new();
let global_layout_interner = layout_interner.into_global();
let mut total_failed_count = 0;
let mut total_passed_count = 0;

let compilation_duration = start_time.elapsed();
let mut results_by_module = Vec::new();
let global_layout_interner = layout_interner.into_global();

for (module_id, expects) in expects_by_module.into_iter() {
let test_start_time = Instant::now();
let compilation_duration = start_time.elapsed();

let (failed_count, passed_count) = roc_repl_expect::run::run_toplevel_expects(
&mut writer,
roc_reporting::report::RenderTarget::ColorTerminal,
arena,
interns,
&global_layout_interner,
&dyn_lib,
&mut expectations,
expects,
)
.unwrap();
for (module_id, expects) in expects_by_module.into_iter() {
let test_start_time = Instant::now();

let tests_duration = test_start_time.elapsed();
let (failed_count, passed_count) = roc_repl_expect::run::run_toplevel_expects(
&mut writer,
roc_reporting::report::RenderTarget::ColorTerminal,
arena,
interns,
&global_layout_interner,
&dyn_lib,
&mut expectations,
expects,
)
.unwrap();

results_by_module.push(ModuleTestResults {
module_id,
failed_count,
passed_count,
tests_duration,
});
let tests_duration = test_start_time.elapsed();

total_failed_count += failed_count;
total_passed_count += passed_count;
}
results_by_module.push(ModuleTestResults {
module_id,
failed_count,
passed_count,
tests_duration,
});

let total_duration = start_time.elapsed();
total_failed_count += failed_count;
total_passed_count += passed_count;
}

if total_failed_count == 0 && total_passed_count == 0 {
let total_duration = start_time.elapsed();
all_files_total_failed_count += total_failed_count;
all_files_total_passed_count += total_passed_count;
if total_failed_count == 0 && total_passed_count == 0 {
// TODO print this in a more nicely formatted way!
println!("No expectations were found.");
} else {
if matches.get_flag(FLAG_VERBOSE) {
println!("Compiled in {} ms.", compilation_duration.as_millis());
for module_test_results in results_by_module {
print_test_results(module_test_results, &sources);
}
} else {
let test_summary_str =
test_summary(total_failed_count, total_passed_count, total_duration);
println!("{test_summary_str}");
}
}
}
if all_files_total_failed_count == 0 && all_files_total_passed_count == 0 {
// TODO print this in a more nicely formatted way!
println!("No expectations were found.");

@@ -637,18 +662,7 @@ pub fn test(matches: &ArgMatches, target: Target) -> io::Result<i32> {
// running tests altogether!
Ok(2)
} else {
if matches.get_flag(FLAG_VERBOSE) {
println!("Compiled in {} ms.", compilation_duration.as_millis());
for module_test_results in results_by_module {
print_test_results(module_test_results, &sources);
}
} else {
let test_summary_str =
test_summary(total_failed_count, total_passed_count, total_duration);
println!("{test_summary_str}");
}

Ok((total_failed_count > 0) as i32)
Ok((all_files_total_failed_count > 0) as i32)
}
}

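For context on the clap mechanics this diff leans on: `.num_args(0..)` turns the positional ROC_FILE argument into a zero-or-more list, and `ArgMatches::get_many` hands back every path that was supplied (or the default when none were). Below is a minimal, self-contained sketch of that pattern, not Roc code; the binary name, argument id, and default filename are placeholders chosen for illustration.

    use std::path::PathBuf;

    use clap::{value_parser, Arg, ArgMatches, Command};

    // Build a command whose single positional argument accepts zero or more paths,
    // mirroring the `num_args(0..)` + `default_value` combination in the diff.
    fn build_sketch_command() -> Command {
        Command::new("test-sketch").arg(
            Arg::new("roc_file")
                .help("The .roc file(s) to test")
                .value_parser(value_parser!(PathBuf))
                .num_args(0..)
                .required(false)
                .default_value("main.roc"),
        )
    }

    // Collect every supplied value; with a default in place `get_many` always
    // returns `Some`, so the `unwrap` matches the usage in the diff.
    fn collect_paths(matches: &ArgMatches) -> Vec<PathBuf> {
        matches
            .get_many::<PathBuf>("roc_file")
            .unwrap()
            .cloned()
            .collect()
    }

    fn main() {
        // Simulate an invocation like `roc test foo.roc bar.roc`.
        let matches = build_sketch_command().get_matches_from(["test-sketch", "foo.roc", "bar.roc"]);
        for path in collect_paths(&matches) {
            println!("would test {}", path.display());
        }
    }

Running it with no file arguments falls back to the default, which is why the per-path existence check in the diff still distinguishes the ValueSource::DefaultValue case when reporting errors.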