Basic benchmarking functionality.

Riley-Kilgore 2024-11-26 09:24:49 -08:00 committed by Riley
parent e97e85a272
commit 9a3513b245
7 changed files with 103 additions and 26 deletions

View File

@@ -40,7 +40,7 @@ use aiken_lang::{
format::{Formatter, MAX_COLUMNS},
gen_uplc::CodeGenerator,
line_numbers::LineNumbers,
test_framework::{Test, TestResult, BenchmarkResult},
test_framework::{Test, TestResult},
tipo::{Type, TypeInfo},
utils, IdGenerator,
};
@@ -315,8 +315,8 @@ where
seed,
property_max_success,
output,
}
},
blueprint_path: self.blueprint_path(None),
};
self.compile(options)
@@ -473,6 +473,7 @@ where
exact_match,
seed,
property_max_success,
output,
} => {
let tests =
self.collect_tests(false, match_tests, exact_match, options.tracing)?;
@@ -500,17 +501,42 @@ where
if e.is_success() {
None
} else {
Some(e.into_error(false))
Some(Error::from_test_result(e, false))
}
})
.collect();
self.event_listener
.handle_event(Event::FinishedTests { seed, tests });
.handle_event(Event::FinishedBenchmarks { seed, tests: tests.clone() });
if !errors.is_empty() {
Err(errors)
} else {
// Write benchmark results to CSV
use std::fs::File;
use std::io::Write;
let mut writer = File::create(&output)
.map_err(|error| vec![Error::FileIo { error, path: output.clone() }])?;
// Write CSV header
writeln!(writer, "test_name,module,memory,cpu")
.map_err(|error| vec![Error::FileIo { error, path: output.clone() }])?;
// Write benchmark results
for test in tests {
if let TestResult::Benchmark(result) = test {
writeln!(
writer,
"{},{},{},{}",
result.test.name,
result.test.module,
result.cost.mem,
result.cost.cpu
).map_err(|error| vec![Error::FileIo { error, path: output.clone() }])?;
}
}
Ok(())
}
}
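For illustration, the CSV written by this block has one row per benchmark, matching the header above; a hypothetical sample (module and test names invented):

    test_name,module,memory,cpu
    bench_encode,my_module/codec,134987,56230164
    bench_decode,my_module/codec,201543,78114522
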
@@ -1101,15 +1127,18 @@ where
use rayon::prelude::*;
let data_types = utils::indexmap::as_ref_values(&self.data_types);
let plutus_version = &self.config.plutus;
tests
.into_par_iter()
.map(|test| match test {
Test::UnitTest(unit_test) => unit_test.run(plutus_version),
.flat_map(|test| match test {
Test::UnitTest(_) => Vec::new(),
Test::PropertyTest(property_test) => {
property_test.run(seed, property_max_success, plutus_version)
property_test
.benchmark(seed, property_max_success, plutus_version)
.into_iter()
.map(TestResult::Benchmark)
.collect::<Vec<_>>()
}
})
.collect::<Vec<TestResult<(Constant, Rc<Type>), PlutusData>>>()
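A minimal, self-contained sketch of the flat_map pattern above, using hypothetical stand-in types: arms that don't apply contribute an empty Vec, while each property test expands into one wrapped result per measurement.

    enum Test {
        Unit,
        Property(u32),
    }

    fn main() {
        let tests = vec![Test::Unit, Test::Property(3)];
        let results: Vec<String> = tests
            .into_iter()
            .flat_map(|test| match test {
                // Unit tests contribute nothing when benchmarking.
                Test::Unit => Vec::new(),
                // Each property test yields one entry per measured run.
                Test::Property(n) => (0..n).map(|i| format!("run #{i}")).collect(),
            })
            .collect();
        assert_eq!(results.len(), 3);
    }
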

View File

@@ -1,6 +1,7 @@
use aiken_lang::ast::Tracing;
use std::path::PathBuf;
use aiken_lang::ast::Tracing;
pub struct Options {
pub code_gen_mode: CodeGenMode,
pub tracing: Tracing,
@@ -32,8 +33,8 @@ pub enum CodeGenMode {
match_tests: Option<Vec<String>>,
exact_match: bool,
seed: u32,
property_max_success: usize
property_max_success: usize,
output: PathBuf,
},
NoOp,
}
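As a rough usage sketch, the new variant would be built along these lines. This is a self-contained mirror of the enum above, not the project's actual construction site; all field values are hypothetical.

    use std::path::PathBuf;

    // Illustrative mirror of the variant added above.
    #[allow(dead_code)]
    enum CodeGenMode {
        Benchmark {
            match_tests: Option<Vec<String>>,
            exact_match: bool,
            seed: u32,
            property_max_success: usize,
            output: PathBuf,
        },
        NoOp,
    }

    fn main() {
        // `output` is the CSV path the benchmark runner writes to.
        let _mode = CodeGenMode::Benchmark {
            match_tests: None,
            exact_match: false,
            seed: 42,
            property_max_success: 100,
            output: PathBuf::from("bench.csv"),
        };
    }
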

View File

@@ -134,6 +134,10 @@ pub(crate) fn find_max_execution_units<T>(xs: &[TestResult<T, T>]) -> (usize, us
(max_mem, max_cpu, max_iter)
}
}
TestResult::Benchmark(..) => {
// todo riley - should this be reachable?
unreachable!("property returned benchmark result ?!")
}
});
(
@@ -141,4 +145,4 @@ pub(crate) fn find_max_execution_units<T>(xs: &[TestResult<T, T>]) -> (usize, us
max_cpu.to_string().len(),
max_iter.to_string().len(),
)
}
}
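The widths returned here drive the padding used when rendering results: the longest value sets the column width. A small worked example, assuming pretty::pad_left right-aligns to the given width:

    fn main() {
        let max_mem: u64 = 134_987;
        let width = max_mem.to_string().len(); // 6
        // Equivalent effect to pretty::pad_left("512", width, " "):
        assert_eq!(format!("{:>width$}", 512), "   512");
    }
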

View File

@@ -50,6 +50,7 @@ fn fmt_test_json(result: &TestResult<UntypedExpr, UntypedExpr>) -> serde_json::V
TestResult::PropertyTestResult(PropertyTestResult { ref test, .. }) => {
&test.on_test_failure
}
TestResult::Benchmark(_) => unreachable!("benchmark returned in JSON output"),
};
let mut test = json!({
@@ -95,6 +96,7 @@ fn fmt_test_json(result: &TestResult<UntypedExpr, UntypedExpr>) -> serde_json::V
Err(err) => json!({"error": err.to_string()}),
};
}
TestResult::Benchmark(_) => unreachable!("benchmark returned in JSON output"),
}
if !result.traces().is_empty() {
@@ -270,4 +272,4 @@ pub fn json_schema() -> serde_json::Value {
"definitions": definitions
}
})
}
}

View File

@@ -215,6 +215,25 @@ impl EventListener for Terminal {
"dependencies".if_supports_color(Stderr, |s| s.bold())
)
}
Event::RunningBenchmarks => {
eprintln!(
"{} {}",
" Benchmarking"
.if_supports_color(Stderr, |s| s.bold())
.if_supports_color(Stderr, |s| s.purple()),
"...".if_supports_color(Stderr, |s| s.bold())
);
}
Event::FinishedBenchmarks { .. } => {
eprintln!(
"{} {}",
" Complete"
.if_supports_color(Stderr, |s| s.bold())
.if_supports_color(Stderr, |s| s.green()),
format!("benchmark results written to CSV")
.if_supports_color(Stderr, |s| s.bold())
);
}
}
}
}
@@ -273,6 +292,20 @@ fn fmt_test(
if *iterations > 1 { "s" } else { "" }
);
}
TestResult::Benchmark(benchmark) => {
let mem_pad = pretty::pad_left(benchmark.cost.mem.to_string(), max_mem, " ");
let cpu_pad = pretty::pad_left(benchmark.cost.cpu.to_string(), max_cpu, " ");
test = format!(
"{test} [mem: {mem_unit}, cpu: {cpu_unit}]",
mem_unit = pretty::style_if(styled, mem_pad, |s| s
.if_supports_color(Stderr, |s| s.cyan())
.to_string()),
cpu_unit = pretty::style_if(styled, cpu_pad, |s| s
.if_supports_color(Stderr, |s| s.cyan())
.to_string()),
);
}
}
// Title
@@ -436,4 +469,4 @@ fn fmt_test_summary<T>(tests: &[&TestResult<T, T>], styled: bool) -> String {
.if_supports_color(Stderr, |s| s.bold())
.to_string()),
)
}
}
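Taken together with the padding above, a benchmark line in the terminal report would render roughly as follows (name and numbers hypothetical; exact colors and spacing depend on the terminal):

    PASS [mem: 134987, cpu: 56230164] bench_encode
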

View File

@@ -286,6 +286,8 @@ mod test {
result.labels
)
}
// todo riley - should this be reachable?
TestResult::Benchmark(..) => unreachable!("property returned benchmark result ?!"),
}
}

View File

@@ -1,11 +1,7 @@
use super::build::{filter_traces_parser, trace_level_parser};
use aiken_lang::ast::{TraceLevel, Tracing};
use aiken_project::{
test_framework::PropertyTest,
watch::{self, watch_project, with_project},
};
use aiken_lang::test_framework::PropertyTest;
use aiken_project::watch::with_project;
use rand::prelude::*;
use std::{path::PathBuf, process};
use std::{io::{self, IsTerminal}, path::PathBuf, process};
#[derive(clap::Args)]
/// Benchmark an Aiken project
@@ -31,6 +27,14 @@ pub struct Args {
/// It forces test names to match exactly
#[clap(short, long)]
exact_match: bool,
/// Environment to use for benchmarking
// No short flag: `-e` is already taken by `exact_match` above.
#[clap(long)]
env: Option<String>,
/// Output file for benchmark results
#[clap(short, long)]
output: PathBuf,
}
pub fn exec(
@@ -40,22 +44,24 @@ pub fn exec(
exact_match,
seed,
max_success,
env,
output,
}: Args,
) -> miette::Result<()> {
// Note: we deliberately don't run the usual check flow here.
let mut rng = rand::thread_rng();
let seed = seed.unwrap_or_else(|| rng.gen());
let result = with_project(directory.as_deref(), false, |p| {
let result = with_project(directory.as_deref(), false, !io::stdout().is_terminal(), |p| {
// We don't want to check here, we want to benchmark
p.benchmark(
match_tests.clone(),
exact_match,
seed,
max_success
max_success,
env.clone(),
output.clone(),
)
});
// todo riley - We need to either print the results or write them to a file.
result.map_err(|_| process::exit(1))
}
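Assuming this is wired up as an `aiken bench` subcommand (the subcommand name is an assumption; flag names follow the clap fields above), an invocation exercising the new options would look something like:

    aiken bench --seed 42 --max-success 100 --output results.csv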