Merge pull request #800 from aiken-lang/codegen-traces

Adding a new option to toggle codegen traces
This commit is contained in:
Matthias Benkort 2024-01-19 14:41:25 +01:00 committed by GitHub
commit 1f6e719fde
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 763 additions and 379 deletions

View File

@ -2,10 +2,24 @@
## UNRELEASED ## UNRELEASED
### Added
- **aiken**: New `--trace-level` option for the `check` and `build` commands to allow choosing the verbosity level of traces amongst three levels: silent, compact & verbose. @MicroProofs @KtorZ
- **aiken**: New `--filter-traces` option for the `check` and `build` commands to enable restricting traces with more granularity between user-defined traces, compiler-generated traces or both. @MicroProofs @KtorZ.
### Fixed ### Fixed
- **aiken-lang**: Fix flat encoding and decoding of large integer values. @KtorZ - **aiken-lang**: Fix flat encoding and decoding of large integer values. @KtorZ
### Removed
- **aiken**: The options `--keep-traces` (on the `build` command) and `--no-traces` (on the `check` command) have been removed; superseded by the new options. @MicroProofs @KtorZ
> [!TIP]
>
> - If you've been using `aiken check --no-traces`, you can recover the old behavior by doing `aiken check --trace-level silent`.
> - If you've been using `aiken build --keep-traces`, you can recover the old behavior by doing `aiken build --trace-level verbose`.
## v1.0.21-alpha - 2023-12-04 ## v1.0.21-alpha - 2023-12-04
### Added ### Added

View File

@ -1,6 +1,7 @@
use crate::{ use crate::{
builtins::{self, bool, g1_element, g2_element}, builtins::{self, bool, g1_element, g2_element},
expr::{TypedExpr, UntypedExpr}, expr::{TypedExpr, UntypedExpr},
line_numbers::LineNumbers,
parser::token::{Base, Token}, parser::token::{Base, Token},
tipo::{PatternConstructor, Type, TypeInfo}, tipo::{PatternConstructor, Type, TypeInfo},
}; };
@ -42,6 +43,7 @@ pub struct Module<Info, Definitions> {
pub docs: Vec<String>, pub docs: Vec<String>,
pub type_info: Info, pub type_info: Info,
pub definitions: Vec<Definitions>, pub definitions: Vec<Definitions>,
pub lines: LineNumbers,
pub kind: ModuleKind, pub kind: ModuleKind,
} }
@ -1360,25 +1362,51 @@ pub enum TraceKind {
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Tracing { pub enum Tracing {
NoTraces, UserDefined(TraceLevel),
KeepTraces, CompilerGenerated(TraceLevel),
All(TraceLevel),
} }
impl From<bool> for Tracing { #[derive(Debug, Clone, Copy, PartialEq, Eq)]
fn from(keep: bool) -> Self { pub enum TraceLevel {
if keep { Silent, // No traces
Tracing::KeepTraces Compact, // Line numbers only
} else { Verbose, // Full verbose traces as provided by the user or the compiler
Tracing::NoTraces }
impl Tracing {
pub fn silent() -> Self {
Tracing::All(TraceLevel::Silent)
}
/// Get the tracing level based on the context we're in.
pub fn trace_level(&self, is_code_gen: bool) -> TraceLevel {
match self {
Tracing::UserDefined(lvl) => {
if is_code_gen {
TraceLevel::Silent
} else {
*lvl
}
}
Tracing::CompilerGenerated(lvl) => {
if is_code_gen {
*lvl
} else {
TraceLevel::Silent
}
}
Tracing::All(lvl) => *lvl,
} }
} }
} }
impl From<Tracing> for bool { impl Display for TraceLevel {
fn from(value: Tracing) -> Self { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
match value { match self {
Tracing::NoTraces => false, TraceLevel::Silent => f.write_str("silent"),
Tracing::KeepTraces => true, TraceLevel::Compact => f.write_str("compact"),
TraceLevel::Verbose => f.write_str("verbose"),
} }
} }
} }

View File

@ -19,18 +19,19 @@ use uplc::{
use crate::{ use crate::{
ast::{ ast::{
AssignmentKind, BinOp, Bls12_381Point, Curve, Pattern, Span, TypedArg, TypedClause, AssignmentKind, BinOp, Bls12_381Point, Curve, Pattern, Span, TraceLevel, TypedArg,
TypedDataType, TypedFunction, TypedPattern, TypedValidator, UnOp, TypedClause, TypedDataType, TypedFunction, TypedPattern, TypedValidator, UnOp,
}, },
builtins::{bool, data, int, list, string, void}, builtins::{bool, data, int, list, string, void},
expr::TypedExpr, expr::TypedExpr,
gen_uplc::builder::{ gen_uplc::builder::{
check_replaceable_opaque_type, convert_opaque_type, erase_opaque_type_operations, check_replaceable_opaque_type, convert_opaque_type, erase_opaque_type_operations,
find_and_replace_generics, find_list_clause_or_default_first, get_arg_type_name, find_and_replace_generics, find_list_clause_or_default_first, get_arg_type_name,
get_generic_id_and_type, get_generic_variant_name, get_src_code_by_span, monomorphize, get_generic_id_and_type, get_generic_variant_name, get_line_columns_by_span,
pattern_has_conditions, wrap_as_multi_validator, wrap_validator_condition, CodeGenFunction, get_src_code_by_span, monomorphize, pattern_has_conditions, wrap_as_multi_validator,
SpecificClause, wrap_validator_condition, CodeGenFunction, SpecificClause,
}, },
line_numbers::LineNumbers,
tipo::{ tipo::{
ModuleValueConstructor, PatternConstructor, Type, TypeInfo, ValueConstructor, ModuleValueConstructor, PatternConstructor, Type, TypeInfo, ValueConstructor,
ValueConstructorVariant, ValueConstructorVariant,
@ -55,9 +56,9 @@ pub struct CodeGenerator<'a> {
functions: IndexMap<FunctionAccessKey, &'a TypedFunction>, functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
data_types: IndexMap<DataTypeKey, &'a TypedDataType>, data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
module_types: IndexMap<&'a String, &'a TypeInfo>, module_types: IndexMap<&'a String, &'a TypeInfo>,
module_src: IndexMap<String, String>, module_src: IndexMap<String, (String, LineNumbers)>,
/// immutable option /// immutable option
tracing: bool, tracing: TraceLevel,
/// mutable index maps that are reset /// mutable index maps that are reset
defined_functions: IndexMap<FunctionAccessKey, ()>, defined_functions: IndexMap<FunctionAccessKey, ()>,
special_functions: CodeGenSpecialFuncs, special_functions: CodeGenSpecialFuncs,
@ -74,8 +75,8 @@ impl<'a> CodeGenerator<'a> {
functions: IndexMap<FunctionAccessKey, &'a TypedFunction>, functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
data_types: IndexMap<DataTypeKey, &'a TypedDataType>, data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
module_types: IndexMap<&'a String, &'a TypeInfo>, module_types: IndexMap<&'a String, &'a TypeInfo>,
module_src: IndexMap<String, String>, module_src: IndexMap<String, (String, LineNumbers)>,
tracing: bool, tracing: TraceLevel,
) -> Self { ) -> Self {
CodeGenerator { CodeGenerator {
functions, functions,
@ -132,10 +133,10 @@ impl<'a> CodeGenerator<'a> {
air_tree_fun = wrap_validator_condition(air_tree_fun, self.tracing); air_tree_fun = wrap_validator_condition(air_tree_fun, self.tracing);
let src_code = self.module_src.get(module_name).unwrap().clone(); let (src_code, lines) = self.module_src.get(module_name).unwrap().clone();
let mut validator_args_tree = let mut validator_args_tree =
self.check_validator_args(&fun.arguments, true, air_tree_fun, &src_code); self.check_validator_args(&fun.arguments, true, air_tree_fun, &src_code, &lines);
validator_args_tree = AirTree::no_op().hoist_over(validator_args_tree); validator_args_tree = AirTree::no_op().hoist_over(validator_args_tree);
@ -154,8 +155,13 @@ impl<'a> CodeGenerator<'a> {
air_tree_fun_other = wrap_validator_condition(air_tree_fun_other, self.tracing); air_tree_fun_other = wrap_validator_condition(air_tree_fun_other, self.tracing);
let mut validator_args_tree_other = let mut validator_args_tree_other = self.check_validator_args(
self.check_validator_args(&other.arguments, true, air_tree_fun_other, &src_code); &other.arguments,
true,
air_tree_fun_other,
&src_code,
&lines,
);
validator_args_tree_other = AirTree::no_op().hoist_over(validator_args_tree_other); validator_args_tree_other = AirTree::no_op().hoist_over(validator_args_tree_other);
@ -468,20 +474,36 @@ impl<'a> CodeGenerator<'a> {
let air_value = self.build(value, module_name); let air_value = self.build(value, module_name);
let msg = get_src_code_by_span(module_name, location, &self.module_src); let msg_func = match self.tracing {
TraceLevel::Silent => None,
TraceLevel::Verbose | TraceLevel::Compact => {
if kind.is_expect() {
let msg = match self.tracing {
TraceLevel::Silent => unreachable!("excluded from pattern guards"),
TraceLevel::Compact => get_line_columns_by_span(
module_name,
location,
&self.module_src,
)
.to_string(),
TraceLevel::Verbose => {
get_src_code_by_span(module_name, location, &self.module_src)
}
};
let msg_func_name = msg.split_whitespace().join(""); let msg_func_name = msg.split_whitespace().join("");
self.special_functions.insert_new_function( self.special_functions.insert_new_function(
msg_func_name.clone(), msg_func_name.clone(),
Term::string(msg), Term::string(msg),
string(), string(),
); );
let msg_func = if self.tracing && kind.is_expect() { Some(self.special_functions.use_function_msg(msg_func_name))
Some(self.special_functions.use_function_msg(msg_func_name)) } else {
} else { None
None }
}
}; };
self.assignment( self.assignment(
@ -1586,19 +1608,19 @@ impl<'a> CodeGenerator<'a> {
// mutate code_gen_funcs and defined_data_types in this if branch // mutate code_gen_funcs and defined_data_types in this if branch
if function.is_none() && defined_data_types.get(&data_type_name).is_none() { if function.is_none() && defined_data_types.get(&data_type_name).is_none() {
let (msg_term, error_term) = if self.tracing { let (msg_term, error_term) = match self.tracing {
let msg = AirMsg::LocalVar("__param_msg".to_string()); TraceLevel::Silent => (None, AirTree::error(tipo.clone(), false)),
TraceLevel::Compact | TraceLevel::Verbose => {
( let msg = AirMsg::LocalVar("__param_msg".to_string());
Some(msg.clone()), (
AirTree::trace( Some(msg.clone()),
msg.to_air_tree(), AirTree::trace(
tipo.clone(), msg.to_air_tree(),
AirTree::error(tipo.clone(), false), tipo.clone(),
), AirTree::error(tipo.clone(), false),
) ),
} else { )
(None, AirTree::error(tipo.clone(), false)) }
}; };
defined_data_types.insert(data_type_name.clone(), 1); defined_data_types.insert(data_type_name.clone(), 1);
@ -1717,16 +1739,15 @@ impl<'a> CodeGenerator<'a> {
} }
} }
let code_gen_func = if self.tracing { let code_gen_func = match self.tracing {
CodeGenFunction::Function { TraceLevel::Silent => CodeGenFunction::Function {
body: func_body,
params: vec!["__param_0".to_string(), "__param_msg".to_string()],
}
} else {
CodeGenFunction::Function {
body: func_body, body: func_body,
params: vec!["__param_0".to_string()], params: vec!["__param_0".to_string()],
} },
TraceLevel::Compact | TraceLevel::Verbose => CodeGenFunction::Function {
body: func_body,
params: vec!["__param_0".to_string(), "__param_msg".to_string()],
},
}; };
self.code_gen_functions self.code_gen_functions
@ -1737,15 +1758,14 @@ impl<'a> CodeGenerator<'a> {
defined_data_types.insert(data_type_name.to_string(), 1); defined_data_types.insert(data_type_name.to_string(), 1);
} }
let args = if self.tracing { let args = match self.tracing {
vec![ TraceLevel::Silent => vec![value],
TraceLevel::Compact | TraceLevel::Verbose => vec![
value, value,
msg_func msg_func
.expect("should be unreachable: no msg func with tracing enabled.") .expect("should be unreachable: no msg func with tracing enabled.")
.to_air_tree(), .to_air_tree(),
] ],
} else {
vec![value]
}; };
let module_fn = ValueConstructorVariant::ModuleFn { let module_fn = ValueConstructorVariant::ModuleFn {
@ -2736,6 +2756,7 @@ impl<'a> CodeGenerator<'a> {
has_context: bool, has_context: bool,
body: AirTree, body: AirTree,
src_code: &str, src_code: &str,
lines: &LineNumbers,
) -> AirTree { ) -> AirTree {
let checked_args = arguments let checked_args = arguments
.iter() .iter()
@ -2749,23 +2770,31 @@ impl<'a> CodeGenerator<'a> {
let actual_type = convert_opaque_type(&arg.tipo, &self.data_types); let actual_type = convert_opaque_type(&arg.tipo, &self.data_types);
let msg = src_code let msg_func = match self.tracing {
.get(arg_span.start..arg_span.end) TraceLevel::Silent => None,
.expect("Out of bounds span") TraceLevel::Compact | TraceLevel::Verbose => {
.to_string(); let msg = match self.tracing {
TraceLevel::Silent => unreachable!("excluded from pattern guards"),
TraceLevel::Compact => lines
.line_and_column_number(arg_span.start)
.expect("Out of bounds span")
.to_string(),
TraceLevel::Verbose => src_code
.get(arg_span.start..arg_span.end)
.expect("Out of bounds span")
.to_string(),
};
let msg_func_name = msg.split_whitespace().join(""); let msg_func_name = msg.split_whitespace().join("");
self.special_functions.insert_new_function( self.special_functions.insert_new_function(
msg_func_name.to_string(), msg_func_name.to_string(),
Term::string(msg), Term::string(msg),
string(), string(),
); );
let msg_func = if self.tracing && !actual_type.is_data() { Some(self.special_functions.use_function_msg(msg_func_name))
Some(self.special_functions.use_function_msg(msg_func_name)) }
} else {
None
}; };
let assign = self.assignment( let assign = self.assignment(
@ -3721,14 +3750,16 @@ impl<'a> CodeGenerator<'a> {
fn gen_uplc(&mut self, ir: Air, arg_stack: &mut Vec<Term<Name>>) -> Option<Term<Name>> { fn gen_uplc(&mut self, ir: Air, arg_stack: &mut Vec<Term<Name>>) -> Option<Term<Name>> {
// Going to mark the changes made to code gen after air tree implementation // Going to mark the changes made to code gen after air tree implementation
let error_term = if self.tracing && air_holds_msg(&ir) { let error_term = match self.tracing {
// In the case of an air that holds a msg and tracing is active TraceLevel::Silent => Term::Error,
// we pop the msg off the stack first TraceLevel::Compact | TraceLevel::Verbose => {
let msg = arg_stack.pop().unwrap(); if air_holds_msg(&ir) {
let msg = arg_stack.pop().unwrap();
Term::Error.delayed_trace(msg) Term::Error.delayed_trace(msg)
} else { } else {
Term::Error Term::Error
}
}
}; };
match ir { match ir {

View File

@ -15,11 +15,12 @@ use uplc::{
use crate::{ use crate::{
ast::{ ast::{
AssignmentKind, DataType, Pattern, Span, TypedArg, TypedClause, TypedClauseGuard, AssignmentKind, DataType, Pattern, Span, TraceLevel, TypedArg, TypedClause,
TypedDataType, TypedPattern, TypedClauseGuard, TypedDataType, TypedPattern,
}, },
builtins::{bool, data, function, int, list, string, void}, builtins::{bool, data, function, int, list, string, void},
expr::TypedExpr, expr::TypedExpr,
line_numbers::{LineColumn, LineNumbers},
tipo::{PatternConstructor, TypeVar, ValueConstructor, ValueConstructorVariant}, tipo::{PatternConstructor, TypeVar, ValueConstructor, ValueConstructorVariant},
}; };
@ -1621,52 +1622,12 @@ pub fn special_case_builtin(
pub fn wrap_as_multi_validator( pub fn wrap_as_multi_validator(
spend: Term<Name>, spend: Term<Name>,
mint: Term<Name>, mint: Term<Name>,
trace: bool, trace: TraceLevel,
spend_name: String, spend_name: String,
mint_name: String, mint_name: String,
) -> Term<Name> { ) -> Term<Name> {
if trace { match trace {
let trace_string = format!( TraceLevel::Silent | TraceLevel::Compact => Term::equals_integer()
"Incorrect redeemer type for validator {}.
Double check you have wrapped the redeemer type as specified in your plutus.json",
spend_name
);
let error_term = Term::Error.delayed_trace(Term::var("__incorrect_second_arg_type"));
Term::var("__second_arg")
.delayed_choose_data(
Term::equals_integer()
.apply(Term::integer(0.into()))
.apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("__second_arg")))
.delayed_if_then_else(
mint.apply(Term::var("__first_arg"))
.apply(Term::var("__second_arg"))
.delayed_trace(Term::string(format!(
"Running 2 arg validator {}",
mint_name
))),
spend
.apply(Term::var("__first_arg"))
.apply(Term::head_list().apply(
Term::var(CONSTR_FIELDS_EXPOSER).apply(Term::var("__second_arg")),
))
.delayed_trace(Term::string(format!(
"Running 3 arg validator {}",
spend_name
))),
),
error_term.clone(),
error_term.clone(),
error_term.clone(),
error_term,
)
.lambda("__incorrect_second_arg_type")
.apply(Term::string(trace_string))
.lambda("__second_arg")
.lambda("__first_arg")
} else {
Term::equals_integer()
.apply(Term::integer(0.into())) .apply(Term::integer(0.into()))
.apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("__second_arg"))) .apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("__second_arg")))
.delayed_if_then_else( .delayed_if_then_else(
@ -1678,7 +1639,50 @@ pub fn wrap_as_multi_validator(
), ),
) )
.lambda("__second_arg") .lambda("__second_arg")
.lambda("__first_arg") .lambda("__first_arg"),
TraceLevel::Verbose => {
let trace_string = format!(
"Incorrect redeemer type for validator {}.
Double check you have wrapped the redeemer type as specified in your plutus.json",
spend_name
);
let error_term = Term::Error.delayed_trace(Term::var("__incorrect_second_arg_type"));
let then_term = mint
.apply(Term::var("__first_arg"))
.apply(Term::var("__second_arg"));
let else_term = spend.apply(Term::var("__first_arg")).apply(
Term::head_list()
.apply(Term::var(CONSTR_FIELDS_EXPOSER).apply(Term::var("__second_arg"))),
);
Term::var("__second_arg")
.delayed_choose_data(
Term::equals_integer()
.apply(Term::integer(0.into()))
.apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("__second_arg")))
.delayed_if_then_else(
then_term.delayed_trace(Term::string(format!(
"Running 2 arg validator {}",
mint_name
))),
else_term.delayed_trace(Term::string(format!(
"Running 3 arg validator {}",
spend_name
))),
),
error_term.clone(),
error_term.clone(),
error_term.clone(),
error_term,
)
.lambda("__incorrect_second_arg_type")
.apply(Term::string(trace_string))
.lambda("__second_arg")
.lambda("__first_arg")
}
} }
} }
@ -1717,16 +1721,15 @@ pub fn cast_validator_args(term: Term<Name>, arguments: &[TypedArg]) -> Term<Nam
term term
} }
pub fn wrap_validator_condition(air_tree: AirTree, trace: bool) -> AirTree { pub fn wrap_validator_condition(air_tree: AirTree, trace: TraceLevel) -> AirTree {
let success_branch = vec![(air_tree, AirTree::void())]; let success_branch = vec![(air_tree, AirTree::void())];
let otherwise = if trace { let otherwise = match trace {
AirTree::trace( TraceLevel::Silent | TraceLevel::Compact => AirTree::error(void(), true),
TraceLevel::Verbose => AirTree::trace(
AirTree::string("Validator returned false"), AirTree::string("Validator returned false"),
void(), void(),
AirTree::error(void(), true), AirTree::error(void(), true),
) ),
} else {
AirTree::error(void(), true)
}; };
AirTree::if_branches(success_branch, void(), otherwise) AirTree::if_branches(success_branch, void(), otherwise)
@ -1758,9 +1761,9 @@ pub fn extract_constant(term: &Term<Name>) -> Option<Rc<UplcConstant>> {
pub fn get_src_code_by_span( pub fn get_src_code_by_span(
module_name: &String, module_name: &String,
span: &Span, span: &Span,
module_src: &IndexMap<String, String>, module_src: &IndexMap<String, (String, LineNumbers)>,
) -> String { ) -> String {
let src = module_src let (src, _) = module_src
.get(module_name) .get(module_name)
.unwrap_or_else(|| panic!("Missing module {module_name}")); .unwrap_or_else(|| panic!("Missing module {module_name}"));
@ -1769,6 +1772,20 @@ pub fn get_src_code_by_span(
.to_string() .to_string()
} }
/// Resolve the line/column at which `span` starts within the named module,
/// using that module's pre-computed [`LineNumbers`] index.
///
/// Panics when the module is unknown or when the span starts out of bounds.
pub fn get_line_columns_by_span(
    module_name: &String,
    span: &Span,
    module_src: &IndexMap<String, (String, LineNumbers)>,
) -> LineColumn {
    let entry = module_src
        .get(module_name)
        .unwrap_or_else(|| panic!("Missing module {module_name}"));

    entry
        .1
        .line_and_column_number(span.start)
        .expect("Out of bounds span")
}
pub fn air_holds_msg(air: &Air) -> bool { pub fn air_holds_msg(air: &Air) -> bool {
match air { match air {
Air::AssertConstr { .. } | Air::AssertBool { .. } | Air::FieldsEmpty | Air::ListEmpty => { Air::AssertConstr { .. } | Air::AssertBool { .. } | Air::FieldsEmpty | Air::ListEmpty => {

View File

@ -10,6 +10,7 @@ pub mod expr;
pub mod format; pub mod format;
pub mod gen_uplc; pub mod gen_uplc;
pub mod levenshtein; pub mod levenshtein;
pub mod line_numbers;
pub mod parser; pub mod parser;
pub mod pretty; pub mod pretty;
pub mod tipo; pub mod tipo;

View File

@ -0,0 +1,126 @@
use std::fmt::{self, Display};
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LineNumbers {
    /// Byte offsets at which each line starts. Always begins with 0 for a
    /// non-empty source; kept empty for an empty source.
    line_starts: Vec<usize>,
    /// Total length of the source, in bytes.
    length: usize,
    /// Byte offset of the last line start, if any. Retained so the derived
    /// `Debug` output (used by snapshot tests) stays stable; lookups no
    /// longer consult it (see `line_number` below).
    last: Option<usize>,
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LineColumn {
    /// 1-indexed line number.
    pub line: usize,
    /// 1-indexed column, counted in *bytes* from the line start.
    pub column: usize,
}

impl Display for LineColumn {
    /// Renders as `L<line>;<column>`, e.g. `L14;42`.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        // write! formats directly into the formatter, avoiding the
        // intermediate String that `write_str(&format!(..))` allocated.
        write!(f, "L{};{}", self.line, self.column)
    }
}

impl LineNumbers {
    /// Index the line starts of `src` for later byte-offset → line/column
    /// lookups.
    pub fn new(src: &str) -> Self {
        let line_starts: Vec<usize> = std::iter::once(0)
            .chain(src.match_indices('\n').map(|(i, _)| i + 1))
            .collect();

        let length = src.len();

        Self {
            length,
            last: line_starts.last().cloned(),
            // An empty source has no lines at all, so keep no starts for it.
            line_starts: if length > 0 { line_starts } else { Vec::new() },
        }
    }

    /// Get the 1-indexed line number containing `byte_index`, or `None` when
    /// the index falls outside the source (i.e. `byte_index >= length`).
    pub fn line_number(&self, byte_index: usize) -> Option<usize> {
        // Reject out-of-range indices up front. The previous implementation
        // compared the binary-search insertion point (a *line index*) against
        // `last` (a *byte offset*), which wrongly returned None for valid
        // offsets in a single-line source without a trailing newline
        // (e.g. "ab" at index 1).
        if byte_index >= self.length {
            return None;
        }

        match self.line_starts.binary_search(&byte_index) {
            // Exact hit on a line start; lines are 1-indexed.
            Ok(line) => Some(line + 1),
            // Otherwise the byte belongs to the line starting just before the
            // insertion point, which equals `insertion_point` once 1-indexed.
            Err(insertion_point) => Some(insertion_point),
        }
    }

    /// Get the 1-indexed line and byte-based column for `byte_index`, or
    /// `None` when the index is out of range.
    pub fn line_and_column_number(&self, byte_index: usize) -> Option<LineColumn> {
        let line = self.line_number(byte_index)?;
        let column = byte_index - self.line_starts.get(line - 1).copied().unwrap_or_default() + 1;
        Some(LineColumn { line, column })
    }

    /// 0-indexed line and character to byte index; clamps to the source
    /// length when `line` does not exist.
    #[allow(dead_code)]
    pub fn byte_index(&self, line: usize, character: usize) -> usize {
        match self.line_starts.get(line) {
            Some(line_index) => *line_index + character,
            None => self.length,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use chumsky::text::Character;
    use indoc::indoc;

    // Helper: assert that byte offset `ix` in `src` maps to `lcol`. The
    // failure message prints the byte found at `ix` (or OUT-OF-BOUNDS) to
    // ease debugging.
    fn assert_line_column(src: &str, ix: usize, lcol: Option<LineColumn>) {
        let lines = LineNumbers::new(src);
        println!("{lines:?}");
        let byte = src
            .as_bytes()
            .get(ix)
            .map(|b| {
                if b.is_ascii() {
                    format!("{}", b.to_char())
                } else {
                    format!("{b}")
                }
            })
            .unwrap_or_else(|| "OUT-OF-BOUNDS".to_string());
        assert_eq!(
            lines.line_and_column_number(ix),
            lcol,
            "\n{src}\n--> at index {ix} ({byte})\n",
        );
    }

    // An empty source has no lines: every index, including 0, is out of range.
    #[test]
    fn out_of_range_byte_index() {
        let src = indoc! { r#""# };
        assert_line_column(src, 42, None);
        assert_line_column(src, 0, None);
    }

    // Lines and columns are 1-indexed; index 4 is the start of line 2.
    #[test]
    fn basic() {
        let src = indoc! { r#"
            foo
            bar
        "# };
        assert_line_column(src, 0, Some(LineColumn { line: 1, column: 1 }));
        assert_line_column(src, 2, Some(LineColumn { line: 1, column: 3 }));
        assert_line_column(src, 4, Some(LineColumn { line: 2, column: 1 }));
    }

    // Columns count *bytes*, not characters: '💩' is 4 bytes, so index 2 is
    // still inside it (column 3 on line 1) and line 2 starts at byte 5.
    #[test]
    fn unicode() {
        let src = indoc! { r#"
            💩
            foo
        "# };
        assert_line_column(src, 0, Some(LineColumn { line: 1, column: 1 }));
        assert_line_column(src, 2, Some(LineColumn { line: 1, column: 3 }));
        assert_line_column(src, 5, Some(LineColumn { line: 2, column: 1 }));
    }
}

View File

@ -15,7 +15,7 @@ pub use definition::parser as definition;
pub use expr::parser as expression; pub use expr::parser as expression;
pub use pattern::parser as pattern; pub use pattern::parser as pattern;
use crate::ast; use crate::{ast, line_numbers::LineNumbers};
use chumsky::prelude::*; use chumsky::prelude::*;
use error::ParseError; use error::ParseError;
use extra::ModuleExtra; use extra::ModuleExtra;
@ -30,8 +30,11 @@ pub fn module(
let definitions = definition().repeated().then_ignore(end()).parse(stream)?; let definitions = definition().repeated().then_ignore(end()).parse(stream)?;
let lines = LineNumbers::new(src);
let module = ast::UntypedModule { let module = ast::UntypedModule {
kind, kind,
lines,
definitions, definitions,
docs: vec![], docs: vec![],
name: "".to_string(), name: "".to_string(),

View File

@ -19,5 +19,17 @@ Module {
}, },
), ),
], ],
lines: LineNumbers {
line_starts: [
0,
10,
11,
27,
],
length: 43,
last: Some(
27,
),
},
kind: Validator, kind: Validator,
} }

View File

@ -197,5 +197,32 @@ Module {
}, },
), ),
], ],
lines: LineNumbers {
line_starts: [
0,
13,
27,
34,
36,
37,
50,
64,
71,
73,
74,
87,
104,
106,
107,
120,
138,
154,
156,
],
length: 156,
last: Some(
156,
),
},
kind: Validator, kind: Validator,
} }

View File

@ -48,5 +48,18 @@ Module {
}, },
), ),
], ],
lines: LineNumbers {
line_starts: [
0,
11,
27,
31,
33,
],
length: 33,
last: Some(
33,
),
},
kind: Validator, kind: Validator,
} }

View File

@ -46,5 +46,18 @@ Module {
}, },
), ),
], ],
lines: LineNumbers {
line_starts: [
0,
11,
25,
29,
31,
],
length: 31,
last: Some(
31,
),
},
kind: Validator, kind: Validator,
} }

View File

@ -20,5 +20,15 @@ Module {
}, },
), ),
], ],
lines: LineNumbers {
line_starts: [
0,
16,
],
length: 16,
last: Some(
16,
),
},
kind: Validator, kind: Validator,
} }

View File

@ -1,5 +1,5 @@
use crate::{ use crate::{
ast::{Definition, ModuleKind, Tracing, TypedModule, UntypedModule}, ast::{Definition, ModuleKind, TraceLevel, Tracing, TypedModule, UntypedModule},
builtins, builtins,
expr::TypedExpr, expr::TypedExpr,
parser, parser,
@ -31,7 +31,7 @@ fn check_module(
kind, kind,
"test/project", "test/project",
&module_types, &module_types,
Tracing::KeepTraces, Tracing::All(TraceLevel::Verbose),
&mut warnings, &mut warnings,
); );

View File

@ -1,3 +1,4 @@
use crate::line_numbers::LineNumbers;
use std::{cmp::Ordering, collections::HashMap, rc::Rc}; use std::{cmp::Ordering, collections::HashMap, rc::Rc};
use vec1::Vec1; use vec1::Vec1;
@ -5,9 +6,9 @@ use crate::{
ast::{ ast::{
Annotation, Arg, ArgName, AssignmentKind, BinOp, Bls12_381Point, ByteArrayFormatPreference, Annotation, Arg, ArgName, AssignmentKind, BinOp, Bls12_381Point, ByteArrayFormatPreference,
CallArg, ClauseGuard, Constant, Curve, IfBranch, LogicalOpChainKind, RecordUpdateSpread, CallArg, ClauseGuard, Constant, Curve, IfBranch, LogicalOpChainKind, RecordUpdateSpread,
Span, TraceKind, Tracing, TypedArg, TypedCallArg, TypedClause, TypedClauseGuard, Span, TraceKind, TraceLevel, Tracing, TypedArg, TypedCallArg, TypedClause,
TypedIfBranch, TypedPattern, TypedRecordUpdateArg, UnOp, UntypedArg, UntypedClause, TypedClauseGuard, TypedIfBranch, TypedPattern, TypedRecordUpdateArg, UnOp, UntypedArg,
UntypedClauseGuard, UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg, UntypedClause, UntypedClauseGuard, UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg,
}, },
builtins::{bool, byte_array, function, g1_element, g2_element, int, list, string, tuple}, builtins::{bool, byte_array, function, g1_element, g2_element, int, list, string, tuple},
expr::{FnStyle, TypedExpr, UntypedExpr}, expr::{FnStyle, TypedExpr, UntypedExpr},
@ -26,6 +27,8 @@ use super::{
#[derive(Debug)] #[derive(Debug)]
pub(crate) struct ExprTyper<'a, 'b> { pub(crate) struct ExprTyper<'a, 'b> {
pub(crate) lines: &'a LineNumbers,
pub(crate) environment: &'a mut Environment<'b>, pub(crate) environment: &'a mut Environment<'b>,
// We tweak the tracing behavior during type-check. Traces are either kept or left out of the // We tweak the tracing behavior during type-check. Traces are either kept or left out of the
@ -421,24 +424,36 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
}, },
}; };
let text = TypedExpr::String { let text = match self.tracing.trace_level(false) {
location, TraceLevel::Verbose => Some(TypedExpr::String {
tipo: string(), location,
value: format!( tipo: string(),
"{} ? False", value: format!(
format::Formatter::new() "{} ? False",
.expr(&value, false) format::Formatter::new()
.to_pretty_string(999) .expr(&value, false)
), .to_pretty_string(999)
),
}),
TraceLevel::Compact => Some(TypedExpr::String {
location,
tipo: string(),
value: self
.lines
.line_and_column_number(location.start)
.expect("Spans are within bounds.")
.to_string(),
}),
TraceLevel::Silent => None,
}; };
let typed_value = self.infer(value)?; let typed_value = self.infer(value)?;
self.unify(bool(), typed_value.tipo(), typed_value.location(), false)?; self.unify(bool(), typed_value.tipo(), typed_value.location(), false)?;
match self.tracing { match self.tracing.trace_level(false) {
Tracing::NoTraces => Ok(typed_value), TraceLevel::Silent => Ok(typed_value),
Tracing::KeepTraces => Ok(TypedExpr::If { TraceLevel::Verbose | TraceLevel::Compact => Ok(TypedExpr::If {
location, location,
branches: vec1::vec1![IfBranch { branches: vec1::vec1![IfBranch {
condition: typed_value, condition: typed_value,
@ -448,7 +463,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
final_else: Box::new(TypedExpr::Trace { final_else: Box::new(TypedExpr::Trace {
location, location,
tipo: bool(), tipo: bool(),
text: Box::new(text), text: Box::new(text.expect("TraceLevel::Silent excluded from pattern-guard")),
then: Box::new(var_false), then: Box::new(var_false),
}), }),
tipo: bool(), tipo: bool(),
@ -1817,9 +1832,23 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
}) })
} }
match self.tracing { match self.tracing.trace_level(false) {
Tracing::NoTraces => Ok(then), TraceLevel::Silent => Ok(then),
Tracing::KeepTraces => Ok(TypedExpr::Trace { TraceLevel::Compact => Ok(TypedExpr::Trace {
location,
tipo,
then: Box::new(then),
text: Box::new(TypedExpr::String {
location,
tipo: string(),
value: self
.lines
.line_and_column_number(location.start)
.expect("Spans are within bounds.")
.to_string(),
}),
}),
TraceLevel::Verbose => Ok(TypedExpr::Trace {
location, location,
tipo, tipo,
then: Box::new(then), then: Box::new(then),
@ -1976,12 +2005,17 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
self.environment.instantiate(t, ids, &self.hydrator) self.environment.instantiate(t, ids, &self.hydrator)
} }
pub fn new(environment: &'a mut Environment<'b>, tracing: Tracing) -> Self { pub fn new(
environment: &'a mut Environment<'b>,
lines: &'a LineNumbers,
tracing: Tracing,
) -> Self {
Self { Self {
hydrator: Hydrator::new(), hydrator: Hydrator::new(),
environment, environment,
tracing, tracing,
ungeneralised_function_used: false, ungeneralised_function_used: false,
lines,
} }
} }

View File

@ -8,6 +8,7 @@ use crate::{
}, },
builtins, builtins,
builtins::function, builtins::function,
line_numbers::LineNumbers,
IdGenerator, IdGenerator,
}; };
@ -79,8 +80,14 @@ impl UntypedModule {
} }
for def in consts.into_iter().chain(not_consts) { for def in consts.into_iter().chain(not_consts) {
let definition = let definition = infer_definition(
infer_definition(def, &name, &mut hydrators, &mut environment, tracing)?; def,
&name,
&mut hydrators,
&mut environment,
&self.lines,
tracing,
)?;
definitions.push(definition); definitions.push(definition);
} }
@ -127,6 +134,7 @@ impl UntypedModule {
name: name.clone(), name: name.clone(),
definitions, definitions,
kind, kind,
lines: self.lines,
type_info: TypeInfo { type_info: TypeInfo {
name, name,
types, types,
@ -145,6 +153,7 @@ fn infer_definition(
module_name: &String, module_name: &String,
hydrators: &mut HashMap<String, Hydrator>, hydrators: &mut HashMap<String, Hydrator>,
environment: &mut Environment<'_>, environment: &mut Environment<'_>,
lines: &LineNumbers,
tracing: Tracing, tracing: Tracing,
) -> Result<TypedDefinition, Error> { ) -> Result<TypedDefinition, Error> {
match def { match def {
@ -181,7 +190,7 @@ fn infer_definition(
.map(|(arg_name, tipo)| arg_name.set_type(tipo.clone())) .map(|(arg_name, tipo)| arg_name.set_type(tipo.clone()))
.collect(); .collect();
let mut expr_typer = ExprTyper::new(environment, tracing); let mut expr_typer = ExprTyper::new(environment, lines, tracing);
expr_typer.hydrator = hydrators expr_typer.hydrator = hydrators
.remove(&name) .remove(&name)
@ -293,6 +302,7 @@ fn infer_definition(
module_name, module_name,
hydrators, hydrators,
environment, environment,
lines,
tracing, tracing,
)? )?
else { else {
@ -343,6 +353,7 @@ fn infer_definition(
module_name, module_name,
hydrators, hydrators,
environment, environment,
lines,
tracing, tracing,
)? )?
else { else {
@ -404,6 +415,7 @@ fn infer_definition(
module_name, module_name,
hydrators, hydrators,
environment, environment,
lines,
tracing, tracing,
)? { )? {
environment.unify(f.return_type.clone(), builtins::bool(), f.location, false)?; environment.unify(f.return_type.clone(), builtins::bool(), f.location, false)?;
@ -585,7 +597,7 @@ fn infer_definition(
.. ..
}) => { }) => {
let typed_expr = let typed_expr =
ExprTyper::new(environment, tracing).infer_const(&annotation, *value)?; ExprTyper::new(environment, lines, tracing).infer_const(&annotation, *value)?;
let tipo = typed_expr.tipo(); let tipo = typed_expr.tipo();

View File

@ -1,5 +1,8 @@
use crate::{line_numbers::LineNumbers, utils::span_to_lsp_range}; use crate::utils::span_to_lsp_range;
use aiken_lang::ast::{Definition, ModuleKind, Span, UntypedDefinition, Use}; use aiken_lang::{
ast::{Definition, ModuleKind, Span, UntypedDefinition, Use},
line_numbers::LineNumbers,
};
use aiken_project::module::CheckedModule; use aiken_project::module::CheckedModule;
use itertools::Itertools; use itertools::Itertools;
use std::fs; use std::fs;

View File

@ -7,7 +7,6 @@ use std::env;
mod cast; mod cast;
mod edits; mod edits;
pub mod error; pub mod error;
mod line_numbers;
mod quickfix; mod quickfix;
pub mod server; pub mod server;
mod utils; mod utils;

View File

@ -1,48 +0,0 @@
#[allow(dead_code)]
#[derive(Debug)]
pub struct LineNumbers {
line_starts: Vec<usize>,
length: usize,
}
impl LineNumbers {
pub fn new(src: &str) -> Self {
Self {
length: src.len(),
line_starts: std::iter::once(0)
.chain(src.match_indices('\n').map(|(i, _)| i + 1))
.collect(),
}
}
/// Get the line number for a byte index
pub fn line_number(&self, byte_index: usize) -> usize {
self.line_starts
.binary_search(&byte_index)
.unwrap_or_else(|next_line| next_line - 1)
+ 1
}
// TODO: handle unicode characters that may be more than 1 byte in width
pub fn line_and_column_number(&self, byte_index: usize) -> LineColumn {
let line = self.line_number(byte_index);
let column = byte_index - self.line_starts.get(line - 1).copied().unwrap_or_default() + 1;
LineColumn { line, column }
}
// TODO: handle unicode characters that may be more than 1 byte in width
/// 0 indexed line and character to byte index
#[allow(dead_code)]
pub fn byte_index(&self, line: usize, character: usize) -> usize {
match self.line_starts.get(line) {
Some(line_index) => *line_index + character,
None => self.length,
}
}
}
#[derive(Debug, Clone, Copy)]
pub struct LineColumn {
pub line: usize,
pub column: usize,
}

View File

@ -8,6 +8,7 @@ use std::{
use aiken_lang::{ use aiken_lang::{
ast::{Definition, Located, ModuleKind, Span, Use}, ast::{Definition, Located, ModuleKind, Span, Use},
error::ExtraData, error::ExtraData,
line_numbers::LineNumbers,
parser, parser,
tipo::pretty::Printer, tipo::pretty::Printer,
}; };
@ -35,7 +36,6 @@ use miette::Diagnostic;
use crate::{ use crate::{
cast::{cast_notification, cast_request}, cast::{cast_notification, cast_request},
error::Error as ServerError, error::Error as ServerError,
line_numbers::LineNumbers,
quickfix, quickfix,
utils::{ utils::{
path_to_uri, span_to_lsp_range, text_edit_replace, uri_to_module_name, path_to_uri, span_to_lsp_range, text_edit_replace, uri_to_module_name,

View File

@ -1,10 +1,8 @@
use std::{collections::HashMap, path::PathBuf}; use std::{collections::HashMap, path::PathBuf};
use aiken_lang::ast::Tracing; use aiken_lang::{ast::Tracing, line_numbers::LineNumbers};
use aiken_project::{config::Config, error::Error as ProjectError, module::CheckedModule, Project}; use aiken_project::{config::Config, error::Error as ProjectError, module::CheckedModule, Project};
use crate::line_numbers::LineNumbers;
#[derive(Debug)] #[derive(Debug)]
pub struct SourceInfo { pub struct SourceInfo {
/// The path to the source file from within the project root /// The path to the source file from within the project root
@ -34,7 +32,7 @@ impl LspProject {
let result = self let result = self
.project .project
.check(true, None, false, false, Tracing::NoTraces); .check(true, None, false, false, Tracing::silent());
self.project.restore(checkpoint); self.project.restore(checkpoint);

View File

@ -1,11 +1,11 @@
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use aiken_lang::ast::Span; use aiken_lang::{ast::Span, line_numbers::LineNumbers};
use itertools::Itertools; use itertools::Itertools;
use lsp_types::TextEdit; use lsp_types::TextEdit;
use urlencoding::decode; use urlencoding::decode;
use crate::{error::Error, line_numbers::LineNumbers}; use crate::error::Error;
pub const COMPILING_PROGRESS_TOKEN: &str = "compiling-aiken"; pub const COMPILING_PROGRESS_TOKEN: &str = "compiling-aiken";
pub const CREATE_COMPILING_PROGRESS_TOKEN: &str = "create-compiling-progress-token"; pub const CREATE_COMPILING_PROGRESS_TOKEN: &str = "create-compiling-progress-token";
@ -37,8 +37,12 @@ pub fn path_to_uri(path: PathBuf) -> Result<lsp_types::Url, Error> {
} }
pub fn span_to_lsp_range(location: Span, line_numbers: &LineNumbers) -> lsp_types::Range { pub fn span_to_lsp_range(location: Span, line_numbers: &LineNumbers) -> lsp_types::Range {
let start = line_numbers.line_and_column_number(location.start); let start = line_numbers
let end = line_numbers.line_and_column_number(location.end); .line_and_column_number(location.start)
.expect("Spans are within bounds");
let end = line_numbers
.line_and_column_number(location.end)
.expect("Spans are within bounds");
lsp_types::Range { lsp_types::Range {
start: lsp_types::Position { start: lsp_types::Position {

View File

@ -217,7 +217,11 @@ impl Validator {
mod tests { mod tests {
use std::collections::HashMap; use std::collections::HashMap;
use aiken_lang::{self, builtins}; use aiken_lang::{
self,
ast::{TraceLevel, Tracing},
builtins,
};
use uplc::ast as uplc_ast; use uplc::ast as uplc_ast;
use crate::tests::TestProject; use crate::tests::TestProject;
@ -240,7 +244,7 @@ mod tests {
&project.functions, &project.functions,
&project.data_types, &project.data_types,
&project.module_types, &project.module_types,
true, Tracing::All(TraceLevel::Verbose),
); );
let (validator, def) = modules let (validator, def) = modules

View File

@ -176,7 +176,7 @@ where
let parsed_modules = self.parse_sources(self.config.name.clone())?; let parsed_modules = self.parse_sources(self.config.name.clone())?;
self.type_check(parsed_modules, Tracing::NoTraces, false)?; self.type_check(parsed_modules, Tracing::silent(), false)?;
self.event_listener.handle_event(Event::GeneratingDocFiles { self.event_listener.handle_event(Event::GeneratingDocFiles {
output_path: destination.clone(), output_path: destination.clone(),
@ -282,7 +282,7 @@ where
&self.functions, &self.functions,
&self.data_types, &self.data_types,
&self.module_types, &self.module_types,
options.tracing.into(), options.tracing,
); );
let blueprint = Blueprint::new(&self.config, &self.checked_modules, &mut generator) let blueprint = Blueprint::new(&self.config, &self.checked_modules, &mut generator)
@ -312,7 +312,7 @@ where
exact_match, exact_match,
} => { } => {
let tests = let tests =
self.collect_tests(verbose, match_tests, exact_match, options.tracing.into())?; self.collect_tests(verbose, match_tests, exact_match, options.tracing)?;
if !tests.is_empty() { if !tests.is_empty() {
self.event_listener.handle_event(Event::RunningTests); self.event_listener.handle_event(Event::RunningTests);
@ -518,7 +518,7 @@ where
let parsed_modules = self.parse_sources(package.name)?; let parsed_modules = self.parse_sources(package.name)?;
self.type_check(parsed_modules, Tracing::NoTraces, true)?; self.type_check(parsed_modules, Tracing::silent(), true)?;
} }
Ok(()) Ok(())
@ -675,7 +675,7 @@ where
verbose: bool, verbose: bool,
match_tests: Option<Vec<String>>, match_tests: Option<Vec<String>>,
exact_match: bool, exact_match: bool,
tracing: bool, tracing: Tracing,
) -> Result<Vec<Script>, Error> { ) -> Result<Vec<Script>, Error> {
let mut scripts = Vec::new(); let mut scripts = Vec::new();
let mut testable_validators = Vec::new(); let mut testable_validators = Vec::new();

View File

@ -1,13 +1,14 @@
use crate::error::Error; use crate::error::Error;
use aiken_lang::{ use aiken_lang::{
ast::{ ast::{
DataType, Definition, Function, Located, ModuleKind, TypedDataType, TypedFunction, DataType, Definition, Function, Located, ModuleKind, Tracing, TypedDataType, TypedFunction,
TypedModule, TypedValidator, UntypedModule, Validator, TypedModule, TypedValidator, UntypedModule, Validator,
}, },
gen_uplc::{ gen_uplc::{
builder::{DataTypeKey, FunctionAccessKey}, builder::{DataTypeKey, FunctionAccessKey},
CodeGenerator, CodeGenerator,
}, },
line_numbers::LineNumbers,
parser::extra::{comments_before, Comment, ModuleExtra}, parser::extra::{comments_before, Comment, ModuleExtra},
tipo::TypeInfo, tipo::TypeInfo,
}; };
@ -358,7 +359,7 @@ impl CheckedModules {
builtin_functions: &'a IndexMap<FunctionAccessKey, TypedFunction>, builtin_functions: &'a IndexMap<FunctionAccessKey, TypedFunction>,
builtin_data_types: &'a IndexMap<DataTypeKey, TypedDataType>, builtin_data_types: &'a IndexMap<DataTypeKey, TypedDataType>,
module_types: &'a HashMap<String, TypeInfo>, module_types: &'a HashMap<String, TypeInfo>,
tracing: bool, tracing: Tracing,
) -> CodeGenerator<'a> { ) -> CodeGenerator<'a> {
let mut functions = IndexMap::new(); let mut functions = IndexMap::new();
for (k, v) in builtin_functions { for (k, v) in builtin_functions {
@ -401,7 +402,10 @@ impl CheckedModules {
| Definition::Use(_) => {} | Definition::Use(_) => {}
} }
} }
module_src.insert(module.name.clone(), module.code.clone()); module_src.insert(
module.name.clone(),
(module.code.clone(), LineNumbers::new(&module.code)),
);
} }
let mut module_types_index = IndexMap::new(); let mut module_types_index = IndexMap::new();
@ -412,7 +416,7 @@ impl CheckedModules {
data_types, data_types,
module_types_index, module_types_index,
module_src, module_src,
tracing, tracing.trace_level(true),
) )
} }
} }

View File

@ -316,7 +316,11 @@ fn fmt_test(eval_info: &EvalInfo, max_mem: usize, max_cpu: usize, styled: bool)
format!( format!(
"{arrow} {styled_line}", "{arrow} {styled_line}",
arrow = "".if_supports_color(Stderr, |s| s.bright_yellow()), arrow = "".if_supports_color(Stderr, |s| s.bright_yellow()),
styled_line = line.if_supports_color(Stderr, |s| s.bright_black()) styled_line = line
.split('\n')
.map(|l| format!("{}", l.if_supports_color(Stderr, |s| s.bright_black())))
.collect::<Vec<_>>()
.join("\n")
) )
}) })
.collect::<Vec<_>>() .collect::<Vec<_>>()

View File

@ -1,6 +1,6 @@
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use aiken_lang::ast::{Definition, Function, TypedFunction, TypedValidator}; use aiken_lang::ast::{Definition, Function, TraceLevel, Tracing, TypedFunction, TypedValidator};
use uplc::{ use uplc::{
ast::{Constant, Data, DeBruijn, Name, Program, Term, Type}, ast::{Constant, Data, DeBruijn, Name, Program, Term, Type},
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER}, builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER},
@ -26,7 +26,7 @@ fn assert_uplc(source_code: &str, expected: Term<Name>, should_fail: bool) {
&project.functions, &project.functions,
&project.data_types, &project.data_types,
&project.module_types, &project.module_types,
true, Tracing::All(TraceLevel::Verbose),
); );
let Some(checked_module) = modules.values().next() else { let Some(checked_module) = modules.values().next() else {
@ -115,7 +115,7 @@ fn acceptance_test_1_length() {
1 + length(rest) 1 + length(rest)
} }
} }
test length_1() { test length_1() {
length([1, 2, 3]) == 3 length([1, 2, 3]) == 3
} }
@ -167,7 +167,7 @@ fn acceptance_test_2_repeat() {
[x, ..repeat(x, n - 1)] [x, ..repeat(x, n - 1)]
} }
} }
test repeat_1() { test repeat_1() {
repeat("aiken", 2) == ["aiken", "aiken"] repeat("aiken", 2) == ["aiken", "aiken"]
} }
@ -232,11 +232,11 @@ fn acceptance_test_3_concat() {
f(x, foldr(rest, f, zero)) f(x, foldr(rest, f, zero))
} }
} }
pub fn concat(left: List<a>, right: List<a>) -> List<a> { pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, fn(x, xs) { [x, ..xs] }, right) foldr(left, fn(x, xs) { [x, ..xs] }, right)
} }
test concat_1() { test concat_1() {
concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6]
} }
@ -329,15 +329,15 @@ fn acceptance_test_4_concat_no_anon_func() {
f(x, foldr(rest, f, zero)) f(x, foldr(rest, f, zero))
} }
} }
pub fn prepend(x: a, xs: List<a>) -> List<a> { pub fn prepend(x: a, xs: List<a>) -> List<a> {
[x, ..xs] [x, ..xs]
} }
pub fn concat(left: List<a>, right: List<a>) -> List<a> { pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, prepend, right) foldr(left, prepend, right)
} }
test concat_1() { test concat_1() {
concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6] concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6]
} }
@ -425,7 +425,7 @@ fn acceptance_test_4_concat_no_anon_func() {
fn acceptance_test_5_direct_head() { fn acceptance_test_5_direct_head() {
let src = r#" let src = r#"
use aiken/builtin.{head_list} use aiken/builtin.{head_list}
test head_1() { test head_1() {
let head = fn(xs){ let head = fn(xs){
when xs is { when xs is {
@ -433,9 +433,9 @@ fn acceptance_test_5_direct_head() {
_ -> Some(head_list(xs)) _ -> Some(head_list(xs))
} }
} }
head([1, 2, 3]) == Some(1) head([1, 2, 3]) == Some(1)
} }
"#; "#;
assert_uplc( assert_uplc(
@ -473,7 +473,7 @@ fn acceptance_test_5_direct_head() {
fn acceptance_test_5_direct_2_heads() { fn acceptance_test_5_direct_2_heads() {
let src = r#" let src = r#"
use aiken/builtin.{head_list} use aiken/builtin.{head_list}
test head_2() { test head_2() {
let head = fn(xs: List<Int>){ let head = fn(xs: List<Int>){
when xs is { when xs is {
@ -482,9 +482,9 @@ fn acceptance_test_5_direct_2_heads() {
[a, b, ..] -> Some([a,b]) [a, b, ..] -> Some([a,b])
} }
} }
head([1, 2, 3]) == Some([1, 2]) head([1, 2, 3]) == Some([1, 2])
} }
"#; "#;
assert_uplc( assert_uplc(
@ -583,10 +583,10 @@ fn acceptance_test_5_head_not_empty() {
_ -> Some(head_list(xs)) _ -> Some(head_list(xs))
} }
} }
test head_1() { test head_1() {
head([1, 2, 3]) == Some(1) head([1, 2, 3]) == Some(1)
} }
"#; "#;
assert_uplc( assert_uplc(
@ -631,10 +631,10 @@ fn acceptance_test_5_head_empty() {
_ -> Some(head_list(xs)) _ -> Some(head_list(xs))
} }
} }
test head_1() { test head_1() {
head([]) == None head([]) == None
} }
"#; "#;
assert_uplc( assert_uplc(
@ -744,10 +744,10 @@ fn acceptance_test_7_unzip() {
} }
} }
} }
test unzip1() { test unzip1() {
let x = [(3, #"55"), (4, #"7799")] let x = [(3, #"55"), (4, #"7799")]
unzip(x) == ([3, 4], [#"55", #"7799"]) unzip(x) == ([3, 4], [#"55", #"7799"])
} }
"#; "#;
@ -869,7 +869,7 @@ fn acceptance_test_8_is_empty() {
pub fn is_empty(bytes: ByteArray) -> Bool { pub fn is_empty(bytes: ByteArray) -> Bool {
builtin.length_of_bytearray(bytes) == 0 builtin.length_of_bytearray(bytes) == 0
} }
test is_empty_1() { test is_empty_1() {
is_empty(#"") == True is_empty(#"") == True
} }
@ -902,7 +902,7 @@ fn acceptance_test_8_is_not_empty() {
pub fn is_empty(bytes: ByteArray) -> Bool { pub fn is_empty(bytes: ByteArray) -> Bool {
builtin.length_of_bytearray(bytes) == 0 builtin.length_of_bytearray(bytes) == 0
} }
test is_empty_1() { test is_empty_1() {
is_empty(#"01") == False is_empty(#"01") == False
} }
@ -935,7 +935,7 @@ fn acceptance_test_9_is_empty() {
pub fn is_empty(bytes: ByteArray) -> Bool { pub fn is_empty(bytes: ByteArray) -> Bool {
length_of_bytearray(bytes) == 0 length_of_bytearray(bytes) == 0
} }
test is_empty_1() { test is_empty_1() {
is_empty(#"") == True is_empty(#"") == True
} }
@ -971,14 +971,14 @@ fn acceptance_test_10_map_none() {
Some(f(a)) Some(f(a))
} }
} }
fn add_one(n: Int) -> Int { fn add_one(n: Int) -> Int {
n + 1 n + 1
} }
test map_1() { test map_1() {
map(None, add_one) == None map(None, add_one) == None
} }
"#; "#;
assert_uplc( assert_uplc(
@ -1053,14 +1053,14 @@ fn acceptance_test_10_map_some() {
Some(f(a)) Some(f(a))
} }
} }
fn add_one(n: Int) -> Int { fn add_one(n: Int) -> Int {
n + 1 n + 1
} }
test map_1() { test map_1() {
map(Some(1), add_one) == Some(2) map(Some(1), add_one) == Some(2)
} }
"#; "#;
assert_uplc( assert_uplc(
@ -1135,10 +1135,10 @@ fn acceptance_test_11_map_empty() {
[f(x), ..map(rest, f)] [f(x), ..map(rest, f)]
} }
} }
test map_1() { test map_1() {
map([], fn(n) { n + 1 }) == [] map([], fn(n) { n + 1 }) == []
} }
"#; "#;
assert_uplc( assert_uplc(
@ -1206,10 +1206,10 @@ fn acceptance_test_11_map_filled() {
[f(x), ..map(rest, f)] [f(x), ..map(rest, f)]
} }
} }
test map_1() { test map_1() {
map([6, 7, 8], fn(n) { n + 1 }) == [7, 8, 9] map([6, 7, 8], fn(n) { n + 1 }) == [7, 8, 9]
} }
"#; "#;
assert_uplc( assert_uplc(
@ -1291,7 +1291,7 @@ fn acceptance_test_12_filter_even() {
} }
} }
} }
test filter_1() { test filter_1() {
filter([1, 2, 3, 4, 5, 6], fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6] filter([1, 2, 3, 4, 5, 6], fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6]
} }
@ -1776,10 +1776,10 @@ fn acceptance_test_20_map_some() {
Some(f(a)) Some(f(a))
} }
} }
test map_1() { test map_1() {
map(Some(14), fn(n){ n + 1 }) == Some(15) map(Some(14), fn(n){ n + 1 }) == Some(15)
} }
"#; "#;
assert_uplc( assert_uplc(
@ -1967,15 +1967,15 @@ fn acceptance_test_23_to_list() {
pub opaque type AssocList<key, value> { pub opaque type AssocList<key, value> {
inner: List<(key, value)>, inner: List<(key, value)>,
} }
pub fn new() -> AssocList<key, value> { pub fn new() -> AssocList<key, value> {
AssocList { inner: [] } AssocList { inner: [] }
} }
pub fn to_list(m: AssocList<key, value>) -> List<(key, value)> { pub fn to_list(m: AssocList<key, value>) -> List<(key, value)> {
m.inner m.inner
} }
pub fn insert( pub fn insert(
in m: AssocList<key, value>, in m: AssocList<key, value>,
key k: key, key k: key,
@ -1983,7 +1983,7 @@ fn acceptance_test_23_to_list() {
) -> AssocList<key, value> { ) -> AssocList<key, value> {
AssocList { inner: do_insert(m.inner, k, v) } AssocList { inner: do_insert(m.inner, k, v) }
} }
fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> { fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> {
when elems is { when elems is {
[] -> [] ->
@ -1996,13 +1996,13 @@ fn acceptance_test_23_to_list() {
} }
} }
} }
fn fixture_1() { fn fixture_1() {
new() new()
|> insert("foo", 42) |> insert("foo", 42)
|> insert("bar", 14) |> insert("bar", 14)
} }
test to_list_2() { test to_list_2() {
to_list(fixture_1()) == [("foo", 42), ("bar", 14)] to_list(fixture_1()) == [("foo", 42), ("bar", 14)]
} }
@ -2063,10 +2063,10 @@ fn acceptance_test_24_map2() {
} }
} }
} }
test map2_3() { test map2_3() {
map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42))
} }
"#; "#;
assert_uplc( assert_uplc(
@ -2191,7 +2191,7 @@ fn acceptance_test_25_void_equal() {
let src = r#" let src = r#"
test nil_1() { test nil_1() {
Void == Void Void == Void
} }
"#; "#;
assert_uplc( assert_uplc(
@ -2212,11 +2212,11 @@ fn acceptance_test_26_foldr() {
f(x, foldr(rest, f, zero)) f(x, foldr(rest, f, zero))
} }
} }
pub fn concat(left: List<a>, right: List<a>) -> List<a> { pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, fn(x, xs) { [x, ..xs] }, right) foldr(left, fn(x, xs) { [x, ..xs] }, right)
} }
pub fn flat_map(xs: List<a>, f: fn(a) -> List<b>) -> List<b> { pub fn flat_map(xs: List<a>, f: fn(a) -> List<b>) -> List<b> {
when xs is { when xs is {
[] -> [] ->
@ -2225,7 +2225,7 @@ fn acceptance_test_26_foldr() {
concat(f(x), flat_map(rest, f)) concat(f(x), flat_map(rest, f))
} }
} }
test flat_map_2() { test flat_map_2() {
flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3]
} }
@ -2353,11 +2353,11 @@ fn acceptance_test_27_flat_map() {
f(x, foldr(rest, f, zero)) f(x, foldr(rest, f, zero))
} }
} }
pub fn concat(left: List<a>, right: List<a>) -> List<a> { pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, fn(x, xs) { [x, ..xs] }, right) foldr(left, fn(x, xs) { [x, ..xs] }, right)
} }
pub fn flat_map(xs: List<a>, f: fn(a) -> List<b>) -> List<b> { pub fn flat_map(xs: List<a>, f: fn(a) -> List<b>) -> List<b> {
when xs is { when xs is {
[] -> [] ->
@ -2366,7 +2366,7 @@ fn acceptance_test_27_flat_map() {
concat(f(x), flat_map(rest, f)) concat(f(x), flat_map(rest, f))
} }
} }
test flat_map_2() { test flat_map_2() {
flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3] flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3]
} }
@ -2498,7 +2498,7 @@ fn acceptance_test_28_unique_empty_list() {
} }
} }
} }
pub fn unique(xs: List<a>) -> List<a> { pub fn unique(xs: List<a>) -> List<a> {
when xs is { when xs is {
[] -> [] ->
@ -2507,10 +2507,10 @@ fn acceptance_test_28_unique_empty_list() {
[x, ..unique(filter(rest, fn(y) { y != x }))] [x, ..unique(filter(rest, fn(y) { y != x }))]
} }
} }
test unique_1() { test unique_1() {
unique([]) == [] unique([]) == []
} }
"#; "#;
assert_uplc( assert_uplc(
@ -2606,7 +2606,7 @@ fn acceptance_test_28_unique_list() {
} }
} }
} }
pub fn unique(xs: List<a>) -> List<a> { pub fn unique(xs: List<a>) -> List<a> {
when xs is { when xs is {
[] -> [] ->
@ -2615,10 +2615,10 @@ fn acceptance_test_28_unique_list() {
[x, ..unique(filter(rest, fn(y) { y != x }))] [x, ..unique(filter(rest, fn(y) { y != x }))]
} }
} }
test unique_1() { test unique_1() {
unique([1,2,3,1]) == [1,2,3] unique([1,2,3,1]) == [1,2,3]
} }
"#; "#;
assert_uplc( assert_uplc(
@ -2721,15 +2721,15 @@ fn acceptance_test_29_union() {
pub opaque type AssocList<key, value> { pub opaque type AssocList<key, value> {
inner: List<(key, value)>, inner: List<(key, value)>,
} }
pub fn new() -> AssocList<key, value> { pub fn new() -> AssocList<key, value> {
AssocList { inner: [] } AssocList { inner: [] }
} }
pub fn from_list(xs: List<(key, value)>) -> AssocList<key, value> { pub fn from_list(xs: List<(key, value)>) -> AssocList<key, value> {
AssocList { inner: do_from_list(xs) } AssocList { inner: do_from_list(xs) }
} }
fn do_from_list(xs: List<(key, value)>) -> List<(key, value)> { fn do_from_list(xs: List<(key, value)>) -> List<(key, value)> {
when xs is { when xs is {
[] -> [] ->
@ -2738,7 +2738,7 @@ fn acceptance_test_29_union() {
do_insert(do_from_list(rest), k, v) do_insert(do_from_list(rest), k, v)
} }
} }
pub fn insert( pub fn insert(
in m: AssocList<key, value>, in m: AssocList<key, value>,
key k: key, key k: key,
@ -2746,7 +2746,7 @@ fn acceptance_test_29_union() {
) -> AssocList<key, value> { ) -> AssocList<key, value> {
AssocList { inner: do_insert(m.inner, k, v) } AssocList { inner: do_insert(m.inner, k, v) }
} }
fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> { fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> {
when elems is { when elems is {
[] -> [] ->
@ -2759,14 +2759,14 @@ fn acceptance_test_29_union() {
} }
} }
} }
pub fn union( pub fn union(
left: AssocList<key, value>, left: AssocList<key, value>,
right: AssocList<key, value>, right: AssocList<key, value>,
) -> AssocList<key, value> { ) -> AssocList<key, value> {
AssocList { inner: do_union(left.inner, right.inner) } AssocList { inner: do_union(left.inner, right.inner) }
} }
fn do_union( fn do_union(
left: List<(key, value)>, left: List<(key, value)>,
right: List<(key, value)>, right: List<(key, value)>,
@ -2778,17 +2778,17 @@ fn acceptance_test_29_union() {
do_union(rest, do_insert(right, k, v)) do_union(rest, do_insert(right, k, v))
} }
} }
fn fixture_1() { fn fixture_1() {
new() new()
|> insert("foo", 42) |> insert("foo", 42)
|> insert("bar", 14) |> insert("bar", 14)
} }
test union_1() { test union_1() {
union(fixture_1(), new()) == fixture_1() union(fixture_1(), new()) == fixture_1()
} }
"#; "#;
assert_uplc( assert_uplc(
@ -2954,7 +2954,7 @@ fn acceptance_test_30_abs() {
a a
} }
} }
test abs_1() { test abs_1() {
abs(-14) == 14 abs(-14) == 14
} }
@ -2987,13 +2987,13 @@ fn acceptance_test_30_abs() {
#[test] #[test]
fn expect_empty_list_on_filled_list() { fn expect_empty_list_on_filled_list() {
let src = r#" let src = r#"
test empty_list1() { test empty_list1() {
let x = [1,2] let x = [1,2]
expect [] = x expect [] = x
True True
} }
"#; "#;
assert_uplc( assert_uplc(
@ -3014,13 +3014,13 @@ fn expect_empty_list_on_filled_list() {
#[test] #[test]
fn expect_empty_list_on_new_list() { fn expect_empty_list_on_new_list() {
let src = r#" let src = r#"
test empty_list1() { test empty_list1() {
let x = [] let x = []
expect [] = x expect [] = x
True True
} }
"#; "#;
assert_uplc( assert_uplc(
@ -3038,7 +3038,7 @@ fn expect_empty_list_on_new_list() {
#[test] #[test]
fn when_bool_is_true() { fn when_bool_is_true() {
let src = r#" let src = r#"
test it() { test it() {
when True is { when True is {
True -> True ->
@ -3046,7 +3046,7 @@ fn when_bool_is_true() {
False -> False ->
fail fail
} }
} }
"#; "#;
assert_uplc( assert_uplc(
@ -3061,7 +3061,7 @@ fn when_bool_is_true() {
#[test] #[test]
fn when_bool_is_true_switched_cases() { fn when_bool_is_true_switched_cases() {
let src = r#" let src = r#"
test it() { test it() {
when True is { when True is {
False -> False ->
@ -3069,7 +3069,7 @@ fn when_bool_is_true_switched_cases() {
True -> True ->
True True
} }
} }
"#; "#;
assert_uplc( assert_uplc(
@ -3084,7 +3084,7 @@ fn when_bool_is_true_switched_cases() {
#[test] #[test]
fn when_bool_is_false() { fn when_bool_is_false() {
let src = r#" let src = r#"
test it() { test it() {
when False is { when False is {
False -> False ->
@ -3092,7 +3092,7 @@ fn when_bool_is_false() {
True -> True ->
True True
} }
} }
"#; "#;
assert_uplc( assert_uplc(
@ -3607,15 +3607,15 @@ fn pass_constr_as_function() {
a: Int, a: Int,
b: SubMake b: SubMake
} }
type SubMake { type SubMake {
c: Int c: Int
} }
fn hi(sm: SubMake, to_make: fn (Int, SubMake) -> Make) -> Make { fn hi(sm: SubMake, to_make: fn (Int, SubMake) -> Make) -> Make {
to_make(3, sm) to_make(3, sm)
} }
test cry() { test cry() {
Make(3, SubMake(1)) == hi(SubMake(1), Make) Make(3, SubMake(1)) == hi(SubMake(1), Make)
} }
@ -3668,23 +3668,23 @@ fn record_update_output_2_vals() {
type Address { type Address {
thing: ByteArray, thing: ByteArray,
} }
type Datum { type Datum {
NoDatum NoDatum
InlineDatum(Data) InlineDatum(Data)
} }
type Output { type Output {
address: Address, address: Address,
value: List<(ByteArray, List<(ByteArray, Int)>)>, value: List<(ByteArray, List<(ByteArray, Int)>)>,
datum: Datum, datum: Datum,
script_ref: Option<ByteArray>, script_ref: Option<ByteArray>,
} }
type MyDatum { type MyDatum {
a: Int, a: Int,
} }
test huh() { test huh() {
let prev_output = let prev_output =
Output { Output {
@ -3693,10 +3693,10 @@ fn record_update_output_2_vals() {
datum: InlineDatum(MyDatum{a: 3}), datum: InlineDatum(MyDatum{a: 3}),
script_ref: None, script_ref: None,
} }
let next_output = let next_output =
Output { ..prev_output, value: [], datum: prev_output.datum } Output { ..prev_output, value: [], datum: prev_output.datum }
prev_output == next_output prev_output == next_output
} }
"#; "#;
@ -3770,23 +3770,23 @@ fn record_update_output_1_val() {
type Address { type Address {
thing: ByteArray, thing: ByteArray,
} }
type Datum { type Datum {
NoDatum NoDatum
InlineDatum(Data) InlineDatum(Data)
} }
type Output { type Output {
address: Address, address: Address,
value: List<(ByteArray, List<(ByteArray, Int)>)>, value: List<(ByteArray, List<(ByteArray, Int)>)>,
datum: Datum, datum: Datum,
script_ref: Option<ByteArray>, script_ref: Option<ByteArray>,
} }
type MyDatum { type MyDatum {
a: Int, a: Int,
} }
test huh() { test huh() {
let prev_output = let prev_output =
Output { Output {
@ -3795,10 +3795,10 @@ fn record_update_output_1_val() {
datum: InlineDatum(MyDatum{a: 3}), datum: InlineDatum(MyDatum{a: 3}),
script_ref: None, script_ref: None,
} }
let next_output = let next_output =
Output { ..prev_output, datum: prev_output.datum } Output { ..prev_output, datum: prev_output.datum }
prev_output == next_output prev_output == next_output
} }
"#; "#;
@ -3871,23 +3871,23 @@ fn record_update_output_first_last_val() {
type Address { type Address {
thing: ByteArray, thing: ByteArray,
} }
type Datum { type Datum {
NoDatum NoDatum
InlineDatum(Data) InlineDatum(Data)
} }
type Output { type Output {
address: Address, address: Address,
value: List<(ByteArray, List<(ByteArray, Int)>)>, value: List<(ByteArray, List<(ByteArray, Int)>)>,
datum: Datum, datum: Datum,
script_ref: Option<ByteArray>, script_ref: Option<ByteArray>,
} }
type MyDatum { type MyDatum {
a: Int, a: Int,
} }
test huh() { test huh() {
let prev_output = let prev_output =
Output { Output {
@ -3896,10 +3896,10 @@ fn record_update_output_first_last_val() {
datum: InlineDatum(MyDatum{a: 3}), datum: InlineDatum(MyDatum{a: 3}),
script_ref: None, script_ref: None,
} }
let next_output = let next_output =
Output { ..prev_output, script_ref: None, address: Address{thing: "script_hash_0"} } Output { ..prev_output, script_ref: None, address: Address{thing: "script_hash_0"} }
prev_output == next_output prev_output == next_output
} }
"#; "#;
@ -3969,14 +3969,14 @@ fn list_fields_unwrap() {
a: ByteArray, a: ByteArray,
b: Int, b: Int,
} }
fn data_fields(){ fn data_fields(){
[ [
Fields{a: #"", b: 14}, Fields{a: #"", b: 14},
Fields{a: #"AA", b: 0} Fields{a: #"AA", b: 0}
] ]
} }
test list_fields_unwr_0() { test list_fields_unwr_0() {
when data_fields() is { when data_fields() is {
[Fields { b, .. }, ..] -> [Fields { b, .. }, ..] ->
@ -4035,27 +4035,27 @@ fn foldl_type_mismatch() {
payment_credential: ByteArray, payment_credential: ByteArray,
stake_credential: Option<ByteArray>, stake_credential: Option<ByteArray>,
} }
type Output { type Output {
address: Address, address: Address,
value: List<Int>, value: List<Int>,
datum: Option<Int>, datum: Option<Int>,
reference_script: Option<Int>, reference_script: Option<Int>,
} }
pub fn foldl(self: List<a>, with: fn(a, b) -> b, zero: b) -> b { pub fn foldl(self: List<a>, with: fn(a, b) -> b, zero: b) -> b {
when self is { when self is {
[] -> zero [] -> zero
[x, ..xs] -> foldl(xs, with, with(x, zero)) [x, ..xs] -> foldl(xs, with, with(x, zero))
} }
} }
test hi() { test hi() {
let addr1 = Address { payment_credential: "adff", stake_credential: None } let addr1 = Address { payment_credential: "adff", stake_credential: None }
let out = let out =
Output { address: addr1, value: [], datum: None, reference_script: None } Output { address: addr1, value: [], datum: None, reference_script: None }
let outputs: List<Output> = let outputs: List<Output> =
[out, out, out] [out, out, out]
let cry = let cry =
@ -4074,7 +4074,7 @@ fn foldl_type_mismatch() {
}, },
None, None,
) )
cry == cry cry == cry
} }
"#; "#;
@ -4466,14 +4466,14 @@ fn expect_head_cast_data_with_tail() {
#[test] #[test]
fn test_init_3() { fn test_init_3() {
let src = r#" let src = r#"
pub fn init(self: List<a>) -> Option<List<a>> { pub fn init(self: List<a>) -> Option<List<a>> {
when self is { when self is {
[] -> None [] -> None
_ -> Some(do_init(self)) _ -> Some(do_init(self))
} }
} }
fn do_init(self: List<a>) -> List<a> { fn do_init(self: List<a>) -> List<a> {
when self is { when self is {
[] -> fail @"unreachable" [] -> fail @"unreachable"
@ -4483,7 +4483,7 @@ fn test_init_3() {
[x, ..do_init(xs)] [x, ..do_init(xs)]
} }
} }
test init_3() { test init_3() {
init([1, 2, 3, 4]) == Some([1, 2, 3]) init([1, 2, 3, 4]) == Some([1, 2, 3])
} }
@ -4579,7 +4579,7 @@ fn list_clause_with_guard() {
} }
} }
} }
test init_3() { test init_3() {
do_init([1, 3]) == [1] do_init([1, 3]) == [1]
} }
@ -4718,7 +4718,7 @@ fn list_clause_with_guard2() {
} }
} }
} }
test init_3() { test init_3() {
do_init([1, 3]) == [1] do_init([1, 3]) == [1]
} }
@ -4850,7 +4850,7 @@ fn list_clause_with_guard3() {
} }
} }
} }
test init_3() { test init_3() {
do_init([1, 3]) == [1] do_init([1, 3]) == [1]
} }
@ -4989,7 +4989,7 @@ fn list_clause_with_assign() {
} }
} }
} }
test init_3() { test init_3() {
do_init([1, 3]) == [1] do_init([1, 3]) == [1]
} }
@ -5135,7 +5135,7 @@ fn list_clause_with_assign2() {
} }
} }
} }
test init_3() { test init_3() {
do_init([Some(1), None]) == [Some(1)] do_init([Some(1), None]) == [Some(1)]
} }
@ -5262,10 +5262,10 @@ fn opaque_value_in_datum() {
a: Value a: Value
} }
validator { validator {
fn spend(dat: Dat, red: Data, ctx: Data) { fn spend(dat: Dat, red: Data, ctx: Data) {
let val = dat.a let val = dat.a
expect [(_, amount)] = val.inner.inner expect [(_, amount)] = val.inner.inner
@ -5452,22 +5452,22 @@ fn opaque_value_in_test() {
pub fn dat_new() -> Dat { pub fn dat_new() -> Dat {
let v = Value { inner: Dict { inner: [("", [(#"aa", 4)] |> Dict)] } } let v = Value { inner: Dict { inner: [("", [(#"aa", 4)] |> Dict)] } }
Dat { Dat {
c: 0, c: 0,
a: v a: v
} }
} }
test spend() { test spend() {
let dat = dat_new() let dat = dat_new()
let val = dat.a let val = dat.a
expect [(_, amount)] = val.inner.inner expect [(_, amount)] = val.inner.inner
let final_amount = [(#"AA", 4)] |> Dict let final_amount = [(#"AA", 4)] |> Dict
final_amount == amount final_amount == amount
} }
"#; "#;

View File

@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::path::PathBuf; use std::path::PathBuf;
use aiken_lang::{ use aiken_lang::{
ast::{ModuleKind, Tracing, TypedDataType, TypedFunction}, ast::{ModuleKind, TraceLevel, Tracing, TypedDataType, TypedFunction},
gen_uplc::builder::{DataTypeKey, FunctionAccessKey}, gen_uplc::builder::{DataTypeKey, FunctionAccessKey},
parser, parser,
tipo::TypeInfo, tipo::TypeInfo,
@ -81,7 +81,7 @@ impl TestProject {
module.kind, module.kind,
&self.package.to_string(), &self.package.to_string(),
&self.module_types, &self.module_types,
Tracing::KeepTraces, Tracing::All(TraceLevel::Verbose),
&mut warnings, &mut warnings,
) )
.expect("Failed to type-check module"); .expect("Failed to type-check module");

View File

@ -36,7 +36,7 @@ pub fn exec(
) -> miette::Result<()> { ) -> miette::Result<()> {
with_project(directory.as_deref(), false, |p| { with_project(directory.as_deref(), false, |p| {
if rebuild { if rebuild {
p.build(false, Tracing::NoTraces)?; p.build(false, Tracing::silent())?;
} }
let title = module.as_ref().map(|m| { let title = module.as_ref().map(|m| {

View File

@ -31,7 +31,7 @@ pub fn exec(
) -> miette::Result<()> { ) -> miette::Result<()> {
with_project(directory.as_deref(), false, |p| { with_project(directory.as_deref(), false, |p| {
if rebuild { if rebuild {
p.build(false, Tracing::NoTraces)?; p.build(false, Tracing::silent())?;
} }
let title = module.as_ref().map(|m| { let title = module.as_ref().map(|m| {

View File

@ -31,7 +31,7 @@ pub fn exec(
) -> miette::Result<()> { ) -> miette::Result<()> {
with_project(directory.as_deref(), false, |p| { with_project(directory.as_deref(), false, |p| {
if rebuild { if rebuild {
p.build(false, Tracing::NoTraces)?; p.build(false, Tracing::silent())?;
} }
let title = module.as_ref().map(|m| { let title = module.as_ref().map(|m| {

View File

@ -1,4 +1,7 @@
use aiken_lang::ast::{TraceLevel, Tracing};
use aiken_project::watch::{self, watch_project, with_project}; use aiken_project::watch::{self, watch_project, with_project};
use clap::builder::MapValueParser;
use clap::builder::{PossibleValuesParser, TypedValueParser};
use std::{path::PathBuf, process}; use std::{path::PathBuf, process};
#[derive(clap::Args)] #[derive(clap::Args)]
@ -19,9 +22,23 @@ pub struct Args {
#[clap(short, long)] #[clap(short, long)]
uplc: bool, uplc: bool,
/// Do not remove traces when generating code /// Filter traces to be included in the generated program(s).
#[clap(short, long)] /// - user-defined: only consider traces that you've explicitly defined (either through the
keep_traces: bool, /// 'trace' keyword of via the trace-if-false ('?') operator.
/// - compiler-generated: only included internal traces generated by the Aiken compiler, for
/// example in usage of 'expect'.
/// - all: include both user-defined and compiler-generated traces.
/// [optional] [default: all]
#[clap(short, long, value_parser=filter_traces_parser(), default_missing_value="all", verbatim_doc_comment)]
filter_traces: Option<fn(TraceLevel) -> Tracing>,
/// Choose the verbosity level of traces:
/// - silent: disable traces altogether
/// - compact: only culprit line numbers are shown on failures
/// - verbose: enable full verbose traces as provided by the user or the compiler
/// [optional]
#[clap(short, long, value_parser=trace_level_parser(), default_value_t=TraceLevel::Silent, verbatim_doc_comment)]
trace_level: TraceLevel,
} }
pub fn exec( pub fn exec(
@ -30,18 +47,54 @@ pub fn exec(
deny, deny,
watch, watch,
uplc, uplc,
keep_traces, filter_traces,
trace_level,
}: Args, }: Args,
) -> miette::Result<()> { ) -> miette::Result<()> {
let result = if watch { let result = if watch {
watch_project(directory.as_deref(), watch::default_filter, 500, |p| { watch_project(directory.as_deref(), watch::default_filter, 500, |p| {
p.build(uplc, keep_traces.into()) p.build(
uplc,
match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
)
}) })
} else { } else {
with_project(directory.as_deref(), deny, |p| { with_project(directory.as_deref(), deny, |p| {
p.build(uplc, keep_traces.into()) p.build(
uplc,
match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
)
}) })
}; };
result.map_err(|_| process::exit(1)) result.map_err(|_| process::exit(1))
} }
#[allow(clippy::type_complexity)]
pub fn filter_traces_parser(
) -> MapValueParser<PossibleValuesParser, fn(String) -> fn(TraceLevel) -> Tracing> {
PossibleValuesParser::new(["user-defined", "compiler-generated", "all"]).map(
|s: String| match s.as_str() {
"user-defined" => Tracing::UserDefined,
"compiler-generated" => Tracing::CompilerGenerated,
"all" => Tracing::All,
_ => unreachable!(),
},
)
}
#[allow(clippy::type_complexity)]
pub fn trace_level_parser() -> MapValueParser<PossibleValuesParser, fn(String) -> TraceLevel> {
PossibleValuesParser::new(["silent", "compact", "verbose"]).map(|s| match s.as_str() {
"silent" => TraceLevel::Silent,
"compact" => TraceLevel::Compact,
"verbose" => TraceLevel::Verbose,
_ => unreachable!(),
})
}

View File

@ -1,3 +1,5 @@
use super::build::{filter_traces_parser, trace_level_parser};
use aiken_lang::ast::{TraceLevel, Tracing};
use aiken_project::watch::{self, watch_project, with_project}; use aiken_project::watch::{self, watch_project, with_project};
use std::{path::PathBuf, process}; use std::{path::PathBuf, process};
@ -34,9 +36,23 @@ pub struct Args {
#[clap(short, long)] #[clap(short, long)]
exact_match: bool, exact_match: bool,
/// Remove traces when generating code (including tests) /// Filter traces to be considered during testing:
#[clap(long)] /// - user-defined: only consider traces that you've explicitly defined (either through the
no_traces: bool, /// 'trace' keyword of via the trace-if-false ('?') operator.
/// - compiler-generated: only included internal traces generated by the Aiken compiler, for
/// example in usage of 'expect'.
/// - all: include both user-defined and compiler-generated traces.
/// [optional] [default: all]
#[clap(short, long, value_parser=filter_traces_parser(), default_missing_value="all", verbatim_doc_comment)]
filter_traces: Option<fn(TraceLevel) -> Tracing>,
/// Choose the verbosity level of traces:
/// - silent: disable traces altogether
/// - compact: only culprit line numbers are shown on failures
/// - verbose: enable full verbose traces as provided by the user or the compiler
/// [optional]
#[clap(short, long, value_parser=trace_level_parser(), default_value_t=TraceLevel::Verbose, verbatim_doc_comment)]
trace_level: TraceLevel,
} }
pub fn exec( pub fn exec(
@ -47,9 +63,9 @@ pub fn exec(
debug, debug,
match_tests, match_tests,
exact_match, exact_match,
no_traces,
watch, watch,
.. filter_traces,
trace_level,
}: Args, }: Args,
) -> miette::Result<()> { ) -> miette::Result<()> {
let result = if watch { let result = if watch {
@ -59,7 +75,10 @@ pub fn exec(
match_tests.clone(), match_tests.clone(),
debug, debug,
exact_match, exact_match,
(!no_traces).into(), match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
) )
}) })
} else { } else {
@ -69,7 +88,10 @@ pub fn exec(
match_tests.clone(), match_tests.clone(),
debug, debug,
exact_match, exact_match,
(!no_traces).into(), match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
) )
}) })
}; };