Merge pull request #800 from aiken-lang/codegen-traces

Adding a new option to toggle codegen traces
This commit is contained in:
Matthias Benkort 2024-01-19 14:41:25 +01:00 committed by GitHub
commit 1f6e719fde
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
32 changed files with 763 additions and 379 deletions

View File

@ -2,10 +2,24 @@
## UNRELEASED
### Added
- **aiken**: New `--trace-level` option for the `check` and `build` commands to allow choosing the verbosity level of traces amongst three levels: silent, compact & verbose. @MicroProofs @KtorZ
- **aiken**: New `--filter-traces` option for the `check` and `build` commands to restrict traces with finer granularity: user-defined traces, compiler-generated traces, or both. @MicroProofs @KtorZ
### Fixed
- **aiken-lang**: Fix flat encoding and decoding of large integer values. @KtorZ
### Removed
- **aiken**: The options `--keep-traces` (on the `build` command) and `--no-traces` (on the `check` command) have been removed; superseded by the new options. @MicroProofs @KtorZ
> [!TIP]
>
> - If you've been using `aiken check --no-traces`, you can recover the old behavior by doing `aiken check --trace-level silent`.
> - If you've been using `aiken build --keep-traces`, you can recover the old behavior by doing `aiken build --trace-level verbose`.
## v1.0.21-alpha - 2023-12-04
### Added

View File

@ -1,6 +1,7 @@
use crate::{
builtins::{self, bool, g1_element, g2_element},
expr::{TypedExpr, UntypedExpr},
line_numbers::LineNumbers,
parser::token::{Base, Token},
tipo::{PatternConstructor, Type, TypeInfo},
};
@ -42,6 +43,7 @@ pub struct Module<Info, Definitions> {
pub docs: Vec<String>,
pub type_info: Info,
pub definitions: Vec<Definitions>,
pub lines: LineNumbers,
pub kind: ModuleKind,
}
@ -1360,25 +1362,51 @@ pub enum TraceKind {
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Tracing {
NoTraces,
KeepTraces,
UserDefined(TraceLevel),
CompilerGenerated(TraceLevel),
All(TraceLevel),
}
impl From<bool> for Tracing {
fn from(keep: bool) -> Self {
if keep {
Tracing::KeepTraces
} else {
Tracing::NoTraces
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TraceLevel {
Silent, // No traces
Compact, // Line numbers only
Verbose, // Full verbose traces as provided by the user or the compiler
}
impl Tracing {
pub fn silent() -> Self {
Tracing::All(TraceLevel::Silent)
}
/// Get the tracing level based on the context we're in.
pub fn trace_level(&self, is_code_gen: bool) -> TraceLevel {
match self {
Tracing::UserDefined(lvl) => {
if is_code_gen {
TraceLevel::Silent
} else {
*lvl
}
}
Tracing::CompilerGenerated(lvl) => {
if is_code_gen {
*lvl
} else {
TraceLevel::Silent
}
}
Tracing::All(lvl) => *lvl,
}
}
}
impl From<Tracing> for bool {
fn from(value: Tracing) -> Self {
match value {
Tracing::NoTraces => false,
Tracing::KeepTraces => true,
impl Display for TraceLevel {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
match self {
TraceLevel::Silent => f.write_str("silent"),
TraceLevel::Compact => f.write_str("compact"),
TraceLevel::Verbose => f.write_str("verbose"),
}
}
}
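
Taken together, `Tracing` now carries both a filter (which traces to keep) and a `TraceLevel` (how verbose they are), and `trace_level(is_code_gen)` collapses that pair into a single level for the calling context. A minimal sketch of that resolution (not part of the diff, assuming `aiken-lang` as a dependency):

```rust
use aiken_lang::ast::{TraceLevel, Tracing};

fn main() {
    // Keep only user-defined traces, at full verbosity.
    let tracing = Tracing::UserDefined(TraceLevel::Verbose);

    // Inside the code generator (compiler-generated traces), this collapses to Silent...
    assert_eq!(tracing.trace_level(true), TraceLevel::Silent);

    // ...while user-defined traces keep the requested level.
    assert_eq!(tracing.trace_level(false), TraceLevel::Verbose);

    // `All` applies the same level in both contexts.
    assert_eq!(
        Tracing::All(TraceLevel::Compact).trace_level(true),
        TraceLevel::Compact
    );
}
```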

View File

@ -19,18 +19,19 @@ use uplc::{
use crate::{
ast::{
AssignmentKind, BinOp, Bls12_381Point, Curve, Pattern, Span, TypedArg, TypedClause,
TypedDataType, TypedFunction, TypedPattern, TypedValidator, UnOp,
AssignmentKind, BinOp, Bls12_381Point, Curve, Pattern, Span, TraceLevel, TypedArg,
TypedClause, TypedDataType, TypedFunction, TypedPattern, TypedValidator, UnOp,
},
builtins::{bool, data, int, list, string, void},
expr::TypedExpr,
gen_uplc::builder::{
check_replaceable_opaque_type, convert_opaque_type, erase_opaque_type_operations,
find_and_replace_generics, find_list_clause_or_default_first, get_arg_type_name,
get_generic_id_and_type, get_generic_variant_name, get_src_code_by_span, monomorphize,
pattern_has_conditions, wrap_as_multi_validator, wrap_validator_condition, CodeGenFunction,
SpecificClause,
get_generic_id_and_type, get_generic_variant_name, get_line_columns_by_span,
get_src_code_by_span, monomorphize, pattern_has_conditions, wrap_as_multi_validator,
wrap_validator_condition, CodeGenFunction, SpecificClause,
},
line_numbers::LineNumbers,
tipo::{
ModuleValueConstructor, PatternConstructor, Type, TypeInfo, ValueConstructor,
ValueConstructorVariant,
@ -55,9 +56,9 @@ pub struct CodeGenerator<'a> {
functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
module_types: IndexMap<&'a String, &'a TypeInfo>,
module_src: IndexMap<String, String>,
module_src: IndexMap<String, (String, LineNumbers)>,
/// immutable option
tracing: bool,
tracing: TraceLevel,
/// mutable index maps that are reset
defined_functions: IndexMap<FunctionAccessKey, ()>,
special_functions: CodeGenSpecialFuncs,
@ -74,8 +75,8 @@ impl<'a> CodeGenerator<'a> {
functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
module_types: IndexMap<&'a String, &'a TypeInfo>,
module_src: IndexMap<String, String>,
tracing: bool,
module_src: IndexMap<String, (String, LineNumbers)>,
tracing: TraceLevel,
) -> Self {
CodeGenerator {
functions,
@ -132,10 +133,10 @@ impl<'a> CodeGenerator<'a> {
air_tree_fun = wrap_validator_condition(air_tree_fun, self.tracing);
let src_code = self.module_src.get(module_name).unwrap().clone();
let (src_code, lines) = self.module_src.get(module_name).unwrap().clone();
let mut validator_args_tree =
self.check_validator_args(&fun.arguments, true, air_tree_fun, &src_code);
self.check_validator_args(&fun.arguments, true, air_tree_fun, &src_code, &lines);
validator_args_tree = AirTree::no_op().hoist_over(validator_args_tree);
@ -154,8 +155,13 @@ impl<'a> CodeGenerator<'a> {
air_tree_fun_other = wrap_validator_condition(air_tree_fun_other, self.tracing);
let mut validator_args_tree_other =
self.check_validator_args(&other.arguments, true, air_tree_fun_other, &src_code);
let mut validator_args_tree_other = self.check_validator_args(
&other.arguments,
true,
air_tree_fun_other,
&src_code,
&lines,
);
validator_args_tree_other = AirTree::no_op().hoist_over(validator_args_tree_other);
@ -468,20 +474,36 @@ impl<'a> CodeGenerator<'a> {
let air_value = self.build(value, module_name);
let msg = get_src_code_by_span(module_name, location, &self.module_src);
let msg_func = match self.tracing {
TraceLevel::Silent => None,
TraceLevel::Verbose | TraceLevel::Compact => {
if kind.is_expect() {
let msg = match self.tracing {
TraceLevel::Silent => unreachable!("excluded from pattern guards"),
TraceLevel::Compact => get_line_columns_by_span(
module_name,
location,
&self.module_src,
)
.to_string(),
TraceLevel::Verbose => {
get_src_code_by_span(module_name, location, &self.module_src)
}
};
let msg_func_name = msg.split_whitespace().join("");
let msg_func_name = msg.split_whitespace().join("");
self.special_functions.insert_new_function(
msg_func_name.clone(),
Term::string(msg),
string(),
);
self.special_functions.insert_new_function(
msg_func_name.clone(),
Term::string(msg),
string(),
);
let msg_func = if self.tracing && kind.is_expect() {
Some(self.special_functions.use_function_msg(msg_func_name))
} else {
None
Some(self.special_functions.use_function_msg(msg_func_name))
} else {
None
}
}
};
self.assignment(
@ -1586,19 +1608,19 @@ impl<'a> CodeGenerator<'a> {
// mutate code_gen_funcs and defined_data_types in this if branch
if function.is_none() && defined_data_types.get(&data_type_name).is_none() {
let (msg_term, error_term) = if self.tracing {
let msg = AirMsg::LocalVar("__param_msg".to_string());
(
Some(msg.clone()),
AirTree::trace(
msg.to_air_tree(),
tipo.clone(),
AirTree::error(tipo.clone(), false),
),
)
} else {
(None, AirTree::error(tipo.clone(), false))
let (msg_term, error_term) = match self.tracing {
TraceLevel::Silent => (None, AirTree::error(tipo.clone(), false)),
TraceLevel::Compact | TraceLevel::Verbose => {
let msg = AirMsg::LocalVar("__param_msg".to_string());
(
Some(msg.clone()),
AirTree::trace(
msg.to_air_tree(),
tipo.clone(),
AirTree::error(tipo.clone(), false),
),
)
}
};
defined_data_types.insert(data_type_name.clone(), 1);
@ -1717,16 +1739,15 @@ impl<'a> CodeGenerator<'a> {
}
}
let code_gen_func = if self.tracing {
CodeGenFunction::Function {
body: func_body,
params: vec!["__param_0".to_string(), "__param_msg".to_string()],
}
} else {
CodeGenFunction::Function {
let code_gen_func = match self.tracing {
TraceLevel::Silent => CodeGenFunction::Function {
body: func_body,
params: vec!["__param_0".to_string()],
}
},
TraceLevel::Compact | TraceLevel::Verbose => CodeGenFunction::Function {
body: func_body,
params: vec!["__param_0".to_string(), "__param_msg".to_string()],
},
};
self.code_gen_functions
@ -1737,15 +1758,14 @@ impl<'a> CodeGenerator<'a> {
defined_data_types.insert(data_type_name.to_string(), 1);
}
let args = if self.tracing {
vec![
let args = match self.tracing {
TraceLevel::Silent => vec![value],
TraceLevel::Compact | TraceLevel::Verbose => vec![
value,
msg_func
.expect("should be unreachable: no msg func with tracing enabled.")
.to_air_tree(),
]
} else {
vec![value]
],
};
let module_fn = ValueConstructorVariant::ModuleFn {
@ -2736,6 +2756,7 @@ impl<'a> CodeGenerator<'a> {
has_context: bool,
body: AirTree,
src_code: &str,
lines: &LineNumbers,
) -> AirTree {
let checked_args = arguments
.iter()
@ -2749,23 +2770,31 @@ impl<'a> CodeGenerator<'a> {
let actual_type = convert_opaque_type(&arg.tipo, &self.data_types);
let msg = src_code
.get(arg_span.start..arg_span.end)
.expect("Out of bounds span")
.to_string();
let msg_func = match self.tracing {
TraceLevel::Silent => None,
TraceLevel::Compact | TraceLevel::Verbose => {
let msg = match self.tracing {
TraceLevel::Silent => unreachable!("excluded from pattern guards"),
TraceLevel::Compact => lines
.line_and_column_number(arg_span.start)
.expect("Out of bounds span")
.to_string(),
TraceLevel::Verbose => src_code
.get(arg_span.start..arg_span.end)
.expect("Out of bounds span")
.to_string(),
};
let msg_func_name = msg.split_whitespace().join("");
let msg_func_name = msg.split_whitespace().join("");
self.special_functions.insert_new_function(
msg_func_name.to_string(),
Term::string(msg),
string(),
);
self.special_functions.insert_new_function(
msg_func_name.to_string(),
Term::string(msg),
string(),
);
let msg_func = if self.tracing && !actual_type.is_data() {
Some(self.special_functions.use_function_msg(msg_func_name))
} else {
None
Some(self.special_functions.use_function_msg(msg_func_name))
}
};
let assign = self.assignment(
@ -3721,14 +3750,16 @@ impl<'a> CodeGenerator<'a> {
fn gen_uplc(&mut self, ir: Air, arg_stack: &mut Vec<Term<Name>>) -> Option<Term<Name>> {
// Going to mark the changes made to code gen after air tree implementation
let error_term = if self.tracing && air_holds_msg(&ir) {
// In the case of an air that holds a msg and tracing is active
// we pop the msg off the stack first
let msg = arg_stack.pop().unwrap();
Term::Error.delayed_trace(msg)
} else {
Term::Error
let error_term = match self.tracing {
TraceLevel::Silent => Term::Error,
TraceLevel::Compact | TraceLevel::Verbose => {
if air_holds_msg(&ir) {
let msg = arg_stack.pop().unwrap();
Term::Error.delayed_trace(msg)
} else {
Term::Error
}
}
};
match ir {

View File

@ -15,11 +15,12 @@ use uplc::{
use crate::{
ast::{
AssignmentKind, DataType, Pattern, Span, TypedArg, TypedClause, TypedClauseGuard,
TypedDataType, TypedPattern,
AssignmentKind, DataType, Pattern, Span, TraceLevel, TypedArg, TypedClause,
TypedClauseGuard, TypedDataType, TypedPattern,
},
builtins::{bool, data, function, int, list, string, void},
expr::TypedExpr,
line_numbers::{LineColumn, LineNumbers},
tipo::{PatternConstructor, TypeVar, ValueConstructor, ValueConstructorVariant},
};
@ -1621,52 +1622,12 @@ pub fn special_case_builtin(
pub fn wrap_as_multi_validator(
spend: Term<Name>,
mint: Term<Name>,
trace: bool,
trace: TraceLevel,
spend_name: String,
mint_name: String,
) -> Term<Name> {
if trace {
let trace_string = format!(
"Incorrect redeemer type for validator {}.
Double check you have wrapped the redeemer type as specified in your plutus.json",
spend_name
);
let error_term = Term::Error.delayed_trace(Term::var("__incorrect_second_arg_type"));
Term::var("__second_arg")
.delayed_choose_data(
Term::equals_integer()
.apply(Term::integer(0.into()))
.apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("__second_arg")))
.delayed_if_then_else(
mint.apply(Term::var("__first_arg"))
.apply(Term::var("__second_arg"))
.delayed_trace(Term::string(format!(
"Running 2 arg validator {}",
mint_name
))),
spend
.apply(Term::var("__first_arg"))
.apply(Term::head_list().apply(
Term::var(CONSTR_FIELDS_EXPOSER).apply(Term::var("__second_arg")),
))
.delayed_trace(Term::string(format!(
"Running 3 arg validator {}",
spend_name
))),
),
error_term.clone(),
error_term.clone(),
error_term.clone(),
error_term,
)
.lambda("__incorrect_second_arg_type")
.apply(Term::string(trace_string))
.lambda("__second_arg")
.lambda("__first_arg")
} else {
Term::equals_integer()
match trace {
TraceLevel::Silent | TraceLevel::Compact => Term::equals_integer()
.apply(Term::integer(0.into()))
.apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("__second_arg")))
.delayed_if_then_else(
@ -1678,7 +1639,50 @@ pub fn wrap_as_multi_validator(
),
)
.lambda("__second_arg")
.lambda("__first_arg")
.lambda("__first_arg"),
TraceLevel::Verbose => {
let trace_string = format!(
"Incorrect redeemer type for validator {}.
Double check you have wrapped the redeemer type as specified in your plutus.json",
spend_name
);
let error_term = Term::Error.delayed_trace(Term::var("__incorrect_second_arg_type"));
let then_term = mint
.apply(Term::var("__first_arg"))
.apply(Term::var("__second_arg"));
let else_term = spend.apply(Term::var("__first_arg")).apply(
Term::head_list()
.apply(Term::var(CONSTR_FIELDS_EXPOSER).apply(Term::var("__second_arg"))),
);
Term::var("__second_arg")
.delayed_choose_data(
Term::equals_integer()
.apply(Term::integer(0.into()))
.apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("__second_arg")))
.delayed_if_then_else(
then_term.delayed_trace(Term::string(format!(
"Running 2 arg validator {}",
mint_name
))),
else_term.delayed_trace(Term::string(format!(
"Running 3 arg validator {}",
spend_name
))),
),
error_term.clone(),
error_term.clone(),
error_term.clone(),
error_term,
)
.lambda("__incorrect_second_arg_type")
.apply(Term::string(trace_string))
.lambda("__second_arg")
.lambda("__first_arg")
}
}
}
@ -1717,16 +1721,15 @@ pub fn cast_validator_args(term: Term<Name>, arguments: &[TypedArg]) -> Term<Nam
term
}
pub fn wrap_validator_condition(air_tree: AirTree, trace: bool) -> AirTree {
pub fn wrap_validator_condition(air_tree: AirTree, trace: TraceLevel) -> AirTree {
let success_branch = vec![(air_tree, AirTree::void())];
let otherwise = if trace {
AirTree::trace(
let otherwise = match trace {
TraceLevel::Silent | TraceLevel::Compact => AirTree::error(void(), true),
TraceLevel::Verbose => AirTree::trace(
AirTree::string("Validator returned false"),
void(),
AirTree::error(void(), true),
)
} else {
AirTree::error(void(), true)
),
};
AirTree::if_branches(success_branch, void(), otherwise)
@ -1758,9 +1761,9 @@ pub fn extract_constant(term: &Term<Name>) -> Option<Rc<UplcConstant>> {
pub fn get_src_code_by_span(
module_name: &String,
span: &Span,
module_src: &IndexMap<String, String>,
module_src: &IndexMap<String, (String, LineNumbers)>,
) -> String {
let src = module_src
let (src, _) = module_src
.get(module_name)
.unwrap_or_else(|| panic!("Missing module {module_name}"));
@ -1769,6 +1772,20 @@ pub fn get_src_code_by_span(
.to_string()
}
pub fn get_line_columns_by_span(
module_name: &String,
span: &Span,
module_src: &IndexMap<String, (String, LineNumbers)>,
) -> LineColumn {
let (_, lines) = module_src
.get(module_name)
.unwrap_or_else(|| panic!("Missing module {module_name}"));
lines
.line_and_column_number(span.start)
.expect("Out of bounds span")
}
pub fn air_holds_msg(air: &Air) -> bool {
match air {
Air::AssertConstr { .. } | Air::AssertBool { .. } | Air::FieldsEmpty | Air::ListEmpty => {

View File

@ -10,6 +10,7 @@ pub mod expr;
pub mod format;
pub mod gen_uplc;
pub mod levenshtein;
pub mod line_numbers;
pub mod parser;
pub mod pretty;
pub mod tipo;

View File

@ -0,0 +1,126 @@
use std::fmt::{self, Display};
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LineNumbers {
line_starts: Vec<usize>,
length: usize,
last: Option<usize>,
}
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct LineColumn {
pub line: usize,
pub column: usize,
}
impl Display for LineColumn {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
f.write_str(&format!("L{};{}", self.line, self.column))
}
}
impl LineNumbers {
pub fn new(src: &str) -> Self {
let line_starts: Vec<usize> = std::iter::once(0)
.chain(src.match_indices('\n').map(|(i, _)| i + 1))
.collect();
let length = src.len();
Self {
length,
last: line_starts.last().cloned(),
line_starts: if length > 0 { line_starts } else { Vec::new() },
}
}
/// Get the line number for a byte index
pub fn line_number(&self, byte_index: usize) -> Option<usize> {
self.line_starts
.binary_search(&byte_index)
.map(|l| Some(l + 1))
.unwrap_or_else(|next_index| {
if Some(next_index) >= self.last {
None
} else {
Some(next_index)
}
})
}
pub fn line_and_column_number(&self, byte_index: usize) -> Option<LineColumn> {
let line = self.line_number(byte_index)?;
let column = byte_index - self.line_starts.get(line - 1).copied().unwrap_or_default() + 1;
Some(LineColumn { line, column })
}
#[allow(dead_code)]
pub fn byte_index(&self, line: usize, character: usize) -> usize {
match self.line_starts.get(line) {
Some(line_index) => *line_index + character,
None => self.length,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use chumsky::text::Character;
use indoc::indoc;
fn assert_line_column(src: &str, ix: usize, lcol: Option<LineColumn>) {
let lines = LineNumbers::new(src);
println!("{lines:?}");
let byte = src
.as_bytes()
.get(ix)
.map(|b| {
if b.is_ascii() {
format!("{}", b.to_char())
} else {
format!("{b}")
}
})
.unwrap_or_else(|| "OUT-OF-BOUNDS".to_string());
assert_eq!(
lines.line_and_column_number(ix),
lcol,
"\n{src}\n--> at index {ix} ({byte})\n",
);
}
#[test]
fn out_of_range_byte_index() {
let src = indoc! { r#""# };
assert_line_column(src, 42, None);
assert_line_column(src, 0, None);
}
#[test]
fn basic() {
let src = indoc! { r#"
foo
bar
"# };
assert_line_column(src, 0, Some(LineColumn { line: 1, column: 1 }));
assert_line_column(src, 2, Some(LineColumn { line: 1, column: 3 }));
assert_line_column(src, 4, Some(LineColumn { line: 2, column: 1 }));
}
#[test]
fn unicode() {
let src = indoc! { r#"
💩
foo
"# };
assert_line_column(src, 0, Some(LineColumn { line: 1, column: 1 }));
assert_line_column(src, 2, Some(LineColumn { line: 1, column: 3 }));
assert_line_column(src, 5, Some(LineColumn { line: 2, column: 1 }));
}
}
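
The new `LineNumbers` helper is what backs `TraceLevel::Compact`: rather than embedding source text in the trace, only the offending line and column are emitted, rendered by `LineColumn`'s `Display` impl as `L<line>;<column>`. A small usage sketch (not part of the diff, assuming `aiken-lang` as a dependency):

```rust
use aiken_lang::line_numbers::LineNumbers;

fn main() {
    let src = "fn add(a: Int, b: Int) {\n  a + b\n}\n";
    let lines = LineNumbers::new(src);

    // Byte offset 27 points at the `a` of `a + b`, on the second line.
    let lcol = lines
        .line_and_column_number(27)
        .expect("offset is within bounds");

    assert_eq!((lcol.line, lcol.column), (2, 3));

    // This is the message a compact trace would carry.
    assert_eq!(lcol.to_string(), "L2;3");
}
```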

View File

@ -15,7 +15,7 @@ pub use definition::parser as definition;
pub use expr::parser as expression;
pub use pattern::parser as pattern;
use crate::ast;
use crate::{ast, line_numbers::LineNumbers};
use chumsky::prelude::*;
use error::ParseError;
use extra::ModuleExtra;
@ -30,8 +30,11 @@ pub fn module(
let definitions = definition().repeated().then_ignore(end()).parse(stream)?;
let lines = LineNumbers::new(src);
let module = ast::UntypedModule {
kind,
lines,
definitions,
docs: vec![],
name: "".to_string(),

View File

@ -19,5 +19,17 @@ Module {
},
),
],
lines: LineNumbers {
line_starts: [
0,
10,
11,
27,
],
length: 43,
last: Some(
27,
),
},
kind: Validator,
}

View File

@ -197,5 +197,32 @@ Module {
},
),
],
lines: LineNumbers {
line_starts: [
0,
13,
27,
34,
36,
37,
50,
64,
71,
73,
74,
87,
104,
106,
107,
120,
138,
154,
156,
],
length: 156,
last: Some(
156,
),
},
kind: Validator,
}

View File

@ -48,5 +48,18 @@ Module {
},
),
],
lines: LineNumbers {
line_starts: [
0,
11,
27,
31,
33,
],
length: 33,
last: Some(
33,
),
},
kind: Validator,
}

View File

@ -46,5 +46,18 @@ Module {
},
),
],
lines: LineNumbers {
line_starts: [
0,
11,
25,
29,
31,
],
length: 31,
last: Some(
31,
),
},
kind: Validator,
}

View File

@ -20,5 +20,15 @@ Module {
},
),
],
lines: LineNumbers {
line_starts: [
0,
16,
],
length: 16,
last: Some(
16,
),
},
kind: Validator,
}

View File

@ -1,5 +1,5 @@
use crate::{
ast::{Definition, ModuleKind, Tracing, TypedModule, UntypedModule},
ast::{Definition, ModuleKind, TraceLevel, Tracing, TypedModule, UntypedModule},
builtins,
expr::TypedExpr,
parser,
@ -31,7 +31,7 @@ fn check_module(
kind,
"test/project",
&module_types,
Tracing::KeepTraces,
Tracing::All(TraceLevel::Verbose),
&mut warnings,
);

View File

@ -1,3 +1,4 @@
use crate::line_numbers::LineNumbers;
use std::{cmp::Ordering, collections::HashMap, rc::Rc};
use vec1::Vec1;
@ -5,9 +6,9 @@ use crate::{
ast::{
Annotation, Arg, ArgName, AssignmentKind, BinOp, Bls12_381Point, ByteArrayFormatPreference,
CallArg, ClauseGuard, Constant, Curve, IfBranch, LogicalOpChainKind, RecordUpdateSpread,
Span, TraceKind, Tracing, TypedArg, TypedCallArg, TypedClause, TypedClauseGuard,
TypedIfBranch, TypedPattern, TypedRecordUpdateArg, UnOp, UntypedArg, UntypedClause,
UntypedClauseGuard, UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg,
Span, TraceKind, TraceLevel, Tracing, TypedArg, TypedCallArg, TypedClause,
TypedClauseGuard, TypedIfBranch, TypedPattern, TypedRecordUpdateArg, UnOp, UntypedArg,
UntypedClause, UntypedClauseGuard, UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg,
},
builtins::{bool, byte_array, function, g1_element, g2_element, int, list, string, tuple},
expr::{FnStyle, TypedExpr, UntypedExpr},
@ -26,6 +27,8 @@ use super::{
#[derive(Debug)]
pub(crate) struct ExprTyper<'a, 'b> {
pub(crate) lines: &'a LineNumbers,
pub(crate) environment: &'a mut Environment<'b>,
// We tweak the tracing behavior during type-check. Traces are either kept or left out of the
@ -421,24 +424,36 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
},
};
let text = TypedExpr::String {
location,
tipo: string(),
value: format!(
"{} ? False",
format::Formatter::new()
.expr(&value, false)
.to_pretty_string(999)
),
let text = match self.tracing.trace_level(false) {
TraceLevel::Verbose => Some(TypedExpr::String {
location,
tipo: string(),
value: format!(
"{} ? False",
format::Formatter::new()
.expr(&value, false)
.to_pretty_string(999)
),
}),
TraceLevel::Compact => Some(TypedExpr::String {
location,
tipo: string(),
value: self
.lines
.line_and_column_number(location.start)
.expect("Spans are within bounds.")
.to_string(),
}),
TraceLevel::Silent => None,
};
let typed_value = self.infer(value)?;
self.unify(bool(), typed_value.tipo(), typed_value.location(), false)?;
match self.tracing {
Tracing::NoTraces => Ok(typed_value),
Tracing::KeepTraces => Ok(TypedExpr::If {
match self.tracing.trace_level(false) {
TraceLevel::Silent => Ok(typed_value),
TraceLevel::Verbose | TraceLevel::Compact => Ok(TypedExpr::If {
location,
branches: vec1::vec1![IfBranch {
condition: typed_value,
@ -448,7 +463,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
final_else: Box::new(TypedExpr::Trace {
location,
tipo: bool(),
text: Box::new(text),
text: Box::new(text.expect("TraceLevel::Silent excluded from pattern-guard")),
then: Box::new(var_false),
}),
tipo: bool(),
@ -1817,9 +1832,23 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
})
}
match self.tracing {
Tracing::NoTraces => Ok(then),
Tracing::KeepTraces => Ok(TypedExpr::Trace {
match self.tracing.trace_level(false) {
TraceLevel::Silent => Ok(then),
TraceLevel::Compact => Ok(TypedExpr::Trace {
location,
tipo,
then: Box::new(then),
text: Box::new(TypedExpr::String {
location,
tipo: string(),
value: self
.lines
.line_and_column_number(location.start)
.expect("Spans are within bounds.")
.to_string(),
}),
}),
TraceLevel::Verbose => Ok(TypedExpr::Trace {
location,
tipo,
then: Box::new(then),
@ -1976,12 +2005,17 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
self.environment.instantiate(t, ids, &self.hydrator)
}
pub fn new(environment: &'a mut Environment<'b>, tracing: Tracing) -> Self {
pub fn new(
environment: &'a mut Environment<'b>,
lines: &'a LineNumbers,
tracing: Tracing,
) -> Self {
Self {
hydrator: Hydrator::new(),
environment,
tracing,
ungeneralised_function_used: false,
lines,
}
}

View File

@ -8,6 +8,7 @@ use crate::{
},
builtins,
builtins::function,
line_numbers::LineNumbers,
IdGenerator,
};
@ -79,8 +80,14 @@ impl UntypedModule {
}
for def in consts.into_iter().chain(not_consts) {
let definition =
infer_definition(def, &name, &mut hydrators, &mut environment, tracing)?;
let definition = infer_definition(
def,
&name,
&mut hydrators,
&mut environment,
&self.lines,
tracing,
)?;
definitions.push(definition);
}
@ -127,6 +134,7 @@ impl UntypedModule {
name: name.clone(),
definitions,
kind,
lines: self.lines,
type_info: TypeInfo {
name,
types,
@ -145,6 +153,7 @@ fn infer_definition(
module_name: &String,
hydrators: &mut HashMap<String, Hydrator>,
environment: &mut Environment<'_>,
lines: &LineNumbers,
tracing: Tracing,
) -> Result<TypedDefinition, Error> {
match def {
@ -181,7 +190,7 @@ fn infer_definition(
.map(|(arg_name, tipo)| arg_name.set_type(tipo.clone()))
.collect();
let mut expr_typer = ExprTyper::new(environment, tracing);
let mut expr_typer = ExprTyper::new(environment, lines, tracing);
expr_typer.hydrator = hydrators
.remove(&name)
@ -293,6 +302,7 @@ fn infer_definition(
module_name,
hydrators,
environment,
lines,
tracing,
)?
else {
@ -343,6 +353,7 @@ fn infer_definition(
module_name,
hydrators,
environment,
lines,
tracing,
)?
else {
@ -404,6 +415,7 @@ fn infer_definition(
module_name,
hydrators,
environment,
lines,
tracing,
)? {
environment.unify(f.return_type.clone(), builtins::bool(), f.location, false)?;
@ -585,7 +597,7 @@ fn infer_definition(
..
}) => {
let typed_expr =
ExprTyper::new(environment, tracing).infer_const(&annotation, *value)?;
ExprTyper::new(environment, lines, tracing).infer_const(&annotation, *value)?;
let tipo = typed_expr.tipo();

View File

@ -1,5 +1,8 @@
use crate::{line_numbers::LineNumbers, utils::span_to_lsp_range};
use aiken_lang::ast::{Definition, ModuleKind, Span, UntypedDefinition, Use};
use crate::utils::span_to_lsp_range;
use aiken_lang::{
ast::{Definition, ModuleKind, Span, UntypedDefinition, Use},
line_numbers::LineNumbers,
};
use aiken_project::module::CheckedModule;
use itertools::Itertools;
use std::fs;

View File

@ -7,7 +7,6 @@ use std::env;
mod cast;
mod edits;
pub mod error;
mod line_numbers;
mod quickfix;
pub mod server;
mod utils;

View File

@ -1,48 +0,0 @@
#[allow(dead_code)]
#[derive(Debug)]
pub struct LineNumbers {
line_starts: Vec<usize>,
length: usize,
}
impl LineNumbers {
pub fn new(src: &str) -> Self {
Self {
length: src.len(),
line_starts: std::iter::once(0)
.chain(src.match_indices('\n').map(|(i, _)| i + 1))
.collect(),
}
}
/// Get the line number for a byte index
pub fn line_number(&self, byte_index: usize) -> usize {
self.line_starts
.binary_search(&byte_index)
.unwrap_or_else(|next_line| next_line - 1)
+ 1
}
// TODO: handle unicode characters that may be more than 1 byte in width
pub fn line_and_column_number(&self, byte_index: usize) -> LineColumn {
let line = self.line_number(byte_index);
let column = byte_index - self.line_starts.get(line - 1).copied().unwrap_or_default() + 1;
LineColumn { line, column }
}
// TODO: handle unicode characters that may be more than 1 byte in width
/// 0 indexed line and character to byte index
#[allow(dead_code)]
pub fn byte_index(&self, line: usize, character: usize) -> usize {
match self.line_starts.get(line) {
Some(line_index) => *line_index + character,
None => self.length,
}
}
}
#[derive(Debug, Clone, Copy)]
pub struct LineColumn {
pub line: usize,
pub column: usize,
}

View File

@ -8,6 +8,7 @@ use std::{
use aiken_lang::{
ast::{Definition, Located, ModuleKind, Span, Use},
error::ExtraData,
line_numbers::LineNumbers,
parser,
tipo::pretty::Printer,
};
@ -35,7 +36,6 @@ use miette::Diagnostic;
use crate::{
cast::{cast_notification, cast_request},
error::Error as ServerError,
line_numbers::LineNumbers,
quickfix,
utils::{
path_to_uri, span_to_lsp_range, text_edit_replace, uri_to_module_name,

View File

@ -1,10 +1,8 @@
use std::{collections::HashMap, path::PathBuf};
use aiken_lang::ast::Tracing;
use aiken_lang::{ast::Tracing, line_numbers::LineNumbers};
use aiken_project::{config::Config, error::Error as ProjectError, module::CheckedModule, Project};
use crate::line_numbers::LineNumbers;
#[derive(Debug)]
pub struct SourceInfo {
/// The path to the source file from within the project root
@ -34,7 +32,7 @@ impl LspProject {
let result = self
.project
.check(true, None, false, false, Tracing::NoTraces);
.check(true, None, false, false, Tracing::silent());
self.project.restore(checkpoint);

View File

@ -1,11 +1,11 @@
use std::path::{Path, PathBuf};
use aiken_lang::ast::Span;
use aiken_lang::{ast::Span, line_numbers::LineNumbers};
use itertools::Itertools;
use lsp_types::TextEdit;
use urlencoding::decode;
use crate::{error::Error, line_numbers::LineNumbers};
use crate::error::Error;
pub const COMPILING_PROGRESS_TOKEN: &str = "compiling-aiken";
pub const CREATE_COMPILING_PROGRESS_TOKEN: &str = "create-compiling-progress-token";
@ -37,8 +37,12 @@ pub fn path_to_uri(path: PathBuf) -> Result<lsp_types::Url, Error> {
}
pub fn span_to_lsp_range(location: Span, line_numbers: &LineNumbers) -> lsp_types::Range {
let start = line_numbers.line_and_column_number(location.start);
let end = line_numbers.line_and_column_number(location.end);
let start = line_numbers
.line_and_column_number(location.start)
.expect("Spans are within bounds");
let end = line_numbers
.line_and_column_number(location.end)
.expect("Spans are within bounds");
lsp_types::Range {
start: lsp_types::Position {

View File

@ -217,7 +217,11 @@ impl Validator {
mod tests {
use std::collections::HashMap;
use aiken_lang::{self, builtins};
use aiken_lang::{
self,
ast::{TraceLevel, Tracing},
builtins,
};
use uplc::ast as uplc_ast;
use crate::tests::TestProject;
@ -240,7 +244,7 @@ mod tests {
&project.functions,
&project.data_types,
&project.module_types,
true,
Tracing::All(TraceLevel::Verbose),
);
let (validator, def) = modules

View File

@ -176,7 +176,7 @@ where
let parsed_modules = self.parse_sources(self.config.name.clone())?;
self.type_check(parsed_modules, Tracing::NoTraces, false)?;
self.type_check(parsed_modules, Tracing::silent(), false)?;
self.event_listener.handle_event(Event::GeneratingDocFiles {
output_path: destination.clone(),
@ -282,7 +282,7 @@ where
&self.functions,
&self.data_types,
&self.module_types,
options.tracing.into(),
options.tracing,
);
let blueprint = Blueprint::new(&self.config, &self.checked_modules, &mut generator)
@ -312,7 +312,7 @@ where
exact_match,
} => {
let tests =
self.collect_tests(verbose, match_tests, exact_match, options.tracing.into())?;
self.collect_tests(verbose, match_tests, exact_match, options.tracing)?;
if !tests.is_empty() {
self.event_listener.handle_event(Event::RunningTests);
@ -518,7 +518,7 @@ where
let parsed_modules = self.parse_sources(package.name)?;
self.type_check(parsed_modules, Tracing::NoTraces, true)?;
self.type_check(parsed_modules, Tracing::silent(), true)?;
}
Ok(())
@ -675,7 +675,7 @@ where
verbose: bool,
match_tests: Option<Vec<String>>,
exact_match: bool,
tracing: bool,
tracing: Tracing,
) -> Result<Vec<Script>, Error> {
let mut scripts = Vec::new();
let mut testable_validators = Vec::new();

View File

@ -1,13 +1,14 @@
use crate::error::Error;
use aiken_lang::{
ast::{
DataType, Definition, Function, Located, ModuleKind, TypedDataType, TypedFunction,
DataType, Definition, Function, Located, ModuleKind, Tracing, TypedDataType, TypedFunction,
TypedModule, TypedValidator, UntypedModule, Validator,
},
gen_uplc::{
builder::{DataTypeKey, FunctionAccessKey},
CodeGenerator,
},
line_numbers::LineNumbers,
parser::extra::{comments_before, Comment, ModuleExtra},
tipo::TypeInfo,
};
@ -358,7 +359,7 @@ impl CheckedModules {
builtin_functions: &'a IndexMap<FunctionAccessKey, TypedFunction>,
builtin_data_types: &'a IndexMap<DataTypeKey, TypedDataType>,
module_types: &'a HashMap<String, TypeInfo>,
tracing: bool,
tracing: Tracing,
) -> CodeGenerator<'a> {
let mut functions = IndexMap::new();
for (k, v) in builtin_functions {
@ -401,7 +402,10 @@ impl CheckedModules {
| Definition::Use(_) => {}
}
}
module_src.insert(module.name.clone(), module.code.clone());
module_src.insert(
module.name.clone(),
(module.code.clone(), LineNumbers::new(&module.code)),
);
}
let mut module_types_index = IndexMap::new();
@ -412,7 +416,7 @@ impl CheckedModules {
data_types,
module_types_index,
module_src,
tracing,
tracing.trace_level(true),
)
}
}

View File

@ -316,7 +316,11 @@ fn fmt_test(eval_info: &EvalInfo, max_mem: usize, max_cpu: usize, styled: bool)
format!(
"{arrow} {styled_line}",
arrow = "".if_supports_color(Stderr, |s| s.bright_yellow()),
styled_line = line.if_supports_color(Stderr, |s| s.bright_black())
styled_line = line
.split('\n')
.map(|l| format!("{}", l.if_supports_color(Stderr, |s| s.bright_black())))
.collect::<Vec<_>>()
.join("\n")
)
})
.collect::<Vec<_>>()

View File

@ -1,6 +1,6 @@
use pretty_assertions::assert_eq;
use aiken_lang::ast::{Definition, Function, TypedFunction, TypedValidator};
use aiken_lang::ast::{Definition, Function, TraceLevel, Tracing, TypedFunction, TypedValidator};
use uplc::{
ast::{Constant, Data, DeBruijn, Name, Program, Term, Type},
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER},
@ -26,7 +26,7 @@ fn assert_uplc(source_code: &str, expected: Term<Name>, should_fail: bool) {
&project.functions,
&project.data_types,
&project.module_types,
true,
Tracing::All(TraceLevel::Verbose),
);
let Some(checked_module) = modules.values().next() else {
@ -115,7 +115,7 @@ fn acceptance_test_1_length() {
1 + length(rest)
}
}
test length_1() {
length([1, 2, 3]) == 3
}
@ -167,7 +167,7 @@ fn acceptance_test_2_repeat() {
[x, ..repeat(x, n - 1)]
}
}
test repeat_1() {
repeat("aiken", 2) == ["aiken", "aiken"]
}
@ -232,11 +232,11 @@ fn acceptance_test_3_concat() {
f(x, foldr(rest, f, zero))
}
}
pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, fn(x, xs) { [x, ..xs] }, right)
}
test concat_1() {
concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6]
}
@ -329,15 +329,15 @@ fn acceptance_test_4_concat_no_anon_func() {
f(x, foldr(rest, f, zero))
}
}
pub fn prepend(x: a, xs: List<a>) -> List<a> {
[x, ..xs]
}
pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, prepend, right)
}
test concat_1() {
concat([1, 2, 3], [4, 5, 6]) == [1, 2, 3, 4, 5, 6]
}
@ -425,7 +425,7 @@ fn acceptance_test_4_concat_no_anon_func() {
fn acceptance_test_5_direct_head() {
let src = r#"
use aiken/builtin.{head_list}
test head_1() {
let head = fn(xs){
when xs is {
@ -433,9 +433,9 @@ fn acceptance_test_5_direct_head() {
_ -> Some(head_list(xs))
}
}
head([1, 2, 3]) == Some(1)
}
}
"#;
assert_uplc(
@ -473,7 +473,7 @@ fn acceptance_test_5_direct_head() {
fn acceptance_test_5_direct_2_heads() {
let src = r#"
use aiken/builtin.{head_list}
test head_2() {
let head = fn(xs: List<Int>){
when xs is {
@ -482,9 +482,9 @@ fn acceptance_test_5_direct_2_heads() {
[a, b, ..] -> Some([a,b])
}
}
head([1, 2, 3]) == Some([1, 2])
}
}
"#;
assert_uplc(
@ -583,10 +583,10 @@ fn acceptance_test_5_head_not_empty() {
_ -> Some(head_list(xs))
}
}
test head_1() {
head([1, 2, 3]) == Some(1)
}
}
"#;
assert_uplc(
@ -631,10 +631,10 @@ fn acceptance_test_5_head_empty() {
_ -> Some(head_list(xs))
}
}
test head_1() {
head([]) == None
}
}
"#;
assert_uplc(
@ -744,10 +744,10 @@ fn acceptance_test_7_unzip() {
}
}
}
test unzip1() {
let x = [(3, #"55"), (4, #"7799")]
unzip(x) == ([3, 4], [#"55", #"7799"])
}
"#;
@ -869,7 +869,7 @@ fn acceptance_test_8_is_empty() {
pub fn is_empty(bytes: ByteArray) -> Bool {
builtin.length_of_bytearray(bytes) == 0
}
test is_empty_1() {
is_empty(#"") == True
}
@ -902,7 +902,7 @@ fn acceptance_test_8_is_not_empty() {
pub fn is_empty(bytes: ByteArray) -> Bool {
builtin.length_of_bytearray(bytes) == 0
}
test is_empty_1() {
is_empty(#"01") == False
}
@ -935,7 +935,7 @@ fn acceptance_test_9_is_empty() {
pub fn is_empty(bytes: ByteArray) -> Bool {
length_of_bytearray(bytes) == 0
}
test is_empty_1() {
is_empty(#"") == True
}
@ -971,14 +971,14 @@ fn acceptance_test_10_map_none() {
Some(f(a))
}
}
fn add_one(n: Int) -> Int {
n + 1
}
test map_1() {
map(None, add_one) == None
}
}
"#;
assert_uplc(
@ -1053,14 +1053,14 @@ fn acceptance_test_10_map_some() {
Some(f(a))
}
}
fn add_one(n: Int) -> Int {
n + 1
}
test map_1() {
map(Some(1), add_one) == Some(2)
}
}
"#;
assert_uplc(
@ -1135,10 +1135,10 @@ fn acceptance_test_11_map_empty() {
[f(x), ..map(rest, f)]
}
}
test map_1() {
map([], fn(n) { n + 1 }) == []
}
}
"#;
assert_uplc(
@ -1206,10 +1206,10 @@ fn acceptance_test_11_map_filled() {
[f(x), ..map(rest, f)]
}
}
test map_1() {
map([6, 7, 8], fn(n) { n + 1 }) == [7, 8, 9]
}
}
"#;
assert_uplc(
@ -1291,7 +1291,7 @@ fn acceptance_test_12_filter_even() {
}
}
}
test filter_1() {
filter([1, 2, 3, 4, 5, 6], fn(x) { builtin.mod_integer(x, 2) == 0 }) == [2, 4, 6]
}
@ -1776,10 +1776,10 @@ fn acceptance_test_20_map_some() {
Some(f(a))
}
}
test map_1() {
map(Some(14), fn(n){ n + 1 }) == Some(15)
}
}
"#;
assert_uplc(
@ -1967,15 +1967,15 @@ fn acceptance_test_23_to_list() {
pub opaque type AssocList<key, value> {
inner: List<(key, value)>,
}
pub fn new() -> AssocList<key, value> {
AssocList { inner: [] }
}
pub fn to_list(m: AssocList<key, value>) -> List<(key, value)> {
m.inner
}
pub fn insert(
in m: AssocList<key, value>,
key k: key,
@ -1983,7 +1983,7 @@ fn acceptance_test_23_to_list() {
) -> AssocList<key, value> {
AssocList { inner: do_insert(m.inner, k, v) }
}
fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> {
when elems is {
[] ->
@ -1996,13 +1996,13 @@ fn acceptance_test_23_to_list() {
}
}
}
fn fixture_1() {
new()
|> insert("foo", 42)
|> insert("bar", 14)
}
test to_list_2() {
to_list(fixture_1()) == [("foo", 42), ("bar", 14)]
}
@ -2063,10 +2063,10 @@ fn acceptance_test_24_map2() {
}
}
}
test map2_3() {
map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42))
}
}
"#;
assert_uplc(
@ -2191,7 +2191,7 @@ fn acceptance_test_25_void_equal() {
let src = r#"
test nil_1() {
Void == Void
}
}
"#;
assert_uplc(
@ -2212,11 +2212,11 @@ fn acceptance_test_26_foldr() {
f(x, foldr(rest, f, zero))
}
}
pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, fn(x, xs) { [x, ..xs] }, right)
}
pub fn flat_map(xs: List<a>, f: fn(a) -> List<b>) -> List<b> {
when xs is {
[] ->
@ -2225,7 +2225,7 @@ fn acceptance_test_26_foldr() {
concat(f(x), flat_map(rest, f))
}
}
test flat_map_2() {
flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3]
}
@ -2353,11 +2353,11 @@ fn acceptance_test_27_flat_map() {
f(x, foldr(rest, f, zero))
}
}
pub fn concat(left: List<a>, right: List<a>) -> List<a> {
foldr(left, fn(x, xs) { [x, ..xs] }, right)
}
pub fn flat_map(xs: List<a>, f: fn(a) -> List<b>) -> List<b> {
when xs is {
[] ->
@ -2366,7 +2366,7 @@ fn acceptance_test_27_flat_map() {
concat(f(x), flat_map(rest, f))
}
}
test flat_map_2() {
flat_map([1, 2, 3], fn(a) { [a, a] }) == [1, 1, 2, 2, 3, 3]
}
@ -2498,7 +2498,7 @@ fn acceptance_test_28_unique_empty_list() {
}
}
}
pub fn unique(xs: List<a>) -> List<a> {
when xs is {
[] ->
@ -2507,10 +2507,10 @@ fn acceptance_test_28_unique_empty_list() {
[x, ..unique(filter(rest, fn(y) { y != x }))]
}
}
test unique_1() {
unique([]) == []
}
}
"#;
assert_uplc(
@ -2606,7 +2606,7 @@ fn acceptance_test_28_unique_list() {
}
}
}
pub fn unique(xs: List<a>) -> List<a> {
when xs is {
[] ->
@ -2615,10 +2615,10 @@ fn acceptance_test_28_unique_list() {
[x, ..unique(filter(rest, fn(y) { y != x }))]
}
}
test unique_1() {
unique([1,2,3,1]) == [1,2,3]
}
}
"#;
assert_uplc(
@ -2721,15 +2721,15 @@ fn acceptance_test_29_union() {
pub opaque type AssocList<key, value> {
inner: List<(key, value)>,
}
pub fn new() -> AssocList<key, value> {
AssocList { inner: [] }
}
pub fn from_list(xs: List<(key, value)>) -> AssocList<key, value> {
AssocList { inner: do_from_list(xs) }
}
fn do_from_list(xs: List<(key, value)>) -> List<(key, value)> {
when xs is {
[] ->
@ -2738,7 +2738,7 @@ fn acceptance_test_29_union() {
do_insert(do_from_list(rest), k, v)
}
}
pub fn insert(
in m: AssocList<key, value>,
key k: key,
@ -2746,7 +2746,7 @@ fn acceptance_test_29_union() {
) -> AssocList<key, value> {
AssocList { inner: do_insert(m.inner, k, v) }
}
fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> {
when elems is {
[] ->
@ -2759,14 +2759,14 @@ fn acceptance_test_29_union() {
}
}
}
pub fn union(
left: AssocList<key, value>,
right: AssocList<key, value>,
) -> AssocList<key, value> {
AssocList { inner: do_union(left.inner, right.inner) }
}
fn do_union(
left: List<(key, value)>,
right: List<(key, value)>,
@ -2778,17 +2778,17 @@ fn acceptance_test_29_union() {
do_union(rest, do_insert(right, k, v))
}
}
fn fixture_1() {
new()
|> insert("foo", 42)
|> insert("bar", 14)
}
test union_1() {
union(fixture_1(), new()) == fixture_1()
}
"#;
assert_uplc(
@ -2954,7 +2954,7 @@ fn acceptance_test_30_abs() {
a
}
}
test abs_1() {
abs(-14) == 14
}
@ -2987,13 +2987,13 @@ fn acceptance_test_30_abs() {
#[test]
fn expect_empty_list_on_filled_list() {
let src = r#"
let src = r#"
test empty_list1() {
let x = [1,2]
expect [] = x
True
}
}
"#;
assert_uplc(
@ -3014,13 +3014,13 @@ fn expect_empty_list_on_filled_list() {
#[test]
fn expect_empty_list_on_new_list() {
let src = r#"
let src = r#"
test empty_list1() {
let x = []
expect [] = x
True
}
}
"#;
assert_uplc(
@ -3038,7 +3038,7 @@ fn expect_empty_list_on_new_list() {
#[test]
fn when_bool_is_true() {
let src = r#"
let src = r#"
test it() {
when True is {
True ->
@ -3046,7 +3046,7 @@ fn when_bool_is_true() {
False ->
fail
}
}
}
"#;
assert_uplc(
@ -3061,7 +3061,7 @@ fn when_bool_is_true() {
#[test]
fn when_bool_is_true_switched_cases() {
let src = r#"
let src = r#"
test it() {
when True is {
False ->
@ -3069,7 +3069,7 @@ fn when_bool_is_true_switched_cases() {
True ->
True
}
}
}
"#;
assert_uplc(
@ -3084,7 +3084,7 @@ fn when_bool_is_true_switched_cases() {
#[test]
fn when_bool_is_false() {
let src = r#"
let src = r#"
test it() {
when False is {
False ->
@ -3092,7 +3092,7 @@ fn when_bool_is_false() {
True ->
True
}
}
}
"#;
assert_uplc(
@ -3607,15 +3607,15 @@ fn pass_constr_as_function() {
a: Int,
b: SubMake
}
type SubMake {
c: Int
}
fn hi(sm: SubMake, to_make: fn (Int, SubMake) -> Make) -> Make {
to_make(3, sm)
}
test cry() {
Make(3, SubMake(1)) == hi(SubMake(1), Make)
}
@ -3668,23 +3668,23 @@ fn record_update_output_2_vals() {
type Address {
thing: ByteArray,
}
type Datum {
NoDatum
InlineDatum(Data)
}
type Output {
address: Address,
value: List<(ByteArray, List<(ByteArray, Int)>)>,
datum: Datum,
script_ref: Option<ByteArray>,
}
type MyDatum {
a: Int,
}
test huh() {
let prev_output =
Output {
@ -3693,10 +3693,10 @@ fn record_update_output_2_vals() {
datum: InlineDatum(MyDatum{a: 3}),
script_ref: None,
}
let next_output =
Output { ..prev_output, value: [], datum: prev_output.datum }
prev_output == next_output
}
"#;
@ -3770,23 +3770,23 @@ fn record_update_output_1_val() {
type Address {
thing: ByteArray,
}
type Datum {
NoDatum
InlineDatum(Data)
}
type Output {
address: Address,
value: List<(ByteArray, List<(ByteArray, Int)>)>,
datum: Datum,
script_ref: Option<ByteArray>,
}
type MyDatum {
a: Int,
}
test huh() {
let prev_output =
Output {
@ -3795,10 +3795,10 @@ fn record_update_output_1_val() {
datum: InlineDatum(MyDatum{a: 3}),
script_ref: None,
}
let next_output =
Output { ..prev_output, datum: prev_output.datum }
prev_output == next_output
}
"#;
@ -3871,23 +3871,23 @@ fn record_update_output_first_last_val() {
type Address {
thing: ByteArray,
}
type Datum {
NoDatum
InlineDatum(Data)
}
type Output {
address: Address,
value: List<(ByteArray, List<(ByteArray, Int)>)>,
datum: Datum,
script_ref: Option<ByteArray>,
}
type MyDatum {
a: Int,
}
test huh() {
let prev_output =
Output {
@ -3896,10 +3896,10 @@ fn record_update_output_first_last_val() {
datum: InlineDatum(MyDatum{a: 3}),
script_ref: None,
}
let next_output =
Output { ..prev_output, script_ref: None, address: Address{thing: "script_hash_0"} }
prev_output == next_output
}
"#;
@ -3969,14 +3969,14 @@ fn list_fields_unwrap() {
a: ByteArray,
b: Int,
}
fn data_fields(){
[
Fields{a: #"", b: 14},
Fields{a: #"", b: 14},
Fields{a: #"AA", b: 0}
]
}
test list_fields_unwr_0() {
when data_fields() is {
[Fields { b, .. }, ..] ->
@ -4035,27 +4035,27 @@ fn foldl_type_mismatch() {
payment_credential: ByteArray,
stake_credential: Option<ByteArray>,
}
type Output {
address: Address,
value: List<Int>,
datum: Option<Int>,
reference_script: Option<Int>,
}
pub fn foldl(self: List<a>, with: fn(a, b) -> b, zero: b) -> b {
when self is {
[] -> zero
[x, ..xs] -> foldl(xs, with, with(x, zero))
}
}
test hi() {
let addr1 = Address { payment_credential: "adff", stake_credential: None }
let out =
Output { address: addr1, value: [], datum: None, reference_script: None }
let outputs: List<Output> =
[out, out, out]
let cry =
@ -4074,7 +4074,7 @@ fn foldl_type_mismatch() {
},
None,
)
cry == cry
}
"#;
@ -4466,14 +4466,14 @@ fn expect_head_cast_data_with_tail() {
#[test]
fn test_init_3() {
let src = r#"
pub fn init(self: List<a>) -> Option<List<a>> {
when self is {
[] -> None
_ -> Some(do_init(self))
}
}
fn do_init(self: List<a>) -> List<a> {
when self is {
[] -> fail @"unreachable"
@ -4483,7 +4483,7 @@ fn test_init_3() {
[x, ..do_init(xs)]
}
}
test init_3() {
init([1, 2, 3, 4]) == Some([1, 2, 3])
}
@ -4579,7 +4579,7 @@ fn list_clause_with_guard() {
}
}
}
test init_3() {
do_init([1, 3]) == [1]
}
@ -4718,7 +4718,7 @@ fn list_clause_with_guard2() {
}
}
}
test init_3() {
do_init([1, 3]) == [1]
}
@ -4850,7 +4850,7 @@ fn list_clause_with_guard3() {
}
}
}
test init_3() {
do_init([1, 3]) == [1]
}
@ -4989,7 +4989,7 @@ fn list_clause_with_assign() {
}
}
}
test init_3() {
do_init([1, 3]) == [1]
}
@ -5135,7 +5135,7 @@ fn list_clause_with_assign2() {
}
}
}
test init_3() {
do_init([Some(1), None]) == [Some(1)]
}
@ -5262,10 +5262,10 @@ fn opaque_value_in_datum() {
a: Value
}
validator {
fn spend(dat: Dat, red: Data, ctx: Data) {
let val = dat.a
let val = dat.a
expect [(_, amount)] = val.inner.inner
@ -5452,22 +5452,22 @@ fn opaque_value_in_test() {
pub fn dat_new() -> Dat {
let v = Value { inner: Dict { inner: [("", [(#"aa", 4)] |> Dict)] } }
Dat {
c: 0,
c: 0,
a: v
}
}
test spend() {
let dat = dat_new()
let val = dat.a
let val = dat.a
expect [(_, amount)] = val.inner.inner
let final_amount = [(#"AA", 4)] |> Dict
final_amount == amount
final_amount == amount
}
"#;

View File

@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::path::PathBuf;
use aiken_lang::{
ast::{ModuleKind, Tracing, TypedDataType, TypedFunction},
ast::{ModuleKind, TraceLevel, Tracing, TypedDataType, TypedFunction},
gen_uplc::builder::{DataTypeKey, FunctionAccessKey},
parser,
tipo::TypeInfo,
@ -81,7 +81,7 @@ impl TestProject {
module.kind,
&self.package.to_string(),
&self.module_types,
Tracing::KeepTraces,
Tracing::All(TraceLevel::Verbose),
&mut warnings,
)
.expect("Failed to type-check module");

View File

@ -36,7 +36,7 @@ pub fn exec(
) -> miette::Result<()> {
with_project(directory.as_deref(), false, |p| {
if rebuild {
p.build(false, Tracing::NoTraces)?;
p.build(false, Tracing::silent())?;
}
let title = module.as_ref().map(|m| {

View File

@ -31,7 +31,7 @@ pub fn exec(
) -> miette::Result<()> {
with_project(directory.as_deref(), false, |p| {
if rebuild {
p.build(false, Tracing::NoTraces)?;
p.build(false, Tracing::silent())?;
}
let title = module.as_ref().map(|m| {

View File

@ -31,7 +31,7 @@ pub fn exec(
) -> miette::Result<()> {
with_project(directory.as_deref(), false, |p| {
if rebuild {
p.build(false, Tracing::NoTraces)?;
p.build(false, Tracing::silent())?;
}
let title = module.as_ref().map(|m| {

View File

@ -1,4 +1,7 @@
use aiken_lang::ast::{TraceLevel, Tracing};
use aiken_project::watch::{self, watch_project, with_project};
use clap::builder::MapValueParser;
use clap::builder::{PossibleValuesParser, TypedValueParser};
use std::{path::PathBuf, process};
#[derive(clap::Args)]
@ -19,9 +22,23 @@ pub struct Args {
#[clap(short, long)]
uplc: bool,
/// Do not remove traces when generating code
#[clap(short, long)]
keep_traces: bool,
/// Filter traces to be included in the generated program(s).
/// - user-defined: only consider traces that you've explicitly defined (either through the
/// 'trace' keyword or via the trace-if-false ('?') operator).
/// - compiler-generated: only include internal traces generated by the Aiken compiler, for
/// example in usage of 'expect'.
/// - all: include both user-defined and compiler-generated traces.
/// [optional] [default: all]
#[clap(short, long, value_parser=filter_traces_parser(), default_missing_value="all", verbatim_doc_comment)]
filter_traces: Option<fn(TraceLevel) -> Tracing>,
/// Choose the verbosity level of traces:
/// - silent: disable traces altogether
/// - compact: only culprit line numbers are shown on failures
/// - verbose: enable full verbose traces as provided by the user or the compiler
/// [optional]
#[clap(short, long, value_parser=trace_level_parser(), default_value_t=TraceLevel::Silent, verbatim_doc_comment)]
trace_level: TraceLevel,
}
pub fn exec(
@ -30,18 +47,54 @@ pub fn exec(
deny,
watch,
uplc,
keep_traces,
filter_traces,
trace_level,
}: Args,
) -> miette::Result<()> {
let result = if watch {
watch_project(directory.as_deref(), watch::default_filter, 500, |p| {
p.build(uplc, keep_traces.into())
p.build(
uplc,
match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
)
})
} else {
with_project(directory.as_deref(), deny, |p| {
p.build(uplc, keep_traces.into())
p.build(
uplc,
match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
)
})
};
result.map_err(|_| process::exit(1))
}
#[allow(clippy::type_complexity)]
pub fn filter_traces_parser(
) -> MapValueParser<PossibleValuesParser, fn(String) -> fn(TraceLevel) -> Tracing> {
PossibleValuesParser::new(["user-defined", "compiler-generated", "all"]).map(
|s: String| match s.as_str() {
"user-defined" => Tracing::UserDefined,
"compiler-generated" => Tracing::CompilerGenerated,
"all" => Tracing::All,
_ => unreachable!(),
},
)
}
#[allow(clippy::type_complexity)]
pub fn trace_level_parser() -> MapValueParser<PossibleValuesParser, fn(String) -> TraceLevel> {
PossibleValuesParser::new(["silent", "compact", "verbose"]).map(|s| match s.as_str() {
"silent" => TraceLevel::Silent,
"compact" => TraceLevel::Compact,
"verbose" => TraceLevel::Verbose,
_ => unreachable!(),
})
}
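
The two parsers above map CLI strings onto either a `Tracing` constructor (`--filter-traces`) or a `TraceLevel` (`--trace-level`); `exec` then combines them, falling back to `Tracing::All` when no filter is given. A minimal sketch of that combination (not part of the diff, mirroring the `match` used in `exec`):

```rust
use aiken_lang::ast::{TraceLevel, Tracing};

// Same shape as the CLI: an optional filter (a Tracing constructor) plus a level.
fn resolve(filter: Option<fn(TraceLevel) -> Tracing>, level: TraceLevel) -> Tracing {
    match filter {
        Some(filter) => filter(level),
        None => Tracing::All(level),
    }
}

fn main() {
    // `--filter-traces user-defined --trace-level compact`
    assert_eq!(
        resolve(Some(Tracing::UserDefined), TraceLevel::Compact),
        Tracing::UserDefined(TraceLevel::Compact)
    );

    // `--trace-level verbose` alone keeps every trace.
    assert_eq!(
        resolve(None, TraceLevel::Verbose),
        Tracing::All(TraceLevel::Verbose)
    );
}
```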

View File

@ -1,3 +1,5 @@
use super::build::{filter_traces_parser, trace_level_parser};
use aiken_lang::ast::{TraceLevel, Tracing};
use aiken_project::watch::{self, watch_project, with_project};
use std::{path::PathBuf, process};
@ -34,9 +36,23 @@ pub struct Args {
#[clap(short, long)]
exact_match: bool,
/// Remove traces when generating code (including tests)
#[clap(long)]
no_traces: bool,
/// Filter traces to be considered during testing:
/// - user-defined: only consider traces that you've explicitly defined (either through the
/// 'trace' keyword or via the trace-if-false ('?') operator).
/// - compiler-generated: only include internal traces generated by the Aiken compiler, for
/// example in usage of 'expect'.
/// - all: include both user-defined and compiler-generated traces.
/// [optional] [default: all]
#[clap(short, long, value_parser=filter_traces_parser(), default_missing_value="all", verbatim_doc_comment)]
filter_traces: Option<fn(TraceLevel) -> Tracing>,
/// Choose the verbosity level of traces:
/// - silent: disable traces altogether
/// - compact: only culprit line numbers are shown on failures
/// - verbose: enable full verbose traces as provided by the user or the compiler
/// [optional]
#[clap(short, long, value_parser=trace_level_parser(), default_value_t=TraceLevel::Verbose, verbatim_doc_comment)]
trace_level: TraceLevel,
}
pub fn exec(
@ -47,9 +63,9 @@ pub fn exec(
debug,
match_tests,
exact_match,
no_traces,
watch,
..
filter_traces,
trace_level,
}: Args,
) -> miette::Result<()> {
let result = if watch {
@ -59,7 +75,10 @@ pub fn exec(
match_tests.clone(),
debug,
exact_match,
(!no_traces).into(),
match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
)
})
} else {
@ -69,7 +88,10 @@ pub fn exec(
match_tests.clone(),
debug,
exact_match,
(!no_traces).into(),
match filter_traces {
Some(filter_traces) => filter_traces(trace_level),
None => Tracing::All(trace_level),
},
)
})
};