Merge pull request #379 from aiken-lang/trace-rework-and-unification

Trace rework and unification
Matthias Benkort 2023-02-16 00:48:13 +01:00 committed by GitHub
commit 000a24b41c
15 changed files with 642 additions and 304 deletions

Cargo.lock (generated, vendored): 83 changed lines

@@ -29,15 +29,6 @@ dependencies = [
  "opaque-debug",
 ]
 
-[[package]]
-name = "ahash"
-version = "0.3.8"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e8fd72866655d1904d6b0997d0b07ba561047d070fbe29de039031c641b61217"
-dependencies = [
- "const-random",
-]
-
 [[package]]
 name = "ahash"
 version = "0.7.6"
@@ -390,11 +381,12 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chumsky"
-version = "0.8.0"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8d02796e4586c6c41aeb68eae9bfb4558a522c35f1430c14b40136c3706e09e4"
+checksum = "c4d619fba796986dd538d82660b76e0b9756c6e19b2e4d4559ba5a57f9f00810"
 dependencies = [
- "ahash 0.3.8",
+ "hashbrown",
+ "stacker",
 ]
 
 [[package]]
@@ -451,28 +443,6 @@ version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cec318a675afcb6a1ea1d4340e2d377e56e47c266f28043ceccbf4412ddfdd3b"
 
-[[package]]
-name = "const-random"
-version = "0.1.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "368a7a772ead6ce7e1de82bfb04c485f3db8ec744f72925af5735e29a22cc18e"
-dependencies = [
- "const-random-macro",
- "proc-macro-hack",
-]
-
-[[package]]
-name = "const-random-macro"
-version = "0.1.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d7d6ab3c3a2282db210df5f02c4dab6e0a7057af0fb7ebd4070f30fe05c0ddb"
-dependencies = [
- "getrandom",
- "once_cell",
- "proc-macro-hack",
- "tiny-keccak",
-]
-
 [[package]]
 name = "constant_time_eq"
 version = "0.1.5"
@@ -556,12 +526,6 @@ dependencies = [
  "cfg-if",
 ]
 
-[[package]]
-name = "crunchy"
-version = "0.2.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7"
-
 [[package]]
 name = "crypto-bigint"
 version = "0.4.9"
@@ -955,7 +919,7 @@ version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
 dependencies = [
- "ahash 0.7.6",
+ "ahash",
 ]
 
 [[package]]
@@ -1800,12 +1764,6 @@ dependencies = [
  "version_check",
 ]
 
-[[package]]
-name = "proc-macro-hack"
-version = "0.5.20+deprecated"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
-
 [[package]]
 name = "proc-macro2"
 version = "1.0.49"
@@ -1835,6 +1793,15 @@ dependencies = [
  "tempfile",
 ]
 
+[[package]]
+name = "psm"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5787f7cda34e3033a72192c018bc5883100330f362ef279a8cbccfce8bb4e874"
+dependencies = [
+ "cc",
+]
+
 [[package]]
 name = "pulldown-cmark"
 version = "0.8.0"
@@ -2275,6 +2242,19 @@ dependencies = [
  "der",
 ]
 
+[[package]]
+name = "stacker"
+version = "0.1.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "libc",
+ "psm",
+ "winapi",
+]
+
 [[package]]
 name = "static_assertions"
 version = "1.1.0"
@@ -2461,15 +2441,6 @@ version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
 
-[[package]]
-name = "tiny-keccak"
-version = "2.0.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237"
-dependencies = [
- "crunchy",
-]
-
 [[package]]
 name = "tinyvec"
 version = "1.6.0"


@@ -11,7 +11,7 @@ authors = ["Lucas Rosa <x@rvcas.dev>", "Kasey White <kwhitemsg@gmail.com>"]
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 [dependencies]
-chumsky = "0.8.0"
+chumsky = "0.9.0"
 hex = "0.4.3"
 indexmap = "1.9.1"
 indoc = "1.0.7"


@@ -235,21 +235,13 @@ pub enum Air {
     },
 
     // Misc.
-    Todo {
-        scope: Vec<u64>,
-        label: Option<String>,
-        tipo: Arc<Type>,
-    },
-
     ErrorTerm {
         scope: Vec<u64>,
         tipo: Arc<Type>,
-        label: Option<String>,
     },
 
     Trace {
         scope: Vec<u64>,
-        text: Option<String>,
         tipo: Arc<Type>,
     },
 }
@@ -292,7 +284,6 @@ impl Air {
             | Air::ListExpose { scope, .. }
             | Air::TupleAccessor { scope, .. }
             | Air::TupleIndex { scope, .. }
-            | Air::Todo { scope, .. }
             | Air::ErrorTerm { scope, .. }
             | Air::Trace { scope, .. } => scope.clone(),
         }
@@ -375,7 +366,6 @@
             | Air::ListExpose { tipo, .. }
             | Air::TupleAccessor { tipo, .. }
             | Air::TupleIndex { tipo, .. }
-            | Air::Todo { tipo, .. }
             | Air::ErrorTerm { tipo, .. }
             | Air::Trace { tipo, .. } => Some(tipo.clone()),


@@ -995,9 +995,10 @@ pub struct RecordUpdateSpread {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub enum TodoKind {
-    Keyword,
-    EmptyFunction,
+pub enum TraceKind {
+    Trace,
+    Todo,
+    Error,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq)]


@@ -433,11 +433,22 @@ pub fn rearrange_clauses(
            sorted_clauses[sorted_clauses.len() - 1].clone().then
        }
        Pattern::Discard { .. } => sorted_clauses[sorted_clauses.len() - 1].clone().then,
-        _ => TypedExpr::ErrorTerm {
-            location: Span::empty(),
-            tipo: sorted_clauses[sorted_clauses.len() - 1].then.tipo(),
-            label: Some("Clause not filled".to_string()),
-        },
+        _ => {
+            let tipo = sorted_clauses[sorted_clauses.len() - 1].then.tipo();
+            TypedExpr::Trace {
+                location: Span::empty(),
+                tipo: tipo.clone(),
+                text: Box::new(TypedExpr::String {
+                    location: Span::empty(),
+                    tipo: crate::builtins::string(),
+                    value: "Clause not filled".to_string(),
+                }),
+                then: Box::new(TypedExpr::ErrorTerm {
+                    location: Span::empty(),
+                    tipo,
+                }),
+            }
+        }
    };
 
    for (index, clause) in sorted_clauses.iter().enumerate() {
@@ -1667,30 +1678,21 @@ pub fn monomorphize(
                    needs_variant = true;
                }
            }
-            Air::Todo { scope, label, tipo } => {
-                if tipo.is_generic() {
-                    let mut tipo = tipo.clone();
-                    find_generics_to_replace(&mut tipo, &generic_types);
-                    new_air[index] = Air::Todo { scope, tipo, label };
-                    needs_variant = true;
-                }
-            }
-            Air::ErrorTerm { scope, label, tipo } => {
+            Air::ErrorTerm { scope, tipo } => {
                if tipo.is_generic() {
                    let mut tipo = tipo.clone();
                    find_generics_to_replace(&mut tipo, &generic_types);
-                    new_air[index] = Air::ErrorTerm { scope, tipo, label };
+                    new_air[index] = Air::ErrorTerm { scope, tipo };
                    needs_variant = true;
                }
            }
-            Air::Trace { scope, text, tipo } => {
+            Air::Trace { scope, tipo } => {
                if tipo.is_generic() {
                    let mut tipo = tipo.clone();
                    find_generics_to_replace(&mut tipo, &generic_types);
-                    new_air[index] = Air::Trace { scope, tipo, text };
+                    new_air[index] = Air::Trace { scope, tipo };
                    needs_variant = true;
                }
            }


@@ -5,7 +5,7 @@ use vec1::Vec1;
 use crate::{
     ast::{
         Annotation, Arg, AssignmentKind, BinOp, CallArg, Clause, DefinitionLocation, IfBranch,
-        Pattern, RecordUpdateSpread, Span, TodoKind, TypedRecordUpdateArg, UnOp,
+        Pattern, RecordUpdateSpread, Span, TraceKind, TypedRecordUpdateArg, UnOp,
         UntypedRecordUpdateArg,
     },
     builtins::void,
@@ -96,7 +96,7 @@ pub enum TypedExpr {
         location: Span,
         tipo: Arc<Type>,
         then: Box<Self>,
-        text: Option<String>,
+        text: Box<Self>,
     },
 
     When {
@@ -143,16 +143,9 @@ pub enum TypedExpr {
         tuple: Box<Self>,
     },
 
-    Todo {
-        location: Span,
-        label: Option<String>,
-        tipo: Arc<Type>,
-    },
-
     ErrorTerm {
         location: Span,
         tipo: Arc<Type>,
-        label: Option<String>,
     },
 
     RecordUpdate {
@@ -177,7 +170,6 @@ impl TypedExpr {
             Self::Trace { then, .. } => then.tipo(),
             Self::Fn { tipo, .. }
             | Self::Int { tipo, .. }
-            | Self::Todo { tipo, .. }
             | Self::ErrorTerm { tipo, .. }
             | Self::When { tipo, .. }
             | Self::List { tipo, .. }
@@ -223,7 +215,6 @@ impl TypedExpr {
             | TypedExpr::List { .. }
             | TypedExpr::Call { .. }
             | TypedExpr::When { .. }
-            | TypedExpr::Todo { .. }
             | TypedExpr::ErrorTerm { .. }
             | TypedExpr::BinOp { .. }
             | TypedExpr::Tuple { .. }
@@ -262,7 +253,6 @@ impl TypedExpr {
             | Self::Int { location, .. }
             | Self::Var { location, .. }
             | Self::Trace { location, .. }
-            | Self::Todo { location, .. }
             | Self::ErrorTerm { location, .. }
             | Self::When { location, .. }
             | Self::Call { location, .. }
@@ -298,7 +288,6 @@ impl TypedExpr {
             | Self::Int { location, .. }
             | Self::Trace { location, .. }
             | Self::Var { location, .. }
-            | Self::Todo { location, .. }
             | Self::ErrorTerm { location, .. }
             | Self::When { location, .. }
             | Self::Call { location, .. }
@@ -387,9 +376,10 @@ pub enum UntypedExpr {
     },
 
     Trace {
+        kind: TraceKind,
         location: Span,
         then: Box<Self>,
-        text: Option<String>,
+        text: Box<Self>,
     },
 
     When {
@@ -421,15 +411,8 @@ pub enum UntypedExpr {
         tuple: Box<Self>,
     },
 
-    Todo {
-        kind: TodoKind,
-        location: Span,
-        label: Option<String>,
-    },
-
     ErrorTerm {
         location: Span,
-        label: Option<String>,
     },
 
     RecordUpdate {
@@ -446,7 +429,35 @@ pub enum UntypedExpr {
     },
 }
 
+pub const DEFAULT_TODO_STR: &str = "aiken::todo";
+
+pub const DEFAULT_ERROR_STR: &str = "aiken::error";
+
 impl UntypedExpr {
+    pub fn todo(location: Span, reason: Option<Self>) -> Self {
+        UntypedExpr::Trace {
+            location,
+            kind: TraceKind::Todo,
+            then: Box::new(UntypedExpr::ErrorTerm { location }),
+            text: Box::new(reason.unwrap_or_else(|| UntypedExpr::String {
+                location,
+                value: DEFAULT_TODO_STR.to_string(),
+            })),
+        }
+    }
+
+    pub fn error(location: Span, reason: Option<Self>) -> Self {
+        UntypedExpr::Trace {
+            location,
+            kind: TraceKind::Error,
+            then: Box::new(UntypedExpr::ErrorTerm { location }),
+            text: Box::new(reason.unwrap_or_else(|| UntypedExpr::String {
+                location,
+                value: DEFAULT_ERROR_STR.to_string(),
+            })),
+        }
+    }
+
     pub fn append_in_sequence(self, next: Self) -> Self {
         let location = Span {
             start: self.location().start,
@@ -502,7 +513,6 @@ impl UntypedExpr {
             Self::Fn { location, .. }
             | Self::Var { location, .. }
             | Self::Int { location, .. }
-            | Self::Todo { location, .. }
             | Self::ErrorTerm { location, .. }
             | Self::When { location, .. }
             | Self::Call { location, .. }


@@ -7,12 +7,13 @@ use crate::{
     ast::{
         Annotation, Arg, ArgName, AssignmentKind, BinOp, CallArg, ClauseGuard, Constant, DataType,
         Definition, Function, IfBranch, ModuleConstant, Pattern, RecordConstructor,
-        RecordConstructorArg, RecordUpdateSpread, Span, TypeAlias, TypedArg, TypedConstant, UnOp,
-        UnqualifiedImport, UntypedArg, UntypedClause, UntypedClauseGuard, UntypedDefinition,
-        UntypedModule, UntypedPattern, UntypedRecordUpdateArg, Use, CAPTURE_VARIABLE,
+        RecordConstructorArg, RecordUpdateSpread, Span, TraceKind, TypeAlias, TypedArg,
+        TypedConstant, UnOp, UnqualifiedImport, UntypedArg, UntypedClause, UntypedClauseGuard,
+        UntypedDefinition, UntypedModule, UntypedPattern, UntypedRecordUpdateArg, Use,
+        CAPTURE_VARIABLE,
     },
     docvec,
-    expr::UntypedExpr,
+    expr::{UntypedExpr, DEFAULT_ERROR_STR, DEFAULT_TODO_STR},
     parser::extra::{Comment, ModuleExtra},
     pretty::{break_, concat, flex_break, join, line, lines, nil, Document, Documentable},
     tipo::{self, Type},
@@ -654,9 +655,6 @@
                 final_else,
                 ..
             } => self.if_expr(branches, final_else),
 
-            UntypedExpr::Todo { label: None, .. } => "todo".to_doc(),
-
-            UntypedExpr::Todo { label: Some(l), .. } => docvec!["todo(\"", l, "\")"],
-
             UntypedExpr::PipeLine { expressions, .. } => self.pipeline(expressions),
@@ -706,27 +704,8 @@
             } => self.assignment(pattern, value, None, Some(*kind), annotation),
 
             UntypedExpr::Trace {
-                text: None, then, ..
-            } => "trace"
-                .to_doc()
-                .append(if self.pop_empty_lines(then.start_byte_index()) {
-                    lines(2)
-                } else {
-                    line()
-                })
-                .append(self.expr(then)),
-
-            UntypedExpr::Trace {
-                text: Some(l),
-                then,
-                ..
-            } => docvec!["trace(\"", l, "\")"]
-                .append(if self.pop_empty_lines(then.start_byte_index()) {
-                    lines(2)
-                } else {
-                    line()
-                })
-                .append(self.expr(then)),
+                kind, text, then, ..
+            } => self.trace(kind, text, then),
 
             UntypedExpr::When {
                 subjects, clauses, ..
@@ -755,9 +734,7 @@
                     .append(suffix)
             }
 
-            UntypedExpr::ErrorTerm { label: None, .. } => "error".to_doc(),
-
-            UntypedExpr::ErrorTerm { label: Some(l), .. } => docvec!["error(\"", l, "\")"],
+            UntypedExpr::ErrorTerm { .. } => "error".to_doc(),
         };
 
         commented(document, comments)
@@ -772,6 +749,41 @@
         }
     }
 
+    pub fn trace<'a>(
+        &mut self,
+        kind: &'a TraceKind,
+        text: &'a UntypedExpr,
+        then: &'a UntypedExpr,
+    ) -> Document<'a> {
+        let (keyword, default_text) = match kind {
+            TraceKind::Trace => ("trace", None),
+            TraceKind::Error => ("error", Some(DEFAULT_ERROR_STR.to_string())),
+            TraceKind::Todo => ("todo", Some(DEFAULT_TODO_STR.to_string())),
+        };
+
+        let body = match text {
+            UntypedExpr::String { value, .. } if Some(value) == default_text.as_ref() => {
+                keyword.to_doc()
+            }
+            _ => keyword
+                .to_doc()
+                .append(" ")
+                .append(self.wrap_expr(text))
+                .group(),
+        };
+
+        match kind {
+            TraceKind::Error | TraceKind::Todo => body,
+            TraceKind::Trace => body
+                .append(if self.pop_empty_lines(then.start_byte_index()) {
+                    lines(2)
+                } else {
+                    line()
+                })
+                .append(self.expr(then)),
+        }
+    }
+
     pub fn pattern_constructor<'a>(
         &mut self,
         name: &'a str,
@@ -1322,7 +1334,10 @@
     fn wrap_expr<'a>(&mut self, expr: &'a UntypedExpr) -> Document<'a> {
         match expr {
-            UntypedExpr::Trace { .. }
+            UntypedExpr::Trace {
+                kind: TraceKind::Trace,
+                ..
+            }
             | UntypedExpr::Sequence { .. }
             | UntypedExpr::Assignment { .. } => "{"
                 .to_doc()
@@ -1361,7 +1376,10 @@
     fn case_clause_value<'a>(&mut self, expr: &'a UntypedExpr) -> Document<'a> {
         match expr {
-            UntypedExpr::Trace { .. }
+            UntypedExpr::Trace {
+                kind: TraceKind::Trace,
+                ..
+            }
            | UntypedExpr::Sequence { .. }
            | UntypedExpr::Assignment { .. } => " {"
                .to_doc()


@@ -7,7 +7,7 @@ pub mod lexer;
 pub mod token;
 
 use crate::{
-    ast::{self, BinOp, Span, TodoKind, UnOp, UntypedDefinition, CAPTURE_VARIABLE},
+    ast::{self, BinOp, Span, TraceKind, UnOp, UntypedDefinition, CAPTURE_VARIABLE},
     expr,
 };
@@ -254,11 +254,7 @@ pub fn fn_parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseEr
         |((((opt_pub, name), (arguments, args_span)), return_annotation), body), span| {
             ast::UntypedDefinition::Fn(ast::Function {
                 arguments,
-                body: body.unwrap_or(expr::UntypedExpr::Todo {
-                    kind: TodoKind::EmptyFunction,
-                    location: span,
-                    label: None,
-                }),
+                body: body.unwrap_or_else(|| expr::UntypedExpr::todo(span, None)),
                 doc: None,
                 location: Span {
                     start: span.start,
@@ -291,11 +287,7 @@ pub fn test_parser() -> impl Parser<Token, ast::UntypedDefinition, Error = Parse
         .map_with_span(|((name, span_end), body), span| {
             ast::UntypedDefinition::Test(ast::Function {
                 arguments: vec![],
-                body: body.unwrap_or(expr::UntypedExpr::Todo {
-                    kind: TodoKind::EmptyFunction,
-                    location: span,
-                    label: None,
-                }),
+                body: body.unwrap_or_else(|| expr::UntypedExpr::todo(span, None)),
                 doc: None,
                 location: span_end,
                 end_position: span.end - 1,
@@ -591,17 +583,20 @@ pub fn expr_seq_parser() -> impl Parser<Token, expr::UntypedExpr, Error = ParseE
     recursive(|r| {
         choice((
             just(Token::Trace)
-                .ignore_then(
-                    select! {Token::String {value} => value}
-                        .delimited_by(just(Token::LeftParen), just(Token::RightParen))
-                        .or_not(),
-                )
+                .ignore_then(expr_parser(r.clone()))
                 .then(r.clone())
                 .map_with_span(|(text, then_), span| expr::UntypedExpr::Trace {
+                    kind: TraceKind::Trace,
                     location: span,
                     then: Box::new(then_),
-                    text,
+                    text: Box::new(text),
                 }),
+            just(Token::ErrorTerm)
+                .ignore_then(expr_parser(r.clone()).or_not())
+                .map_with_span(|reason, span| expr::UntypedExpr::error(span, reason)),
+            just(Token::Todo)
+                .ignore_then(expr_parser(r.clone()).or_not())
+                .map_with_span(|reason, span| expr::UntypedExpr::todo(span, reason)),
             expr_parser(r.clone())
                 .then(r.repeated())
                 .foldl(|current, next| current.append_in_sequence(next)),
@@ -871,29 +866,6 @@ pub fn expr_parser(
             name,
         });
 
-    let todo_parser = just(Token::Todo)
-        .ignore_then(
-            select! {Token::String {value} => value}
-                .delimited_by(just(Token::LeftParen), just(Token::RightParen))
-                .or_not(),
-        )
-        .map_with_span(|label, span| expr::UntypedExpr::Todo {
-            kind: TodoKind::Keyword,
-            location: span,
-            label,
-        });
-
-    let error_parser = just(Token::ErrorTerm)
-        .ignore_then(
-            select! {Token::String {value} => value}
-                .delimited_by(just(Token::LeftParen), just(Token::RightParen))
-                .or_not(),
-        )
-        .map_with_span(|label, span| expr::UntypedExpr::ErrorTerm {
-            location: span,
-            label,
-        });
-
     let tuple = r
         .clone()
         .separated_by(just(Token::Comma))
@@ -988,7 +960,23 @@ pub fn expr_parser(
                 }),
             )))
             // TODO: add hint "Did you mean to wrap a multi line clause in curly braces?"
-            .then(r.clone())
+            .then(choice((
+                r.clone(),
+                just(Token::Todo)
+                    .ignore_then(
+                        r.clone()
+                            .then_ignore(one_of(Token::RArrow).not().rewind())
+                            .or_not(),
+                    )
+                    .map_with_span(|reason, span| expr::UntypedExpr::todo(span, reason)),
+                just(Token::ErrorTerm)
+                    .ignore_then(
+                        r.clone()
+                            .then_ignore(just(Token::RArrow).not().rewind())
+                            .or_not(),
+                    )
+                    .map_with_span(|reason, span| expr::UntypedExpr::error(span, reason)),
+            )))
             .map_with_span(
                 |(((patterns, alternative_patterns_opt), guard), then), span| ast::UntypedClause {
                     location: span,
@@ -1084,8 +1072,6 @@ pub fn expr_parser(
         record_parser,
         field_access_constructor,
         var_parser,
-        todo_parser,
-        error_parser,
         tuple,
         bytearray,
         list_parser,
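Note: in the clause parser above, `todo` and `error` take an optional reason expression, and the `.not().rewind()` peek on the arrow token keeps that reason from swallowing the next clause. A small Aiken sketch of what now parses, mirroring the parser tests further down (the `Foo`/`Bar` constructors are illustrative):

fn bar() {
  when x is {
    Foo -> todo "handle Foo"
    Bar -> todo
    _ -> error
  }
}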


@@ -126,3 +126,35 @@ fn list_pattern_6() {
    "#;
    assert!(check(parse(source_code)).is_ok())
}
#[test]
fn trace_strings() {
let source_code = r#"
fn bar() {
"BAR"
}
test foo() {
let msg1 = "FOO"
trace("INLINE")
trace(msg1)
trace(bar())
True
}
"#;
assert!(check(parse(source_code)).is_ok())
}
#[test]
fn trace_non_strings() {
let source_code = r#"
test foo() {
trace(14 + 42)
True
}
"#;
assert!(matches!(
check(parse(source_code)),
Err((_, Error::CouldNotUnify { .. }))
))
}


@@ -320,7 +320,7 @@ fn test_nested_function_calls() {
           ,
           when output.datum is {
             InlineDatum(_) -> True
-            _ -> error("expected inline datum")
+            _ -> error "expected inline datum"
           },
         ]
         |> list.and
@@ -339,7 +339,7 @@ fn test_nested_function_calls() {
         ),
           when output.datum is {
             InlineDatum(_) -> True
-            _ -> error("expected inline datum")
+            _ -> error "expected inline datum"
           },
         ]
         |> list.and
@@ -348,3 +348,34 @@
    assert_fmt(src, expected);
}
#[test]
fn format_trace_todo_error() {
let src = indoc! {r#"
fn foo_1() {
todo
}
fn foo_2() {
todo "my custom message"
}
fn foo_3() {
when x is {
Foo -> True
_ -> error
}
}
fn foo_4() {
if 14 == 42 {
error "I don't think so"
} else {
trace "been there"
True
}
}
"#};
assert_fmt(src, src);
}


@@ -308,10 +308,16 @@ fn empty_function() {
         code,
         vec![ast::UntypedDefinition::Fn(Function {
             arguments: vec![],
-            body: expr::UntypedExpr::Todo {
-                kind: ast::TodoKind::EmptyFunction,
+            body: expr::UntypedExpr::Trace {
+                kind: ast::TraceKind::Todo,
                 location: Span::new((), 0..15),
-                label: None,
+                text: Box::new(expr::UntypedExpr::String {
+                    value: "aiken::todo".to_string(),
+                    location: Span::new((), 0..15),
+                }),
+                then: Box::new(expr::UntypedExpr::ErrorTerm {
+                    location: Span::new((), 0..15),
+                }),
             },
             doc: None,
             location: Span::new((), 0..12),
@@ -1781,10 +1787,16 @@ fn function_def() {
         vec![ast::UntypedDefinition::Fn(Function {
             doc: None,
             arguments: vec![],
-            body: expr::UntypedExpr::Todo {
-                kind: ast::TodoKind::EmptyFunction,
+            body: expr::UntypedExpr::Trace {
+                kind: ast::TraceKind::Todo,
                 location: Span::new((), 0..11),
-                label: None,
+                text: Box::new(expr::UntypedExpr::String {
+                    value: "aiken::todo".to_string(),
+                    location: Span::new((), 0..11),
+                }),
+                then: Box::new(expr::UntypedExpr::ErrorTerm {
+                    location: Span::new((), 0..11),
+                }),
             },
             location: Span::new((), 0..8),
             name: "foo".to_string(),
@@ -2586,3 +2598,345 @@ fn scope_logical_expression() {
        })],
    )
}
#[test]
fn trace_expressions() {
let code = indoc! {r#"
fn foo() {
let msg1 = "FOO"
trace "INLINE"
trace msg1
trace string.concat(msg1, "BAR")
trace ( 14 + 42 * 1337 )
Void
}
"#};
assert_definitions(
code,
vec![ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::Sequence {
location: Span::new((), 13..128),
expressions: vec![
expr::UntypedExpr::Assignment {
location: Span::new((), 13..29),
value: Box::new(expr::UntypedExpr::String {
location: Span::new((), 24..29),
value: "FOO".to_string(),
}),
pattern: ast::Pattern::Var {
location: Span::new((), 17..21),
name: "msg1".to_string(),
},
kind: ast::AssignmentKind::Let,
annotation: None,
},
expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 32..128),
then: Box::new(expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 49..128),
then: Box::new(expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 62..128),
then: Box::new(expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 97..128),
then: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 124..128),
name: "Void".to_string(),
}),
text: Box::new(expr::UntypedExpr::BinOp {
location: Span::new((), 105..119),
name: ast::BinOp::AddInt,
left: Box::new(expr::UntypedExpr::Int {
location: Span::new((), 105..107),
value: "14".to_string(),
}),
right: Box::new(expr::UntypedExpr::BinOp {
location: Span::new((), 110..119),
name: ast::BinOp::MultInt,
left: Box::new(expr::UntypedExpr::Int {
location: Span::new((), 110..112),
value: "42".to_string(),
}),
right: Box::new(expr::UntypedExpr::Int {
location: Span::new((), 115..119),
value: "1337".to_string(),
}),
}),
}),
}),
text: Box::new(expr::UntypedExpr::Call {
arguments: vec![
ast::CallArg {
label: None,
location: Span::new((), 82..86),
value: expr::UntypedExpr::Var {
location: Span::new((), 82..86),
name: "msg1".to_string(),
},
},
ast::CallArg {
label: None,
location: Span::new((), 88..93),
value: expr::UntypedExpr::String {
location: Span::new((), 88..93),
value: "BAR".to_string(),
},
},
],
fun: Box::new(expr::UntypedExpr::FieldAccess {
location: Span::new((), 68..81),
label: "concat".to_string(),
container: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 68..74),
name: "string".to_string(),
}),
}),
location: Span::new((), 68..94),
}),
}),
text: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 55..59),
name: "msg1".to_string(),
}),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 38..46),
value: "INLINE".to_string(),
}),
},
],
},
doc: None,
location: Span::new((), 0..8),
name: "foo".to_string(),
public: false,
return_annotation: None,
return_type: (),
end_position: 129,
})],
)
}
#[test]
fn parse_keyword_error() {
let code = indoc! {r#"
fn foo() {
error "not implemented"
}
fn bar() {
when x is {
Something -> Void
_ -> error
}
}
"#};
assert_definitions(
code,
vec![
ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Error,
location: Span::new((), 13..36),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 13..36),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 19..36),
value: "not implemented".to_string(),
}),
},
doc: None,
location: Span::new((), 0..8),
name: "foo".to_string(),
public: false,
return_annotation: None,
return_type: (),
end_position: 37,
}),
ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::When {
location: Span::new((), 53..109),
subjects: vec![expr::UntypedExpr::Var {
location: Span::new((), 58..59),
name: "x".to_string(),
}],
clauses: vec![
ast::Clause {
location: Span::new((), 71..88),
pattern: vec![ast::Pattern::Constructor {
is_record: false,
location: Span::new((), 71..80),
name: "Something".to_string(),
arguments: vec![],
module: None,
constructor: (),
with_spread: false,
tipo: (),
}],
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Var {
location: Span::new((), 84..88),
name: "Void".to_string(),
},
},
ast::Clause {
location: Span::new((), 95..105),
pattern: vec![ast::Pattern::Discard {
name: "_".to_string(),
location: Span::new((), 95..96),
}],
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Error,
location: Span::new((), 100..105),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 100..105),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 100..105),
value: "aiken::error".to_string(),
}),
},
},
],
},
doc: None,
location: Span::new((), 40..48),
name: "bar".to_string(),
public: false,
return_annotation: None,
return_type: (),
end_position: 110,
}),
],
)
}
#[test]
fn parse_keyword_todo() {
let code = indoc! {r#"
fn foo() {
todo "not implemented"
}
fn bar() {
when x is {
Foo -> todo
Bar -> True
_ -> False
}
}
"#};
assert_definitions(
code,
vec![
ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Todo,
location: Span::new((), 13..35),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 13..35),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 18..35),
value: "not implemented".to_string(),
}),
},
doc: None,
location: Span::new((), 0..8),
name: "foo".to_string(),
public: false,
return_annotation: None,
return_type: (),
end_position: 36,
}),
ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::When {
location: Span::new((), 52..120),
subjects: vec![expr::UntypedExpr::Var {
location: Span::new((), 57..58),
name: "x".to_string(),
}],
clauses: vec![
ast::Clause {
location: Span::new((), 70..81),
pattern: vec![ast::Pattern::Constructor {
is_record: false,
location: Span::new((), 70..73),
name: "Foo".to_string(),
arguments: vec![],
module: None,
constructor: (),
with_spread: false,
tipo: (),
}],
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Todo,
location: Span::new((), 77..81),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 77..81),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 77..81),
value: "aiken::todo".to_string(),
}),
},
},
ast::Clause {
location: Span::new((), 88..99),
pattern: vec![ast::Pattern::Constructor {
is_record: false,
location: Span::new((), 88..91),
name: "Bar".to_string(),
arguments: vec![],
module: None,
constructor: (),
with_spread: false,
tipo: (),
}],
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Var {
location: Span::new((), 95..99),
name: "True".to_string(),
},
},
ast::Clause {
location: Span::new((), 106..116),
pattern: vec![ast::Pattern::Discard {
name: "_".to_string(),
location: Span::new((), 106..107),
}],
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Var {
location: Span::new((), 111..116),
name: "False".to_string(),
},
},
],
},
doc: None,
location: Span::new((), 39..47),
name: "bar".to_string(),
public: false,
return_annotation: None,
return_type: (),
end_position: 121,
}),
],
)
}


@@ -1,6 +1,6 @@
 use super::Type;
 use crate::{
-    ast::{Annotation, BinOp, CallArg, Span, TodoKind, UntypedPattern},
+    ast::{Annotation, BinOp, CallArg, Span, UntypedPattern},
     expr::{self, UntypedExpr},
     format::Formatter,
     levenshtein,
@@ -1118,7 +1118,6 @@ pub enum Warning {
     #[diagnostic(help("You probably want to replace that one with real code... eventually."))]
     #[diagnostic(code("todo"))]
     Todo {
-        kind: TodoKind,
         #[label]
         location: Span,
         tipo: Arc<Type>,


@@ -5,7 +5,7 @@ use vec1::Vec1;
 use crate::{
     ast::{
         Annotation, Arg, ArgName, AssignmentKind, BinOp, CallArg, Clause, ClauseGuard, Constant,
-        RecordUpdateSpread, Span, TodoKind, TypedArg, TypedCallArg, TypedClause, TypedClauseGuard,
+        RecordUpdateSpread, Span, TraceKind, TypedArg, TypedCallArg, TypedClause, TypedClauseGuard,
         TypedConstant, TypedIfBranch, TypedMultiPattern, TypedRecordUpdateArg, UnOp, UntypedArg,
         UntypedClause, UntypedClauseGuard, UntypedConstant, UntypedIfBranch, UntypedMultiPattern,
         UntypedPattern, UntypedRecordUpdateArg,
@@ -221,7 +221,6 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
             | UntypedExpr::RecordUpdate { .. }
             | UntypedExpr::Sequence { .. }
             | UntypedExpr::String { .. }
-            | UntypedExpr::Todo { .. }
             | UntypedExpr::Tuple { .. }
             | UntypedExpr::TupleIndex { .. }
             | UntypedExpr::UnOp { .. }
@@ -249,16 +248,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
     /// returning an error.
     pub fn infer(&mut self, expr: UntypedExpr) -> Result<TypedExpr, Error> {
         match expr {
-            UntypedExpr::Todo {
-                location,
-                label,
-                kind,
-                ..
-            } => Ok(self.infer_todo(location, kind, label)),
-
-            UntypedExpr::ErrorTerm { location, label } => {
-                Ok(self.infer_error_term(location, label))
-            }
+            UntypedExpr::ErrorTerm { location } => Ok(self.infer_error_term(location)),
 
             UntypedExpr::Var { location, name, .. } => self.infer_var(name, location),
@@ -309,7 +299,8 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
                 location,
                 then,
                 text,
-            } => self.infer_trace(*then, location, text),
+                kind,
+            } => self.infer_trace(kind, *then, location, *text),
 
             UntypedExpr::When {
                 location,
@@ -1857,47 +1848,37 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
         })
     }
 
-    fn infer_todo(&mut self, location: Span, kind: TodoKind, label: Option<String>) -> TypedExpr {
-        let tipo = self.new_unbound_var();
-
-        self.environment.warnings.push(Warning::Todo {
-            kind,
-            location,
-            tipo: tipo.clone(),
-        });
-
-        TypedExpr::Todo {
-            location,
-            label,
-            tipo,
-        }
-    }
-
-    fn infer_error_term(&mut self, location: Span, label: Option<String>) -> TypedExpr {
+    fn infer_error_term(&mut self, location: Span) -> TypedExpr {
         let tipo = self.new_unbound_var();
 
-        TypedExpr::ErrorTerm {
-            location,
-            tipo,
-            label,
-        }
+        TypedExpr::ErrorTerm { location, tipo }
     }
 
     fn infer_trace(
         &mut self,
+        kind: TraceKind,
         then: UntypedExpr,
         location: Span,
-        text: Option<String>,
+        text: UntypedExpr,
     ) -> Result<TypedExpr, Error> {
-        let then = self.infer(then)?;
+        let text = self.infer(text)?;
 
+        self.unify(text.tipo(), string(), text.location(), false)?;
+
+        let then = self.infer(then)?;
         let tipo = then.tipo();
 
+        if let TraceKind::Todo = kind {
+            self.environment.warnings.push(Warning::Todo {
+                location,
+                tipo: tipo.clone(),
+            })
+        }
+
         Ok(TypedExpr::Trace {
             location,
             tipo,
             then: Box::new(then),
-            text,
+            text: Box::new(text),
         })
     }
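Note: `infer_trace` above now infers the message expression itself and unifies it with `String`, so any string-producing expression works as a trace, todo, or error message, while anything else is rejected with a `CouldNotUnify` error. A short Aiken sketch mirroring the `trace_strings` and `trace_non_strings` checks earlier in this diff (test names are illustrative):

test message_is_a_string() {
  let msg = "FOO"
  trace msg
  True
}

test message_is_not_a_string() {
  // rejected by the type-checker: Int does not unify with String
  trace(14 + 42)
  True
}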


@@ -587,13 +587,6 @@ impl<'a> CodeGenerator<'a> {
                    constants_ir(literal, ir_stack, scope);
                }
            },
-            TypedExpr::Todo { label, tipo, .. } => {
-                ir_stack.push(Air::Todo {
-                    scope,
-                    label: label.clone(),
-                    tipo: tipo.clone(),
-                });
-            }
            TypedExpr::RecordUpdate {
                spread, args, tipo, ..
            } => {
@@ -651,19 +644,21 @@ impl<'a> CodeGenerator<'a> {
                ir_stack.append(&mut elems_air);
            }
            TypedExpr::Trace {
                tipo, then, text, ..
            } => {
                let mut scope = scope;
 
                ir_stack.push(Air::Trace {
-                    text: text.clone(),
                    tipo: tipo.clone(),
                    scope: scope.clone(),
                });
 
                scope.push(self.id_gen.next());
 
+                self.build_ir(text, ir_stack, scope.clone());
+
+                scope.push(self.id_gen.next());
+
                self.build_ir(then, ir_stack, scope);
            }
@@ -677,11 +672,10 @@ impl<'a> CodeGenerator<'a> {
                self.build_ir(tuple, ir_stack, scope);
            }
 
-            TypedExpr::ErrorTerm { tipo, label, .. } => {
+            TypedExpr::ErrorTerm { tipo, .. } => {
                ir_stack.push(Air::ErrorTerm {
                    scope,
                    tipo: tipo.clone(),
-                    label: label.clone(),
                });
            }
        }
@@ -2566,10 +2560,19 @@ impl<'a> CodeGenerator<'a> {
                });
            }
 
+            assert_vec.push(Air::Trace {
+                scope: scope.clone(),
+                tipo: tipo.clone(),
+            });
+
+            assert_vec.push(Air::String {
+                scope: scope.clone(),
+                value: "Constr index did not match any type variant".to_string(),
+            });
+
            assert_vec.push(Air::ErrorTerm {
                scope,
                tipo: tipo.clone(),
-                label: Some("Constr index did not match any type variant".to_string()),
            });
        }
    }
@@ -3484,33 +3487,21 @@ impl<'a> CodeGenerator<'a> {
                    tuple_index,
                };
            }
-            Air::Todo { tipo, scope, label } => {
-                let mut replaced_type = tipo.clone();
-                replace_opaque_type(&mut replaced_type, self.data_types.clone());
-
-                ir_stack[index] = Air::Todo {
-                    scope,
-                    label,
-                    tipo: replaced_type,
-                };
-            }
-            Air::ErrorTerm { tipo, scope, label } => {
+            Air::ErrorTerm { tipo, scope } => {
                let mut replaced_type = tipo.clone();
                replace_opaque_type(&mut replaced_type, self.data_types.clone());
 
                ir_stack[index] = Air::ErrorTerm {
                    scope,
                    tipo: replaced_type,
-                    label,
                };
            }
-            Air::Trace { tipo, scope, text } => {
+            Air::Trace { tipo, scope } => {
                let mut replaced_type = tipo.clone();
                replace_opaque_type(&mut replaced_type, self.data_types.clone());
 
                ir_stack[index] = Air::Trace {
                    scope,
-                    text,
                    tipo: replaced_type,
                };
            }
@@ -5278,23 +5269,6 @@ impl<'a> CodeGenerator<'a> {
                    arg_stack.push(term);
                }
            }
-            Air::Todo { label, .. } => {
-                let term = apply_wrap(
-                    apply_wrap(
-                        Term::Builtin(DefaultFunction::Trace).force_wrap(),
-                        Term::Constant(
-                            UplcConstant::String(
-                                label.unwrap_or_else(|| "aiken::todo".to_string()),
-                            )
-                            .into(),
-                        ),
-                    ),
-                    Term::Delay(Term::Error.into()),
-                )
-                .force_wrap();
-
-                arg_stack.push(term);
-            }
            Air::RecordUpdate {
                highest_index,
                indices,
@@ -5631,41 +5605,20 @@ impl<'a> CodeGenerator<'a> {
                arg_stack.push(term);
            }
-            Air::Trace { text, .. } => {
+            Air::Trace { .. } => {
+                let text = arg_stack.pop().unwrap();
+
                let term = arg_stack.pop().unwrap();
 
                let term = apply_wrap(
-                    apply_wrap(
-                        Term::Builtin(DefaultFunction::Trace).force_wrap(),
-                        Term::Constant(
-                            UplcConstant::String(
-                                text.unwrap_or_else(|| "aiken::trace".to_string()),
-                            )
-                            .into(),
-                        ),
-                    ),
+                    apply_wrap(Term::Builtin(DefaultFunction::Trace).force_wrap(), text),
                    Term::Delay(term.into()),
                )
                .force_wrap();
 
                arg_stack.push(term);
            }
-            Air::ErrorTerm { label, .. } => {
-                if let Some(label) = label {
-                    let term = apply_wrap(
-                        apply_wrap(
-                            Term::Builtin(DefaultFunction::Trace).force_wrap(),
-                            Term::Constant(UplcConstant::String(label).into()),
-                        ),
-                        Term::Delay(Term::Error.into()),
-                    )
-                    .force_wrap();
-
-                    arg_stack.push(term);
-                } else {
-                    arg_stack.push(Term::Error)
-                }
-            }
+            Air::ErrorTerm { .. } => arg_stack.push(Term::Error),
            Air::TupleClause {
                tipo,
                indices,
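Note: in the code generation above, the trace text is no longer a constant stored on `Air::Trace`; it is compiled as its own term and popped from the argument stack, so the message can be any String expression evaluated at runtime, and `error`/`todo` reduce to a trace wrapped around a bare error term. A rough Aiken sketch of the intended runtime behaviour (an assumption based on the terms built here, not a quote from the diff):

fn guard(flag: Bool) -> Bool {
  if flag {
    True
  } else {
    // expected to appear in the trace log as "rejected" before the script fails
    error "rejected"
  }
}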


@@ -1,9 +1,19 @@
+use aiken/builtin
+
+fn concat(left: String, right: String) -> String {
+  builtin.append_bytearray(
+    builtin.encode_utf8(left),
+    builtin.encode_utf8(right),
+  )
+  |> builtin.decode_utf8
+}
+
 fn is_negative(i: Int) -> Bool {
   if i < 0 {
-    trace("is negative")
+    trace "is negative"
     True
   } else {
-    trace("is non-negative")
+    trace concat("is", concat(" ", "non-negative"))
     False
   }
 }