Compare commits


No commits in common. "c03d12b98c4e53fd3b8a991c59313ac756981269" and "81e93b4309e97258c79e3e27a5e07654e3e4a68d" have entirely different histories.

50 changed files with 189 additions and 635 deletions

View File

@@ -1,18 +1,5 @@
 # Changelog
 
-## v1.0.24-alpha - 2024-01-31
-
-### Added
-
-- **aiken**: New aliases for `check` (aiken c) and `build` (aiken b) commands. @Kuly14
-
-### Fixed
-
-- **aiken-lang**: Fixed an issue with expects on lists that used discards. This fixes
-  the validator issues being seen for previously succeeding validators on 1.0.21-alpha. @MicroProofs
-- **aiken-lang**: Out of Span issue is now solved. This also fixes incorrectly selected
-  traces from the wrong module, which in some cases lead to the out of span issue. @MicroProofs
-
 ## v1.0.23-alpha - 2024-01-24
 
 ### Fixed

@@ -31,6 +18,7 @@
   compact & verbose. @MicroProofs @KtorZ
 - **aiken**: New `--filter-traces` option for the `check` and `build` commands
   to enable restricting traces with more granularity between user-defined
+  traces, compiler-generated traces or both. @MicroProofs @KtorZ.
 - **aiken-lang**: Most builtin errors are now caught and instead catched trace
   errors are thrown. The exception is BLS primitives.

@@ -57,12 +45,11 @@
   `--no-traces` (on the `check` command) have been removed; superseded by the
   new options. @MicroProofs @KtorZ
 
-> [!TIP]
->
+> ![TIP]
 > - If you've been using `aiken check --no-traces`, you can recover the old
 >   behavior by doing `aiken check --trace-level silent`.
 > - If you've been using `aiken build --keep-traces`, you can recover the old
 >   behavior by doing `aiken build --trace-level verbose`.
 
 ## v1.0.21-alpha - 2023-12-04

Cargo.lock
View File

@@ -51,7 +51,7 @@ dependencies = [
 
 [[package]]
 name = "aiken"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 dependencies = [
  "aiken-lang",
  "aiken-lsp",

@@ -75,7 +75,7 @@ dependencies = [
 
 [[package]]
 name = "aiken-lang"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 dependencies = [
  "blst",
  "chumsky",

@@ -98,7 +98,7 @@ dependencies = [
 
 [[package]]
 name = "aiken-lsp"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 dependencies = [
  "aiken-lang",
  "aiken-project",

@@ -119,7 +119,7 @@ dependencies = [
 
 [[package]]
 name = "aiken-project"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 dependencies = [
  "aiken-lang",
  "askama",

@@ -3441,7 +3441,7 @@ checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85"
 
 [[package]]
 name = "uplc"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 dependencies = [
  "blst",
  "cryptoxide",

View File

@@ -1,7 +1,7 @@
 [package]
 name = "aiken-lang"
 description = "The Aiken compiler"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 edition = "2021"
 repository = "https://github.com/aiken-lang/aiken"
 homepage = "https://github.com/aiken-lang/aiken"

@@ -24,7 +24,7 @@ owo-colors = { version = "3.5.0", features = ["supports-colors"] }
 strum = "0.24.1"
 thiserror = "1.0.39"
 vec1 = "1.10.1"
-uplc = { path = '../uplc', version = "1.0.24-alpha" }
+uplc = { path = '../uplc', version = "1.0.23-alpha" }
 num-bigint = "0.4.3"
 petgraph = "0.6.3"
 blst = "0.3.11"

View File

@ -236,7 +236,7 @@ impl<'a> CodeGenerator<'a> {
program program
} }
fn build(&mut self, body: &TypedExpr, module_build_name: &String) -> AirTree { fn build(&mut self, body: &TypedExpr, module_name: &String) -> AirTree {
match body { match body {
TypedExpr::UInt { value, .. } => AirTree::int(value), TypedExpr::UInt { value, .. } => AirTree::int(value),
TypedExpr::String { value, .. } => AirTree::string(value), TypedExpr::String { value, .. } => AirTree::string(value),
@ -251,11 +251,11 @@ impl<'a> CodeGenerator<'a> {
let mut last_exp = self.build( let mut last_exp = self.build(
&expressions.pop().unwrap_or_else(|| unreachable!()), &expressions.pop().unwrap_or_else(|| unreachable!()),
module_build_name, module_name,
); );
while let Some(expression) = expressions.pop() { while let Some(expression) = expressions.pop() {
let exp_tree = self.build(&expression, module_build_name); let exp_tree = self.build(&expression, module_name);
last_exp = exp_tree.hoist_over(last_exp); last_exp = exp_tree.hoist_over(last_exp);
} }
@ -273,7 +273,7 @@ impl<'a> CodeGenerator<'a> {
args.iter() args.iter()
.map(|arg| arg.arg_name.get_variable_name().unwrap_or("_").to_string()) .map(|arg| arg.arg_name.get_variable_name().unwrap_or("_").to_string())
.collect_vec(), .collect_vec(),
self.build(body, module_build_name), self.build(body, module_name),
), ),
TypedExpr::List { TypedExpr::List {
@ -284,11 +284,10 @@ impl<'a> CodeGenerator<'a> {
} => AirTree::list( } => AirTree::list(
elements elements
.iter() .iter()
.map(|elem| self.build(elem, module_build_name)) .map(|elem| self.build(elem, module_name))
.collect_vec(), .collect_vec(),
tipo.clone(), tipo.clone(),
tail.as_ref() tail.as_ref().map(|tail| self.build(tail, module_name)),
.map(|tail| self.build(tail, module_build_name)),
), ),
TypedExpr::Call { TypedExpr::Call {
@ -331,11 +330,11 @@ impl<'a> CodeGenerator<'a> {
.map(|(arg, tipo)| { .map(|(arg, tipo)| {
if tipo.is_data() { if tipo.is_data() {
AirTree::cast_to_data( AirTree::cast_to_data(
self.build(&arg.value, module_build_name), self.build(&arg.value, module_name),
arg.value.tipo(), arg.value.tipo(),
) )
} else { } else {
self.build(&arg.value, module_build_name) self.build(&arg.value, module_name)
} }
}) })
.collect_vec(); .collect_vec();
@ -362,7 +361,7 @@ impl<'a> CodeGenerator<'a> {
.iter() .iter()
.zip(fun_arg_types) .zip(fun_arg_types)
.map(|(arg, arg_tipo)| { .map(|(arg, arg_tipo)| {
let mut arg_val = self.build(&arg.value, module_build_name); let mut arg_val = self.build(&arg.value, module_name);
if arg_tipo.is_data() && !arg.value.tipo().is_data() { if arg_tipo.is_data() && !arg.value.tipo().is_data() {
arg_val = AirTree::cast_to_data(arg_val, arg.value.tipo()) arg_val = AirTree::cast_to_data(arg_val, arg.value.tipo())
@ -375,7 +374,7 @@ impl<'a> CodeGenerator<'a> {
AirTree::builtin(*func, tipo.clone(), func_args) AirTree::builtin(*func, tipo.clone(), func_args)
} else { } else {
AirTree::call( AirTree::call(
self.build(fun.as_ref(), module_build_name), self.build(fun.as_ref(), module_name),
tipo.clone(), tipo.clone(),
func_args, func_args,
) )
@ -405,7 +404,7 @@ impl<'a> CodeGenerator<'a> {
.iter() .iter()
.zip(fun_arg_types) .zip(fun_arg_types)
.map(|(arg, arg_tipo)| { .map(|(arg, arg_tipo)| {
let mut arg_val = self.build(&arg.value, module_build_name); let mut arg_val = self.build(&arg.value, module_name);
if arg_tipo.is_data() && !arg.value.tipo().is_data() { if arg_tipo.is_data() && !arg.value.tipo().is_data() {
arg_val = AirTree::cast_to_data(arg_val, arg.value.tipo()) arg_val = AirTree::cast_to_data(arg_val, arg.value.tipo())
@ -418,7 +417,7 @@ impl<'a> CodeGenerator<'a> {
AirTree::builtin(*func, tipo.clone(), func_args) AirTree::builtin(*func, tipo.clone(), func_args)
} else { } else {
AirTree::call( AirTree::call(
self.build(fun.as_ref(), module_build_name), self.build(fun.as_ref(), module_name),
tipo.clone(), tipo.clone(),
func_args, func_args,
) )
@ -436,7 +435,7 @@ impl<'a> CodeGenerator<'a> {
.iter() .iter()
.zip(fun_arg_types) .zip(fun_arg_types)
.map(|(arg, arg_tipo)| { .map(|(arg, arg_tipo)| {
let mut arg_val = self.build(&arg.value, module_build_name); let mut arg_val = self.build(&arg.value, module_name);
if arg_tipo.is_data() && !arg.value.tipo().is_data() { if arg_tipo.is_data() && !arg.value.tipo().is_data() {
arg_val = AirTree::cast_to_data(arg_val, arg.value.tipo()) arg_val = AirTree::cast_to_data(arg_val, arg.value.tipo())
@ -446,7 +445,7 @@ impl<'a> CodeGenerator<'a> {
.collect_vec(); .collect_vec();
AirTree::call( AirTree::call(
self.build(fun.as_ref(), module_build_name), self.build(fun.as_ref(), module_name),
tipo.clone(), tipo.clone(),
func_args, func_args,
) )
@ -461,8 +460,8 @@ impl<'a> CodeGenerator<'a> {
} => AirTree::binop( } => AirTree::binop(
*name, *name,
tipo.clone(), tipo.clone(),
self.build(left, module_build_name), self.build(left, module_name),
self.build(right, module_build_name), self.build(right, module_name),
left.tipo(), left.tipo(),
), ),
@ -476,7 +475,7 @@ impl<'a> CodeGenerator<'a> {
} => { } => {
let replaced_type = convert_opaque_type(tipo, &self.data_types); let replaced_type = convert_opaque_type(tipo, &self.data_types);
let air_value = self.build(value, module_build_name); let air_value = self.build(value, module_name);
let msg_func = match self.tracing { let msg_func = match self.tracing {
TraceLevel::Silent => None, TraceLevel::Silent => None,
@ -485,16 +484,14 @@ impl<'a> CodeGenerator<'a> {
let msg = match self.tracing { let msg = match self.tracing {
TraceLevel::Silent => unreachable!("excluded from pattern guards"), TraceLevel::Silent => unreachable!("excluded from pattern guards"),
TraceLevel::Compact => get_line_columns_by_span( TraceLevel::Compact => get_line_columns_by_span(
module_build_name, module_name,
location, location,
&self.module_src, &self.module_src,
) )
.to_string(), .to_string(),
TraceLevel::Verbose => get_src_code_by_span( TraceLevel::Verbose => {
module_build_name, get_src_code_by_span(module_name, location, &self.module_src)
location, }
&self.module_src,
),
}; };
let msg_func_name = msg.split_whitespace().join(""); let msg_func_name = msg.split_whitespace().join("");
@ -529,9 +526,9 @@ impl<'a> CodeGenerator<'a> {
TypedExpr::Trace { TypedExpr::Trace {
tipo, then, text, .. tipo, then, text, ..
} => AirTree::trace( } => AirTree::trace(
self.build(text, module_build_name), self.build(text, module_name),
tipo.clone(), tipo.clone(),
self.build(then, module_build_name), self.build(then, module_name),
), ),
TypedExpr::When { TypedExpr::When {
@ -547,11 +544,11 @@ impl<'a> CodeGenerator<'a> {
} else if clauses.len() == 1 { } else if clauses.len() == 1 {
let last_clause = clauses.pop().unwrap(); let last_clause = clauses.pop().unwrap();
let clause_then = self.build(&last_clause.then, module_build_name); let clause_then = self.build(&last_clause.then, module_name);
let subject_type = subject.tipo(); let subject_type = subject.tipo();
let subject_val = self.build(subject, module_build_name); let subject_val = self.build(subject, module_name);
let assignment = self.assignment( let assignment = self.assignment(
&last_clause.pattern, &last_clause.pattern,
@ -597,13 +594,11 @@ impl<'a> CodeGenerator<'a> {
constr_var.clone(), constr_var.clone(),
subject_name.clone(), subject_name.clone(),
), ),
module_build_name, module_name,
); );
let constr_assign = AirTree::let_assignment( let constr_assign =
&constr_var, AirTree::let_assignment(&constr_var, self.build(subject, module_name));
self.build(subject, module_build_name),
);
let when_assign = AirTree::when( let when_assign = AirTree::when(
subject_name, subject_name,
@ -627,13 +622,13 @@ impl<'a> CodeGenerator<'a> {
.iter() .iter()
.map(|branch| { .map(|branch| {
( (
self.build(&branch.condition, module_build_name), self.build(&branch.condition, module_name),
self.build(&branch.body, module_build_name), self.build(&branch.body, module_name),
) )
}) })
.collect_vec(), .collect_vec(),
tipo.clone(), tipo.clone(),
self.build(final_else, module_build_name), self.build(final_else, module_name),
), ),
TypedExpr::RecordAccess { TypedExpr::RecordAccess {
@ -643,7 +638,7 @@ impl<'a> CodeGenerator<'a> {
.. ..
} => { } => {
if check_replaceable_opaque_type(&record.tipo(), &self.data_types) { if check_replaceable_opaque_type(&record.tipo(), &self.data_types) {
self.build(record, module_build_name) self.build(record, module_name)
} else { } else {
let function_name = format!("__access_index_{}", *index); let function_name = format!("__access_index_{}", *index);
@ -673,7 +668,7 @@ impl<'a> CodeGenerator<'a> {
self.special_functions self.special_functions
.use_function_tree(CONSTR_FIELDS_EXPOSER.to_string()), .use_function_tree(CONSTR_FIELDS_EXPOSER.to_string()),
list(data()), list(data()),
vec![self.build(record, module_build_name)], vec![self.build(record, module_name)],
); );
AirTree::index_access(function_name, tipo.clone(), list_of_fields) AirTree::index_access(function_name, tipo.clone(), list_of_fields)
@ -746,7 +741,7 @@ impl<'a> CodeGenerator<'a> {
TypedExpr::Tuple { tipo, elems, .. } => AirTree::tuple( TypedExpr::Tuple { tipo, elems, .. } => AirTree::tuple(
elems elems
.iter() .iter()
.map(|elem| self.build(elem, module_build_name)) .map(|elem| self.build(elem, module_name))
.collect_vec(), .collect_vec(),
tipo.clone(), tipo.clone(),
), ),
@ -755,7 +750,7 @@ impl<'a> CodeGenerator<'a> {
index, tuple, tipo, .. index, tuple, tipo, ..
} => { } => {
if tuple.tipo().is_2_tuple() { if tuple.tipo().is_2_tuple() {
AirTree::pair_index(*index, tipo.clone(), self.build(tuple, module_build_name)) AirTree::pair_index(*index, tipo.clone(), self.build(tuple, module_name))
} else { } else {
let function_name = format!("__access_index_{}", *index); let function_name = format!("__access_index_{}", *index);
@ -784,7 +779,7 @@ impl<'a> CodeGenerator<'a> {
AirTree::index_access( AirTree::index_access(
function_name, function_name,
tipo.clone(), tipo.clone(),
self.build(tuple, module_build_name), self.build(tuple, module_name),
) )
} }
} }
@ -803,7 +798,7 @@ impl<'a> CodeGenerator<'a> {
.iter() .iter()
.sorted_by(|arg1, arg2| arg1.index.cmp(&arg2.index)) .sorted_by(|arg1, arg2| arg1.index.cmp(&arg2.index))
{ {
let arg_val = self.build(&arg.value, module_build_name); let arg_val = self.build(&arg.value, module_name);
if arg.index > highest_index { if arg.index > highest_index {
highest_index = arg.index; highest_index = arg.index;
@ -817,14 +812,12 @@ impl<'a> CodeGenerator<'a> {
index_types, index_types,
highest_index, highest_index,
tipo.clone(), tipo.clone(),
self.build(spread, module_build_name), self.build(spread, module_name),
update_args, update_args,
) )
} }
TypedExpr::UnOp { value, op, .. } => { TypedExpr::UnOp { value, op, .. } => AirTree::unop(*op, self.build(value, module_name)),
AirTree::unop(*op, self.build(value, module_build_name))
}
TypedExpr::CurvePoint { point, .. } => AirTree::curve(*point.as_ref()), TypedExpr::CurvePoint { point, .. } => AirTree::curve(*point.as_ref()),
} }
} }

View File

@ -1,7 +1,7 @@
use std::{collections::HashMap, ops::Deref, rc::Rc}; use std::{collections::HashMap, ops::Deref, rc::Rc};
use indexmap::{IndexMap, IndexSet}; use indexmap::{IndexMap, IndexSet};
use itertools::{Itertools, Position}; use itertools::Itertools;
use uplc::{ use uplc::{
ast::{Constant as UplcConstant, Name, Term, Type as UplcType}, ast::{Constant as UplcConstant, Name, Term, Type as UplcType},
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER}, builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER},
@ -1530,152 +1530,127 @@ pub fn list_access_to_uplc(
error_term: Term<Name>, error_term: Term<Name>,
) -> Term<Name> { ) -> Term<Name> {
let names_len = names_types_ids.len(); let names_len = names_types_ids.len();
// Should never be expect level none on a list without a tail
assert!(!(matches!(expect_level, ExpectLevel::None) && is_list_accessor && !tail_present));
let mut no_tailing_discards = names_types_ids let mut no_tailing_discards = names_types_ids
.iter() .iter()
.rev() .rev()
.with_position() .skip_while(|(name, _, _)| name == "_")
.skip_while(|pos| match pos {
// Items are reversed order
Position::Last((name, _, _)) | Position::Middle((name, _, _)) => {
name == "_" && matches!(expect_level, ExpectLevel::None)
}
Position::First((name, _, _)) | Position::Only((name, _, _)) => {
name == "_" && (tail_present || matches!(expect_level, ExpectLevel::None))
}
})
.map(|position| match position {
Position::First(a) | Position::Middle(a) | Position::Last(a) | Position::Only(a) => a,
})
.collect_vec(); .collect_vec();
// If is just discards and check_last_item then we check for empty list // If the the is just discards and check_last_item then we check for empty list
if no_tailing_discards.is_empty() { if no_tailing_discards.is_empty() {
if tail_present || matches!(expect_level, ExpectLevel::None) { if !tail_present && matches!(expect_level, ExpectLevel::Full | ExpectLevel::Items) {
return term.lambda("_");
} else {
return Term::var("empty_list") return Term::var("empty_list")
.delayed_choose_list(term, error_term) .delayed_choose_list(term, error_term)
.lambda("empty_list"); .lambda("empty_list");
} else {
return term.lambda("_");
} }
} }
// reverse back to original order // reverse back to original order
no_tailing_discards.reverse(); no_tailing_discards.reverse();
let no_tailing_len = no_tailing_discards.len();
// If we cut off at least one element then that was tail and possibly some heads // If we cut off at least one element then that was tail and possibly some heads
let tail_wasnt_cutoff = tail_present && no_tailing_discards.len() == names_len; let tail_wasnt_cutoff = tail_present && no_tailing_discards.len() == names_len;
let tail_name = |id| format!("tail_id_{}", id); no_tailing_discards.into_iter().enumerate().rev().fold(
term,
|acc, (index, (name, tipo, id))| {
let tail_name = format!("tail_index_{}_{}", index, id);
let head_item = |name, tipo: &Rc<Type>, tail_name: &str| { let head_list =
if name == "_" { if matches!(tipo.get_uplc_type(), UplcType::Pair(_, _)) && is_list_accessor {
Term::unit() Term::head_list().apply(Term::var(tail_name.to_string()))
} else if matches!(tipo.get_uplc_type(), UplcType::Pair(_, _)) && is_list_accessor { } else if matches!(expect_level, ExpectLevel::Full) && error_term != Term::Error {
Term::head_list().apply(Term::var(tail_name.to_string())) convert_data_to_type_debug(
} else if matches!(expect_level, ExpectLevel::Full) && error_term != Term::Error { Term::head_list().apply(Term::var(tail_name.to_string())),
convert_data_to_type_debug( &tipo.to_owned(),
Term::head_list().apply(Term::var(tail_name.to_string())), error_term.clone(),
&tipo.to_owned(), )
error_term.clone(), } else {
) convert_data_to_type(
} else { Term::head_list().apply(Term::var(tail_name.to_string())),
convert_data_to_type( &tipo.to_owned(),
Term::head_list().apply(Term::var(tail_name.to_string())), )
&tipo.to_owned(), };
)
}
};
// Remember we reverse here so the First or Only is the last item // handle tail case
no_tailing_discards // name is guaranteed to not be discard at this point
.into_iter() if index == no_tailing_len - 1 && tail_wasnt_cutoff {
.rev() // simply lambda for tail name
.with_position() acc.lambda(name)
.fold(term, |acc, position| { } else if index == no_tailing_len - 1 {
match position { // case for no tail
Position::First((name, _, _)) | Position::Only((name, _, _)) // name is guaranteed to not be discard at this point
if tail_wasnt_cutoff =>
{
// case for tail as the last item
acc.lambda(name)
}
Position::First((name, tipo, id)) | Position::Only((name, tipo, id)) => { match expect_level {
// case for no tail, but last item ExpectLevel::None => acc.lambda(name).apply(head_list).lambda(tail_name),
let tail_name = tail_name(id); ExpectLevel::Full | ExpectLevel::Items => {
if error_term == Term::Error && tail_present {
let head_item = head_item(name, tipo, &tail_name); acc.lambda(name).apply(head_list).lambda(tail_name)
} else if tail_present {
match expect_level { // Custom error instead of trying to do head_list on a possibly empty list.
ExpectLevel::None => acc.lambda(name).apply(head_item).lambda(tail_name), Term::var(tail_name.to_string())
.delayed_choose_list(
ExpectLevel::Full | ExpectLevel::Items => { error_term.clone(),
if error_term == Term::Error && tail_present { acc.lambda(name).apply(head_list),
// No need to check last item if tail was present )
acc.lambda(name).apply(head_item).lambda(tail_name) .lambda(tail_name)
} else if tail_present { } else if error_term == Term::Error {
// Custom error instead of trying to do head_item on a possibly empty list. // Check head is last item in this list
Term::var(tail_name.to_string()) Term::tail_list()
.delayed_choose_list( .apply(Term::var(tail_name.to_string()))
error_term.clone(), .delayed_choose_list(acc, error_term.clone())
acc.lambda(name).apply(head_item), .lambda(name)
) .apply(head_list)
.lambda(tail_name) .lambda(tail_name)
} else if error_term == Term::Error { } else {
// Check head is last item in this list // Custom error if list is not empty after this head
Term::tail_list() Term::var(tail_name.to_string())
.apply(Term::var(tail_name.to_string())) .delayed_choose_list(
.delayed_choose_list(acc, error_term.clone()) error_term.clone(),
.lambda(name) Term::tail_list()
.apply(head_item) .apply(Term::var(tail_name.to_string()))
.lambda(tail_name) .delayed_choose_list(acc, error_term.clone())
} else { .lambda(name)
// Custom error if list is not empty after this head .apply(head_list),
Term::var(tail_name.to_string()) )
.delayed_choose_list( .lambda(tail_name)
error_term.clone(),
Term::tail_list()
.apply(Term::var(tail_name.to_string()))
.delayed_choose_list(acc, error_term.clone())
.lambda(name)
.apply(head_item),
)
.lambda(tail_name)
}
} }
} }
} }
} else if name == "_" {
Position::Middle((name, tipo, id)) | Position::Last((name, tipo, id)) => { if matches!(expect_level, ExpectLevel::None) || error_term == Term::Error {
// case for every item except the last item acc.apply(Term::tail_list().apply(Term::var(tail_name.to_string())))
let tail_name = tail_name(id); .lambda(tail_name)
} else {
let head_item = head_item(name, tipo, &tail_name); Term::var(tail_name.to_string())
.delayed_choose_list(
if matches!(expect_level, ExpectLevel::None) || error_term == Term::Error { error_term.clone(),
acc.apply(Term::tail_list().apply(Term::var(tail_name.to_string()))),
)
.lambda(tail_name)
}
} else if matches!(expect_level, ExpectLevel::None) || error_term == Term::Error {
acc.apply(Term::tail_list().apply(Term::var(tail_name.to_string())))
.lambda(name)
.apply(head_list)
.lambda(tail_name)
} else {
Term::var(tail_name.to_string())
.delayed_choose_list(
error_term.clone(),
acc.apply(Term::tail_list().apply(Term::var(tail_name.to_string()))) acc.apply(Term::tail_list().apply(Term::var(tail_name.to_string())))
.lambda(name) .lambda(name)
.apply(head_item) .apply(head_list),
.lambda(tail_name) )
} else { .lambda(tail_name)
// case for a custom error if the list is empty at this point
Term::var(tail_name.to_string())
.delayed_choose_list(
error_term.clone(),
acc.apply(
Term::tail_list().apply(Term::var(tail_name.to_string())),
)
.lambda(name)
.apply(head_item),
)
.lambda(tail_name)
}
}
} }
}) },
)
} }
pub fn apply_builtin_forces(mut term: Term<Name>, force_count: u32) -> Term<Name> { pub fn apply_builtin_forces(mut term: Term<Name>, force_count: u32) -> Term<Name> {

View File

@@ -3,7 +3,7 @@ use chumsky::prelude::*;
 use crate::{
     ast,
     expr::UntypedExpr,
-    parser::{error::ParseError, expr, token::Token, definition::function::param},
+    parser::{error::ParseError, expr, token::Token},
 };

 pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {

@@ -13,15 +13,7 @@ pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError
         .or_not()
         .then_ignore(just(Token::Test))
         .then(select! {Token::Name {name} => name})
-        .then(
-            param(false)
-                .separated_by(just(Token::Comma))
-                .allow_trailing()
-                .delimited_by(just(Token::LeftParen), just(Token::RightParen))
-                .map_with_span(|arguments, span| (arguments, span)),
-        )
         .then_ignore(just(Token::LeftParen))
         .then_ignore(just(Token::RightParen))
         .then(just(Token::Fail).ignored().or_not())
         .map_with_span(|name, span| (name, span))

View File

@@ -1,6 +1,6 @@
 [package]
 name = "aiken-lsp"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 edition = "2021"
 description = "Cardano smart contract language and toolchain"
 repository = "https://github.com/aiken-lang/aiken"

@@ -24,5 +24,5 @@ tracing = "0.1.37"
 url = "2.3.1"
 urlencoding = "2.1.2"
 
-aiken-lang = { path = '../aiken-lang', version = "1.0.24-alpha" }
-aiken-project = { path = '../aiken-project', version = "1.0.24-alpha" }
+aiken-lang = { path = '../aiken-lang', version = "1.0.23-alpha" }
+aiken-project = { path = '../aiken-project', version = "1.0.23-alpha" }

View File

@@ -1,7 +1,7 @@
 [package]
 name = "aiken-project"
 description = "Aiken project utilities"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 edition = "2021"
 repository = "https://github.com/aiken-lang/aiken/crates/project"
 homepage = "https://github.com/aiken-lang/aiken"

@@ -41,8 +41,8 @@ toml = "0.7.2"
 walkdir.workspace = true
 zip = "0.6.4"
 
-aiken-lang = { path = "../aiken-lang", version = "1.0.24-alpha" }
-uplc = { path = '../uplc', version = "1.0.24-alpha" }
+aiken-lang = { path = "../aiken-lang", version = "1.0.23-alpha" }
+uplc = { path = '../uplc', version = "1.0.23-alpha" }
 
 [dev-dependencies]
 indoc = "2.0.1"

View File

@@ -1,7 +1,7 @@
 [package]
 name = "aiken"
 description = "Cardano smart contract language and toolchain"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 edition = "2021"
 repository = "https://github.com/aiken-lang/aiken"
 homepage = "https://github.com/aiken-lang/aiken"

@@ -30,10 +30,10 @@ regex = "1.7.1"
 serde_json = "1.0.94"
 thiserror = "1.0.39"
 
-aiken-lang = { path = "../aiken-lang", version = "1.0.24-alpha" }
-aiken-lsp = { path = "../aiken-lsp", version = "1.0.24-alpha" }
-aiken-project = { path = '../aiken-project', version = "1.0.24-alpha" }
-uplc = { path = '../uplc', version = "1.0.24-alpha" }
+aiken-lang = { path = "../aiken-lang", version = "1.0.23-alpha" }
+aiken-lsp = { path = "../aiken-lsp", version = "1.0.23-alpha" }
+aiken-project = { path = '../aiken-project', version = "1.0.23-alpha" }
+uplc = { path = '../uplc', version = "1.0.23-alpha" }
 clap_complete = "4.3.2"
 inquire = "0.6.2"
 num-bigint = "0.4.3"

View File

@@ -1,7 +1,7 @@
 [package]
 name = "uplc"
 description = "Utilities for working with Untyped Plutus Core"
-version = "1.0.24-alpha"
+version = "1.0.23-alpha"
 edition = "2021"
 repository = "https://github.com/aiken-lang/aiken/crates/uplc"
 homepage = "https://github.com/aiken-lang/aiken"

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676848, nanos_since_epoch = 220859000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743641, nanos_since_epoch = 8357000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+bf96c3a"
     }
   },
   "validators": [

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+bf96c3a"
     }
   },
   "validators": [

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676846, nanos_since_epoch = 18640000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743640, nanos_since_epoch = 944875000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676848, nanos_since_epoch = 314005000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743642, nanos_since_epoch = 21373000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676863, nanos_since_epoch = 448060000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743643, nanos_since_epoch = 126963000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676848, nanos_since_epoch = 251934000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743642, nanos_since_epoch = 40391000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676868, nanos_since_epoch = 575072000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743643, nanos_since_epoch = 283477000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676866, nanos_since_epoch = 190875000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743643, nanos_since_epoch = 186230000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676867, nanos_since_epoch = 381950000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743643, nanos_since_epoch = 242458000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@ -1,16 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
[[requirements]]
name = "aiken-lang/stdlib"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "main"
requirements = []
source = "github"
[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706674613, nanos_since_epoch = 871553000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@ -1,8 +0,0 @@
name = "aiken-lang/acceptance_test_070"
version = '0.0.0'
description = ''
[[dependencies]]
name = 'aiken-lang/stdlib'
version = 'main'
source = 'github'

View File

@ -1,47 +0,0 @@
use aiken/list
use aiken/transaction.{InlineDatum, Input, OutputReference, TransactionId}

type OtherInput {
  output_reference: OutputReference,
  other: Data,
}

type MyDatum<a> {
  Constructor1(a)
  Constructor2
}

test discard_partitions() {
  let all_inputs =
    [
      OtherInput(OutputReference(TransactionId(#"aabb"), 2), 3),
      OtherInput(OutputReference(TransactionId(#"aabbcc"), 3), 3),
    ]

  let own_out_ref = OutputReference(TransactionId(#"aabb"), 2)

  expect ([_], other_inputs) =
    list.partition(
      all_inputs,
      fn(input) { input.output_reference == own_out_ref },
    )

  let inputs: List<Input> =
    []

  list.all(
    inputs,
    fn(input) {
      expect dat: MyDatum<Int> =
        when input.output.datum is {
          InlineDatum(d) -> d
          _ -> fail @"Not an inline datum"
        }

      when dat is {
        Constructor1 { .. } -> True
        _ -> False
      }
    },
  )
}

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676838, nanos_since_epoch = 531311000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743640, nanos_since_epoch = 944756000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+bf96c3a"
     }
   },
   "validators": [

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676857, nanos_since_epoch = 453030000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743642, nanos_since_epoch = 736511000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676854, nanos_since_epoch = 654421000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743642, nanos_since_epoch = 350520000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676837, nanos_since_epoch = 179902000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743640, nanos_since_epoch = 940487000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+bf96c3a"
     }
   },
   "validators": [

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+bf96c3a"
     }
   },
   "validators": [

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676839, nanos_since_epoch = 823823000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743640, nanos_since_epoch = 972182000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676837, nanos_since_epoch = 181741000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743640, nanos_since_epoch = 966665000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676837, nanos_since_epoch = 181596000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743640, nanos_since_epoch = 968996000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676838, nanos_since_epoch = 486267000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743640, nanos_since_epoch = 975570000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676857, nanos_since_epoch = 437022000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743642, nanos_since_epoch = 725442000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+bf96c3a"
     }
   },
   "validators": [

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676854, nanos_since_epoch = 655166000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743642, nanos_since_epoch = 191574000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676858, nanos_since_epoch = 638040000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705743642, nanos_since_epoch = 768373000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706676853, nanos_since_epoch = 345400000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706210606, nanos_since_epoch = 598159000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@ -1,89 +0,0 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_089",
"version": "0.0.0",
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.23-alpha+3a53427"
}
},
"validators": [
{
"title": "test2.simple_oneshot",
"redeemer": {
"title": "_r",
"schema": {
"$ref": "#/definitions/Void"
}
},
"parameters": [
{
"title": "utxo_ref",
"schema": {
"$ref": "#/definitions/aiken~1transaction~1OutputReference"
}
}
],
"compiledCode": "58d40100003232323232323232322225333006323232323232533300c3370e900018058018991919299980799b8748000c0380044c8c94ccc044cdc3a40000022944528180780099801002119baf3004300e00100d163300100323375e6006601a00201844646600200200644a6660280022980103d87a8000132325333013300500213374a90001980b80125eb804cc010010004c060008c0580048c04800458dd61808000980400198070009807001180600098020008a4c26cac4600a6ea80048c00cdd5000ab9a5573aaae7955cfaba05742ae89",
"hash": "dd850cc95e173d7dbb3357a4a021afc350f405a3cc2e85ace58bfe8d"
}
],
"definitions": {
"ByteArray": {
"dataType": "bytes"
},
"Int": {
"dataType": "integer"
},
"Void": {
"title": "Unit",
"description": "The nullary constructor.",
"anyOf": [
{
"dataType": "constructor",
"index": 0,
"fields": []
}
]
},
"aiken/transaction/OutputReference": {
"title": "OutputReference",
"description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output",
"anyOf": [
{
"title": "OutputReference",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "transaction_id",
"$ref": "#/definitions/aiken~1transaction~1TransactionId"
},
{
"title": "output_index",
"$ref": "#/definitions/Int"
}
]
}
]
},
"aiken/transaction/TransactionId": {
"title": "TransactionId",
"description": "A unique transaction identifier, as the hash of a transaction body. Note that the transaction id\n isn't a direct hash of the `Transaction` as visible on-chain. Rather, they correspond to hash\n digests of transaction body as they are serialized on the network.",
"anyOf": [
{
"title": "TransactionId",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "hash",
"$ref": "#/definitions/ByteArray"
}
]
}
]
}
}
}

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+bf96c3a"
     }
   },
   "validators": [

View File

@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706677006, nanos_since_epoch = 304401000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1705181303, nanos_since_epoch = 777227000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.23-alpha+3a53427"
+      "version": "v1.0.21-alpha+0161cf6"
     }
   },
   "validators": [

View File

@ -1 +0,0 @@
build

View File

@ -1,114 +0,0 @@
# Moonrat
> Hedgehog's spineless cousin
## Aims
Property based testing for aiken inspired by hedgehog and elm-test.
Aims:
- Default gen and shrinking auto derived for any types
- Support custom gen/shrinking
- Friendly output (progress, sensible feedback such as diffs on large data)
- Reasonably speedy
Non-aims:
- e2e testing.
This is intended for functions rather than testing full txs against validators.
Although it should still be possible, it is not our aim here to make writing and testing txs ergonomic.
## Interface
An aiken file
```aiken
// my_tests.ak
type T0 {
  f0 : Int,
  ...
}

fn gen_t0(seed : Int, complexity : Int) -> T0 {
  ...
}

fn shrink_t0(x : T0) -> List<T0> {
  // TODO : what should the signature of this be?!
  ...
}

type T1 {
  f0 : Int,
  ...
}

test prop_x (
  a0 : T0 via (gen_t0(0), shrink_t0),
  a1 : T0 via (gen_t0(1), shrink_t0),
  a2 : T0,
  a2 : T1,
) {
  todo!
}
```
Comments on the sample.
`prop_x` is our test - now supporting arguments.
There is new syntax `via`.
We have a custom generator and shrinker for `T0` which we may or may not use.
In the absence of a specified gen/shrink pair, the default, autoderived one is used.
Run 100 times
```
aiken check -m "my_lib/my_test.{prop_x}"
```
Run 1000 cases with a specified seed and shrink limit
```
aiken check --repeat 1000 --seed 123212123 --shrink-limit 5
```
Reporting:
```sample
Testing ...
my_test
prop_x PASS [100/100]
```
```sample
Testing ...
my_test
prop_x FAIL (after 16 tests and 5 shrinks):
a0 = T0 { f0 : 120201, ... }
a1 = T0 { ... }
...
RHS = True
LHS = False
seed = 123212123
Rerun with
aiken check -m "my_lib/my_test.{prop_x}" --args " [ T0 { }] ... "
```
## Functionality
The Aiken compiler finds all tests.
Any test with args is assumed to be subject to property-based testing.
[Property config](https://hackage.haskell.org/package/hedgehog-1.4/docs/Hedgehog-Internal-Property.html#t:PropertyConfig) is global, rather than local.
The test is compiled as if it were a parametrized validator.
Separate gen and shrink functions are also compiled.
To evaluate the test, the generator(s) are run to generate input for the test.
Then the args are applied, and the code evaluated.
On success this is repeated until `repeat` number of successes.
On failure, the shrinker is employed to seek a simpler failure case.
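The sketch below illustrates that generate/evaluate/shrink cycle. It is a minimal sketch in Rust, assuming a hypothetical `Arbitrary` trait and `run_property` driver; it is not the actual implementation, which compiles the test and its gen/shrink functions and evaluates them like a parametrized validator.
```rust
// Hypothetical names for illustration only; not the codegen-backed interface.
trait Arbitrary: Sized {
    /// Produce a value from a seed at a given complexity.
    fn generate(seed: u64, complexity: u32) -> Self;
    /// Propose strictly simpler candidate values.
    fn shrink(&self) -> Vec<Self>;
}

/// Run `property` against `repeat` generated cases; on failure, shrink the
/// counterexample up to `shrink_limit` times and return the smallest one found.
fn run_property<T: Arbitrary>(
    repeat: u32,
    shrink_limit: u32,
    mut seed: u64,
    property: impl Fn(&T) -> bool,
) -> Result<(), T> {
    for case in 0..repeat {
        let input = T::generate(seed, case); // complexity grows with the case number
        seed = seed.wrapping_mul(6364136223846793005).wrapping_add(1); // advance the seed
        if property(&input) {
            continue;
        }
        // Failure: greedily walk towards a simpler failing input.
        let mut smallest = input;
        'shrinking: for _ in 0..shrink_limit {
            for candidate in smallest.shrink() {
                if !property(&candidate) {
                    smallest = candidate;
                    continue 'shrinking;
                }
            }
            break; // no simpler candidate still fails
        }
        return Err(smallest);
    }
    Ok(())
}
```
On `Err`, the runner would report the shrunk arguments and the seed, matching the sample output shown above.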

View File

@ -1,16 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
[[requirements]]
name = "aiken-lang/stdlib"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "main"
requirements = []
source = "github"
[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707160390, nanos_since_epoch = 895305443 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

View File

@ -1,8 +0,0 @@
name = "aiken-lang/moonrat"
version = "0.0.0"
description = ""
[[dependencies]]
name = 'aiken-lang/stdlib'
version = 'main'
source = 'github'

View File

@ -1,4 +0,0 @@
test test_with_arg(x : Int) {
x - x == 0
}

View File

@ -1,89 +0,0 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_089",
"version": "0.0.0",
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+982eff4"
}
},
"validators": [
{
"title": "test2.simple_oneshot",
"redeemer": {
"title": "_r",
"schema": {
"$ref": "#/definitions/Void"
}
},
"parameters": [
{
"title": "utxo_ref",
"schema": {
"$ref": "#/definitions/aiken~1transaction~1OutputReference"
}
}
],
"compiledCode": "58d40100003232323232323232322225333006323232323232533300c3370e900018058018991919299980799b8748000c0380044c8c94ccc044cdc3a40000022944528180780099801002119baf3004300e00100d163300100323375e6006601a00201844646600200200644a6660280022980103d87a8000132325333013300500213374a90001980b80125eb804cc010010004c060008c0580048c04800458dd61808000980400198070009807001180600098020008a4c26cac4600a6ea80048c00cdd5000ab9a5573aaae7955cfaba05742ae89",
"hash": "dd850cc95e173d7dbb3357a4a021afc350f405a3cc2e85ace58bfe8d"
}
],
"definitions": {
"ByteArray": {
"dataType": "bytes"
},
"Int": {
"dataType": "integer"
},
"Void": {
"title": "Unit",
"description": "The nullary constructor.",
"anyOf": [
{
"dataType": "constructor",
"index": 0,
"fields": []
}
]
},
"aiken/transaction/OutputReference": {
"title": "OutputReference",
"description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output",
"anyOf": [
{
"title": "OutputReference",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "transaction_id",
"$ref": "#/definitions/aiken~1transaction~1TransactionId"
},
{
"title": "output_index",
"$ref": "#/definitions/Int"
}
]
}
]
},
"aiken/transaction/TransactionId": {
"title": "TransactionId",
"description": "A unique transaction identifier, as the hash of a transaction body. Note that the transaction id\n isn't a direct hash of the `Transaction` as visible on-chain. Rather, they correspond to hash\n digests of transaction body as they are serialized on the network.",
"anyOf": [
{
"title": "TransactionId",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "hash",
"$ref": "#/definitions/ByteArray"
}
]
}
]
}
}
}

View File

@@ -79,9 +79,8 @@
             cargo-insta
             (pkgs.rust-bin.stable.latest.default.override {
-              extensions = [ "rust-src" "clippy" "rustfmt" "rust-analyzer" ];
+              extensions = [ "rust-src" "clippy" "rustfmt" ];
             })
           ] ++ osxDependencies;