Merge branch 'supercharge-constants'

This commit is contained in:
KtorZ 2024-08-30 15:45:04 +02:00
commit 55d381fbfc
No known key found for this signature in database
GPG Key ID: 33173CB6F77F4277
165 changed files with 748 additions and 2923 deletions

View File

@ -41,6 +41,23 @@ queens5x5/fc 1867 179227518621 1023295666
-->
<!--
v1.1.0
┍━ benchmarks/clausify/benchmark ━━━━━━━━━━━━━━━━━━━━━━━━━━━
│ PASS [mem: 53769377, cpu: 16198154564] bench_clausify_f1
│ PASS [mem: 67108683, cpu: 20169891270] bench_clausify_f2
│ PASS [mem: 179606857, cpu: 53923018831] bench_clausify_f3
│ PASS [mem: 231444137, cpu: 70014384566] bench_clausify_f4
│ PASS [mem: 874286879, cpu: 262421671684] bench_clausify_f5
┕━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 5 tests | 5 passed | 0 failed
┍━ benchmarks/knights/benchmark ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
│ PASS [mem: 172246681, cpu: 57037226471] bench_knights_100_4x4
│ PASS [mem: 321690197, cpu: 137399466410] bench_knights_100_6x6
│ PASS [mem: 601026745, cpu: 281418742606] bench_knights_100_8x8
┕━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 3 tests | 3 passed | 0 failed
v1.0.29-alpha & v1.0.28-alpha
┍━ benchmarks/clausify/benchmark ━━━━━━━━━━━━━━━━━━━━━━━━━━━
@ -128,26 +145,26 @@ V1.0.20-alpha, v1.0.19-alpha & v1.0.18-alpha
### CPU
| Benchmark | `v1.0.29` | vs `v1.0.25` | vs `v1.0.23` | vs `v1.0.21` | vs `v1.0.18` |
| Benchmark | `v1.1.0` | vs `v1.0.29` | vs `v1.0.25` | vs `v1.0.23` | vs `v1.0.21` |
| --- | ---: | ---: | ---: | ---: | ---: |
| `clausify_f1` | 21594809455 | -6.26% | +11.71% | +11.71% | +11.84% |
| `clausify_f2` | 26864755594 | -6.07% | +11.30% | +11.30% | +11.44% |
| `clausify_f3` | 71814854199 | -5.98% | +11.17% | +11.17% | +11.31% |
| `clausify_f4` | 93024749730 | -5.01% | +9.37% | +9.37% | +9.55% |
| `clausify_f5` | 349894049008 | -6.00% | +11.24% | +11.24% | +11.35% |
| `knights_100_4x4` | 71851995726 | +1.40% | +4.45% | +4.30% | +4.80% |
| `knights_100_6x6` | 159767368294 | +8.93% | +11.92% | +11.85% | +12.70% |
| `knights_100_8x8` | 319834775948 | +11.40% | +14.35% | +14.32% | +15.13% |
| `clausify_f1` | 16198154564 | -24.99% | -6.26% | +11.71% | +11.71% |
| `clausify_f2` | 20169891270 | -24.92% | -6.07% | +11.30% | +11.30% |
| `clausify_f3` | 53923018831 | -24.91% | -5.98% | +11.17% | +11.17% |
| `clausify_f4` | 70014384566 | -24.74% | -5.01% | +9.37% | +9.37% |
| `clausify_f5` | 262421671684 | -25.00% | -6.00% | +11.24% | +11.24% |
| `knights_100_4x4` | 57037226471 | -20.62% | +1.40% | +4.45% | +4.30% |
| `knights_100_6x6` | 137399466410 | -14.00% | +8.93% | +11.92% | +11.85% |
| `knights_100_8x8` | 281418742606 | -12.00% | +11.40% | +14.35% | +14.32% |
### Mem
| Benchmark | `v1.0.29` | vs `v1.0.25` | vs `v1.0.23` | vs `v1.0.21` | vs `v1.0.18` |
| Benchmark | `v1.1.0` | vs `v1.0.29` | vs `v1.0.25` | vs `v1.0.23` | vs `v1.0.21` |
| --- | ---: | ---: | ---: | ---: | ---: |
| `clausify_f1` | 53769377 | -2.29% | +20.40% | +20.40% | +20.63% |
| `clausify_f2` | 67108683 | -2.54% | +19.63% | +19.63% | +19.87% |
| `clausify_f3` | 179606857 | -2.64% | +19.38% | +19.38% | +19.61% |
| `clausify_f4` | 231444137 | -2.75% | +16.33% | +16.33% | +16.64% |
| `clausify_f5` | 874286879 | -2.63% | +19.53% | +19.53% | +19.72% |
| `knights_100_4x4` | 172256715 | -0.48% | +5.04% | +4.90% | +5.80% |
| `knights_100_6x6` | 321712271 | +10.08% | +16.54% | +16.46% | +18.29% |
| `knights_100_8x8` | 601065675 | +14.48% | +21.30% | +21.26% | +23.15% |
| `clausify_f1` | 53769377 | ± 0.00% | -2.29% | +20.40% | +20.40% |
| `clausify_f2` | 67108683 | ± 0.00% | -2.54% | +19.63% | +19.63% |
| `clausify_f3` | 179606857 | ± 0.00% | -2.64% | +19.38% | +19.38% |
| `clausify_f4` | 231444137 | ± 0.00% | -2.75% | +16.33% | +16.33% |
| `clausify_f5` | 874286879 | ± 0.00% | -2.63% | +19.53% | +19.53% |
| `knights_100_4x4` | 172246681 | -0.01% | -0.48% | +5.04% | +4.90% |
| `knights_100_6x6` | 321690197 | -0.01% | +10.08% | +16.54% | +16.46% |
| `knights_100_8x8` | 601026745 | -0.01% | +14.48% | +21.30% | +21.26% |

View File

@ -3,13 +3,14 @@
[[requirements]]
name = "aiken-lang/stdlib"
version = "1.9.0"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "1.9.0"
version = "main"
requirements = []
source = "github"
[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1725022066, nanos_since_epoch = 34627000 }, "21da5761ffd088c964cb038888826da8a6ea1d8c26f6f4e8a1dc6e97a64fe3f7"]

View File

@ -1,5 +1,5 @@
use aiken/int
use aiken/list
use aiken/collection/list
use aiken/primitive/int
// ------------------------------------------------------------------ Benchmarks

View File

@ -1,4 +1,4 @@
use aiken/list
use aiken/collection/list
use benchmarks/knights/heuristic.{descendants, start_tour, tour_finished}
use benchmarks/knights/queue.{Queue}
use benchmarks/knights/types.{ChessSet, Solution}
@ -10,11 +10,11 @@ test bench_knights_100_4x4() {
}
test bench_knights_100_6x6() {
run_knights(100, 6) == solution_100_6x6()
run_knights(100, 6) == solution_100_6x6
}
test bench_knights_100_8x8() {
run_knights(100, 8) == solution_100_8x8()
run_knights(100, 8) == solution_100_8x8
}
// ----------------------------------------------------------------------- Setup
@ -30,7 +30,7 @@ fn depth_search(
done: fn(a) -> Bool,
) -> Queue<a> {
if depth == 0 || queue.is_empty(xs) {
queue.new()
queue.empty
} else if done(queue.head(xs)) {
depth_search(depth - 1, queue.remove_front(xs), grow, done)
|> queue.append_front(queue.head(xs))
@ -41,7 +41,7 @@ fn depth_search(
}
fn root(size: Int) -> Queue<(Int, ChessSet)> {
queue.append_all_front(queue.new(), mk_starts(size))
queue.append_all_front(queue.empty, mk_starts(size))
}
fn mk_starts(size: Int) -> List<(Int, ChessSet)> {
@ -79,7 +79,7 @@ fn done(item: (Int, ChessSet)) -> Bool {
// ------------------------------------------------------------------ Fixtures
fn solution_100_6x6() -> Solution {
const solution_100_6x6: Solution =
[
(
0,
@ -266,9 +266,9 @@ fn solution_100_6x6() -> Solution {
},
),
]
}
fn solution_100_8x8() -> Solution {
const solution_100_8x8: Solution =
[
(
0,
@ -493,4 +493,3 @@ fn solution_100_8x8() -> Solution {
},
),
]
}

View File

@ -1,4 +1,4 @@
use aiken/list
use aiken/collection/list
use benchmarks/knights/types.{ChessSet, Tile}
pub fn create_board(size: Int, init_square: Tile) -> ChessSet {

View File

@ -1,6 +1,6 @@
use aiken/builtin
use aiken/int
use aiken/list
use aiken/collection/list
use aiken/primitive/int
use benchmarks/knights/chess_set.{
add_piece, create_board, delete_first, first_piece, is_square_free, last_piece,
}
@ -18,9 +18,8 @@ type Direction {
RD
}
fn direction_list() {
const direction_list =
[UL, UR, DL, DR, LU, LD, RU, RD]
}
fn move(direction: Direction, tile: Tile) -> Tile {
let (x, y) = tile
@ -130,7 +129,7 @@ fn move_knight(board: ChessSet, direction: Direction) -> ChessSet {
}
fn possible_moves(board: ChessSet) -> List<Direction> {
direction_list() |> list.filter(can_move(board, _))
direction_list |> list.filter(can_move(board, _))
}
fn compare_chess_set(a: (Int, ChessSet), b: (Int, ChessSet)) -> Ordering {

View File

@ -1,12 +1,10 @@
use aiken/list
use aiken/collection/list
pub opaque type Queue<a> {
inner: List<a>,
}
pub fn new() -> Queue<a> {
[] |> Queue
}
pub const empty: Queue<a> = [] |> Queue
pub fn to_list(self: Queue<a>) -> List<a> {
self.inner

View File

@ -1,4 +1,4 @@
use aiken/list
use aiken/collection/list
pub fn quicksort(xs: List<a>, compare: fn(a, a) -> Ordering) -> List<a> {
when xs is {

View File

@ -161,6 +161,7 @@ impl TypedModule {
pub fn register_definitions(
&self,
functions: &mut IndexMap<FunctionAccessKey, TypedFunction>,
constants: &mut IndexMap<FunctionAccessKey, TypedExpr>,
data_types: &mut IndexMap<DataTypeKey, TypedDataType>,
) {
for def in self.definitions() {
@ -203,7 +204,17 @@ impl TypedModule {
}
}
Definition::TypeAlias(_) | Definition::ModuleConstant(_) | Definition::Use(_) => {}
Definition::ModuleConstant(ModuleConstant { name, value, .. }) => {
constants.insert(
FunctionAccessKey {
module_name: self.name.clone(),
function_name: name.clone(),
},
value.clone(),
);
}
Definition::TypeAlias(_) | Definition::Use(_) => {}
}
}
}
@ -459,18 +470,17 @@ pub struct Use<PackageName> {
pub unqualified: Vec<UnqualifiedImport>,
}
pub type TypedModuleConstant = ModuleConstant<Rc<Type>>;
pub type UntypedModuleConstant = ModuleConstant<()>;
pub type TypedModuleConstant = ModuleConstant<TypedExpr>;
pub type UntypedModuleConstant = ModuleConstant<UntypedExpr>;
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct ModuleConstant<T> {
pub struct ModuleConstant<Expr> {
pub doc: Option<String>,
pub location: Span,
pub public: bool,
pub name: String,
pub annotation: Option<Annotation>,
pub value: Box<Constant>,
pub tipo: T,
pub value: Expr,
}
pub type TypedValidator = Validator<Rc<Type>, TypedArg, TypedExpr>;
@ -746,7 +756,7 @@ pub enum Definition<T, Arg, Expr, PackageName> {
Use(Use<PackageName>),
ModuleConstant(ModuleConstant<T>),
ModuleConstant(ModuleConstant<Expr>),
Test(Function<T, Expr, ArgVia<Arg, Expr>>),
@ -843,55 +853,6 @@ pub struct DefinitionLocation<'module> {
pub span: Span,
}
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub enum Constant {
Int {
location: Span,
value: String,
base: Base,
},
String {
location: Span,
value: String,
},
ByteArray {
location: Span,
bytes: Vec<u8>,
preferred_format: ByteArrayFormatPreference,
},
CurvePoint {
location: Span,
point: Box<Curve>,
preferred_format: ByteArrayFormatPreference,
},
}
impl Constant {
pub fn tipo(&self) -> Rc<Type> {
match self {
Constant::Int { .. } => Type::int(),
Constant::String { .. } => Type::string(),
Constant::ByteArray { .. } => Type::byte_array(),
Constant::CurvePoint { point, .. } => match point.as_ref() {
Curve::Bls12_381(Bls12_381Point::G1(_)) => Type::g1_element(),
Curve::Bls12_381(Bls12_381Point::G2(_)) => Type::g2_element(),
},
}
}
pub fn location(&self) -> Span {
match self {
Constant::Int { location, .. }
| Constant::String { location, .. }
| Constant::ByteArray { location, .. }
| Constant::CurvePoint { location, .. } => *location,
}
}
}
pub type TypedCallArg = CallArg<TypedExpr>;
pub type ParsedCallArg = CallArg<Option<UntypedExpr>>;
@ -1298,6 +1259,19 @@ impl Annotation {
}
}
pub fn list(inner: Annotation, location: Span) -> Self {
Annotation::Constructor {
name: "List".to_string(),
module: None,
arguments: vec![inner],
location,
}
}
pub fn tuple(elems: Vec<Annotation>, location: Span) -> Self {
Annotation::Tuple { elems, location }
}
pub fn is_logically_equal(&self, other: &Annotation) -> bool {
match self {
Annotation::Constructor {

View File

@ -30,6 +30,7 @@ pub enum TypedExpr {
location: Span,
tipo: Rc<Type>,
value: String,
base: Base,
},
String {
@ -42,12 +43,14 @@ pub enum TypedExpr {
location: Span,
tipo: Rc<Type>,
bytes: Vec<u8>,
preferred_format: ByteArrayFormatPreference,
},
CurvePoint {
location: Span,
tipo: Rc<Type>,
point: Box<Curve>,
preferred_format: ByteArrayFormatPreference,
},
Sequence {

View File

@ -1,7 +1,7 @@
use crate::{
ast::{
Annotation, ArgBy, ArgName, ArgVia, AssignmentKind, AssignmentPattern, BinOp,
ByteArrayFormatPreference, CallArg, Constant, CurveType, DataType, Definition, Function,
ByteArrayFormatPreference, CallArg, CurveType, DataType, Definition, Function,
LogicalOpChainKind, ModuleConstant, OnTestFailure, Pattern, RecordConstructor,
RecordConstructorArg, RecordUpdateSpread, Span, TraceKind, TypeAlias, TypedArg,
TypedValidator, UnOp, UnqualifiedImport, UntypedArg, UntypedArgVia, UntypedAssignmentKind,
@ -9,7 +9,7 @@ use crate::{
UntypedPattern, UntypedRecordUpdateArg, Use, Validator, CAPTURE_VARIABLE,
},
docvec,
expr::{FnStyle, UntypedExpr, DEFAULT_ERROR_STR, DEFAULT_TODO_STR},
expr::{FnStyle, TypedExpr, UntypedExpr, DEFAULT_ERROR_STR, DEFAULT_TODO_STR},
parser::{
extra::{Comment, ModuleExtra},
token::Base,
@ -295,7 +295,7 @@ impl<'comments> Formatter<'comments> {
head.append(" =")
.append(break_("", " "))
.append(self.const_expr(value))
.append(self.expr(value, true))
.nest(INDENT)
.group()
}
@ -338,14 +338,32 @@ impl<'comments> Formatter<'comments> {
})
}
fn const_expr<'a>(&mut self, value: &'a Constant) -> Document<'a> {
pub fn docs_const_expr<'a>(&mut self, name: &'a str, value: &'a TypedExpr) -> Document<'a> {
let mut printer = tipo::pretty::Printer::new();
let doc = name
.to_doc()
.append(": ")
.append(printer.print(&value.tipo()));
// NOTE: Only display the full value for simple expressions.
let value = self.const_expr(value);
if value.is_empty() {
doc
} else {
doc.append(" = ").append(value)
}
}
pub fn const_expr<'a>(&mut self, value: &'a TypedExpr) -> Document<'a> {
match value {
Constant::ByteArray {
TypedExpr::UInt { value, base, .. } => self.int(value, base),
TypedExpr::String { value, .. } => self.string(value),
TypedExpr::ByteArray {
bytes,
preferred_format,
..
} => self.bytearray(bytes, None, preferred_format),
Constant::CurvePoint {
TypedExpr::CurvePoint {
point,
preferred_format,
..
@ -354,20 +372,23 @@ impl<'comments> Formatter<'comments> {
Some(point.as_ref().into()),
preferred_format,
),
Constant::Int { value, base, .. } => self.int(value, base),
Constant::String { value, .. } => self.string(value),
TypedExpr::Tuple { elems, .. } => {
wrap_args(elems.iter().map(|e| (self.const_expr(e), false))).group()
}
TypedExpr::Pair { fst, snd, .. } => {
let elems = [fst, snd];
"Pair"
.to_doc()
.append(wrap_args(elems.iter().map(|e| (self.const_expr(e), false))).group())
}
TypedExpr::List { elements, .. } => {
wrap_args(elements.iter().map(|e| (self.const_expr(e), false))).group()
}
TypedExpr::Var { name, .. } => name.to_doc(),
_ => Document::Str(""),
}
}
pub fn docs_const_expr<'a>(&mut self, name: &'a str, value: &'a Constant) -> Document<'a> {
let mut printer = tipo::pretty::Printer::new();
name.to_doc()
.append(": ")
.append(printer.print(&value.tipo()))
.append(" = ")
.append(self.const_expr(value))
}
fn documented_definition<'a>(&mut self, s: &'a UntypedDefinition) -> Document<'a> {
let comments = self.doc_comments(s.location().start);
comments.append(self.definition(s).group()).group()

View File

@ -5,9 +5,9 @@ pub mod tree;
use self::{
air::Air,
builder::{
cast_validator_args, constants_ir, convert_type_to_data, extract_constant,
modify_cyclic_calls, modify_self_calls, rearrange_list_clauses, AssignmentProperties,
ClauseProperties, CodeGenSpecialFuncs, CycleFunctionNames, HoistableFunction, Variant,
cast_validator_args, convert_type_to_data, extract_constant, modify_cyclic_calls,
modify_self_calls, rearrange_list_clauses, AssignmentProperties, ClauseProperties,
CodeGenSpecialFuncs, CycleFunctionNames, HoistableFunction, Variant,
},
tree::{AirTree, TreePath},
};
@ -58,6 +58,7 @@ pub struct CodeGenerator<'a> {
plutus_version: PlutusVersion,
/// immutable index maps
functions: IndexMap<&'a FunctionAccessKey, &'a TypedFunction>,
constants: IndexMap<&'a FunctionAccessKey, &'a TypedExpr>,
data_types: IndexMap<&'a DataTypeKey, &'a TypedDataType>,
module_types: IndexMap<&'a str, &'a TypeInfo>,
module_src: IndexMap<&'a str, &'a (String, LineNumbers)>,
@ -67,7 +68,6 @@ pub struct CodeGenerator<'a> {
defined_functions: IndexMap<FunctionAccessKey, ()>,
special_functions: CodeGenSpecialFuncs,
code_gen_functions: IndexMap<String, CodeGenFunction>,
zero_arg_functions: IndexMap<(FunctionAccessKey, Variant), Vec<Air>>,
cyclic_functions:
IndexMap<(FunctionAccessKey, Variant), (CycleFunctionNames, usize, FunctionAccessKey)>,
/// mutable and reset as well
@ -82,6 +82,7 @@ impl<'a> CodeGenerator<'a> {
pub fn new(
plutus_version: PlutusVersion,
functions: IndexMap<&'a FunctionAccessKey, &'a TypedFunction>,
constants: IndexMap<&'a FunctionAccessKey, &'a TypedExpr>,
data_types: IndexMap<&'a DataTypeKey, &'a TypedDataType>,
module_types: IndexMap<&'a str, &'a TypeInfo>,
module_src: IndexMap<&'a str, &'a (String, LineNumbers)>,
@ -90,6 +91,7 @@ impl<'a> CodeGenerator<'a> {
CodeGenerator {
plutus_version,
functions,
constants,
data_types,
module_types,
module_src,
@ -97,7 +99,6 @@ impl<'a> CodeGenerator<'a> {
defined_functions: IndexMap::new(),
special_functions: CodeGenSpecialFuncs::new(),
code_gen_functions: IndexMap::new(),
zero_arg_functions: IndexMap::new(),
cyclic_functions: IndexMap::new(),
id_gen: IdGenerator::new(),
}
@ -105,7 +106,6 @@ impl<'a> CodeGenerator<'a> {
pub fn reset(&mut self, reset_special_functions: bool) {
self.code_gen_functions = IndexMap::new();
self.zero_arg_functions = IndexMap::new();
self.defined_functions = IndexMap::new();
self.cyclic_functions = IndexMap::new();
self.id_gen = IdGenerator::new();
@ -163,17 +163,19 @@ impl<'a> CodeGenerator<'a> {
self.finalize(term)
}
fn finalize(&mut self, mut term: Term<Name>) -> Program<Name> {
term = self.special_functions.apply_used_functions(term);
fn new_program<T>(&self, term: Term<T>) -> Program<T> {
let version = match self.plutus_version {
PlutusVersion::V1 | PlutusVersion::V2 => (1, 0, 0),
PlutusVersion::V3 => (1, 1, 0),
};
let mut program = Program { version, term };
Program { version, term }
}
program = aiken_optimize_and_intern(program);
fn finalize(&mut self, mut term: Term<Name>) -> Program<Name> {
term = self.special_functions.apply_used_functions(term);
let program = aiken_optimize_and_intern(self.new_program(term));
// This is very important to call here.
// If this isn't done, re-using the same instance
@ -268,12 +270,7 @@ impl<'a> CodeGenerator<'a> {
TypedExpr::Var {
constructor, name, ..
} => match &constructor.variant {
ValueConstructorVariant::ModuleConstant { literal, .. } => {
constants_ir(literal)
}
_ => AirTree::var(constructor.clone(), name, ""),
},
} => AirTree::var(constructor.clone(), name, ""),
TypedExpr::Fn { args, body, .. } => AirTree::anon_func(
args.iter()
@ -743,8 +740,16 @@ impl<'a> CodeGenerator<'a> {
AirTree::builtin(*builtin, tipo.clone(), vec![])
}
}
ModuleValueConstructor::Constant { literal, .. } => {
builder::constants_ir(literal)
ModuleValueConstructor::Constant { module, name, .. } => {
let type_info = self.module_types.get(module_name.as_str()).unwrap();
let value = type_info.values.get(name).unwrap();
AirTree::var(
ValueConstructor::public(tipo.clone(), value.variant.clone()),
format!("{module}_{name}"),
"",
)
}
},
@ -3527,17 +3532,15 @@ impl<'a> CodeGenerator<'a> {
.unwrap_or_else(|| panic!("Missing Function Variant Definition"));
match function {
HoistableFunction::Function { deps, params, .. } => {
if !params.is_empty() {
for (dep_generic_func, dep_variant) in deps.iter() {
if !(dep_generic_func == &generic_func && dep_variant == &variant) {
validator_hoistable
.insert(0, (dep_generic_func.clone(), dep_variant.clone()));
HoistableFunction::Function { deps, .. } => {
for (dep_generic_func, dep_variant) in deps.iter() {
if !(dep_generic_func == &generic_func && dep_variant == &variant) {
validator_hoistable
.insert(0, (dep_generic_func.clone(), dep_variant.clone()));
sorted_function_vec.retain(|(generic_func, variant)| {
!(generic_func == dep_generic_func && variant == dep_variant)
});
}
sorted_function_vec.retain(|(generic_func, variant)| {
!(generic_func == dep_generic_func && variant == dep_variant)
});
}
}
@ -3680,56 +3683,39 @@ impl<'a> CodeGenerator<'a> {
// first grab dependencies
let func_params = params;
let params_empty = func_params.is_empty();
let deps = (tree_path, func_deps.clone());
if !params_empty {
let recursive_nonstatics = if is_recursive {
modify_self_calls(&mut body, key, variant, func_params)
} else {
func_params.clone()
};
let node_to_edit = air_tree.find_air_tree_node(tree_path);
let defined_function = AirTree::define_func(
&key.function_name,
&key.module_name,
variant,
func_params.clone(),
is_recursive,
recursive_nonstatics,
body,
node_to_edit.clone(),
);
let defined_dependencies = self.hoist_dependent_functions(
deps,
params_empty,
(key, variant),
hoisted_functions,
functions_to_hoist,
defined_function,
);
// now hoist full function onto validator tree
*node_to_edit = defined_dependencies;
hoisted_functions.push((key.clone(), variant.clone()));
let recursive_nonstatics = if is_recursive {
modify_self_calls(&mut body, key, variant, func_params)
} else {
let defined_func = self.hoist_dependent_functions(
deps,
params_empty,
(key, variant),
hoisted_functions,
functions_to_hoist,
body,
);
func_params.clone()
};
self.zero_arg_functions
.insert((key.clone(), variant.clone()), defined_func.to_vec());
}
let node_to_edit = air_tree.find_air_tree_node(tree_path);
let defined_function = AirTree::define_func(
&key.function_name,
&key.module_name,
variant,
func_params.clone(),
is_recursive,
recursive_nonstatics,
body,
node_to_edit.clone(),
);
let defined_dependencies = self.hoist_dependent_functions(
deps,
(key, variant),
hoisted_functions,
functions_to_hoist,
defined_function,
);
// now hoist full function onto validator tree
*node_to_edit = defined_dependencies;
hoisted_functions.push((key.clone(), variant.clone()));
}
HoistableFunction::CyclicFunction {
functions,
@ -3757,8 +3743,6 @@ impl<'a> CodeGenerator<'a> {
let defined_dependencies = self.hoist_dependent_functions(
deps,
// cyclic functions always have params
false,
(key, variant),
hoisted_functions,
functions_to_hoist,
@ -3782,7 +3766,6 @@ impl<'a> CodeGenerator<'a> {
fn hoist_dependent_functions(
&mut self,
deps: (&TreePath, Vec<(FunctionAccessKey, String)>),
params_empty: bool,
func_key_variant: (&FunctionAccessKey, &Variant),
hoisted_functions: &mut Vec<(FunctionAccessKey, String)>,
functions_to_hoist: &IndexMap<
@ -3807,18 +3790,17 @@ impl<'a> CodeGenerator<'a> {
.unwrap_or_else(|| panic!("Missing Function Variant Definition"));
match function {
HoistableFunction::Function { deps, params, .. } => {
if !params.is_empty() {
for (dep_generic_func, dep_variant) in deps.iter() {
if !(dep_generic_func == &dep.0 && dep_variant == &dep.1) {
sorted_dep_vec.retain(|(generic_func, variant)| {
!(generic_func == dep_generic_func && variant == dep_variant)
});
HoistableFunction::Function { deps, .. } => {
for (dep_generic_func, dep_variant) in deps.iter() {
if !(dep_generic_func == &dep.0 && dep_variant == &dep.1) {
sorted_dep_vec.retain(|(generic_func, variant)| {
!(generic_func == dep_generic_func && variant == dep_variant)
});
deps_vec.insert(0, (dep_generic_func.clone(), dep_variant.clone()));
}
deps_vec.insert(0, (dep_generic_func.clone(), dep_variant.clone()));
}
}
sorted_dep_vec.push((dep.0.clone(), dep.1.clone()));
}
HoistableFunction::CyclicFunction { deps, .. } => {
@ -3849,12 +3831,12 @@ impl<'a> CodeGenerator<'a> {
sorted_dep_vec
.into_iter()
.fold(air_tree, |then, (dep_key, dep_variant)| {
if (!params_empty
// if the dependency is the same as the function we're hoisting
// or we hoisted it, then skip it
&& hoisted_functions.iter().any(|(generic, variant)| {
generic == &dep_key && variant == &dep_variant
}))
if
// if the dependency is the same as the function we're hoisting
// or we hoisted it, then skip it
hoisted_functions
.iter()
.any(|(generic, variant)| generic == &dep_key && variant == &dep_variant)
|| (&dep_key == key && &dep_variant == variant)
{
return then;
@ -3871,18 +3853,13 @@ impl<'a> CodeGenerator<'a> {
// In the case of zero args, we need to hoist the dependency function to the top of the zero arg function
// The dependency we are hoisting should have an equal path to the function we hoisted
// if we are going to hoist it
if &dep_path.common_ancestor(func_path) == func_path || params_empty {
if &dep_path.common_ancestor(func_path) == func_path {
match dep_function.clone() {
HoistableFunction::Function {
body: mut dep_air_tree,
deps: dependency_deps,
params: dependent_params,
} => {
if dependent_params.is_empty() {
// continue for zero arg functions. They are treated like global hoists.
return then;
}
let is_dependent_recursive = dependency_deps
.iter()
.any(|(key, variant)| &dep_key == key && &dep_variant == variant);
@ -3898,9 +3875,7 @@ impl<'a> CodeGenerator<'a> {
dependent_params.clone()
};
if !params_empty {
hoisted_functions.push((dep_key.clone(), dep_variant.clone()));
}
hoisted_functions.push((dep_key.clone(), dep_variant.clone()));
AirTree::define_func(
&dep_key.function_name,
@ -3920,9 +3895,7 @@ impl<'a> CodeGenerator<'a> {
modify_cyclic_calls(body, &dep_key, &self.cyclic_functions);
}
if !params_empty {
hoisted_functions.push((dep_key.clone(), dep_variant.clone()));
}
hoisted_functions.push((dep_key.clone(), dep_variant.clone()));
AirTree::define_cyclic_func(
&dep_key.function_name,
@ -4048,6 +4021,7 @@ impl<'a> CodeGenerator<'a> {
let (path, _) = func_variants.get_mut("").unwrap();
*path = path.common_ancestor(tree_path);
} else {
// Shortcut path for compiler generated functions
let CodeGenFunction::Function { body, params } = code_gen_func else {
unreachable!()
};
@ -4245,8 +4219,52 @@ impl<'a> CodeGenerator<'a> {
}
.into(),
)),
ValueConstructorVariant::ModuleConstant { .. } => {
unreachable!("{:#?}, {}", constructor, name)
ValueConstructorVariant::ModuleConstant { module, name, .. } => {
let access_key = FunctionAccessKey {
module_name: module.clone(),
function_name: name.clone(),
};
let definition = self
.constants
.get(&access_key)
.unwrap_or_else(|| panic!("unknown constant {module}.{name}"));
let mut value =
AirTree::no_op(self.build(definition, &access_key.module_name, &[]));
value.traverse_tree_with(
&mut |air_tree, _| {
erase_opaque_type_operations(air_tree, &self.data_types);
},
true,
);
value = self.hoist_functions_to_validator(value);
let term = self
.uplc_code_gen(value.to_vec())
.constr_fields_exposer()
.constr_index_exposer();
let mut program =
self.new_program(self.special_functions.apply_used_functions(term));
let mut interner = CodeGenInterner::new();
interner.program(&mut program);
let eval_program: Program<NamedDeBruijn> =
program.remove_no_inlines().try_into().unwrap();
Some(
eval_program
.eval(ExBudget::max())
.result()
.unwrap_or_else(|e| panic!("Failed to evaluate constant: {e:#?}"))
.try_into()
.unwrap(),
)
}
ValueConstructorVariant::ModuleFn {
name: func_name,
@ -4340,10 +4358,7 @@ impl<'a> CodeGenerator<'a> {
.apply(Term::integer(constr_index.into()))
.apply(term);
let mut program: Program<Name> = Program {
version: (1, 0, 0),
term,
};
let mut program = self.new_program(term);
let mut interner = CodeGenInterner::new();
@ -4585,58 +4600,8 @@ impl<'a> CodeGenerator<'a> {
} else {
let term = arg_stack.pop().unwrap();
// How we handle zero arg anon functions has changed
// We now delay zero arg anon functions and force them on a call operation
match term.pierce_no_inlines() {
Term::Var(name) => {
let zero_arg_functions = self.zero_arg_functions.clone();
let text = &name.text;
if let Some((_, air_vec)) = zero_arg_functions.iter().find(
|(
(
FunctionAccessKey {
module_name,
function_name,
},
variant,
),
_,
)| {
let name_module =
format!("{module_name}_{function_name}{variant}");
let name = format!("{function_name}{variant}");
text == &name || text == &name_module
},
) {
let mut term = self.uplc_code_gen(air_vec.clone());
term = term.constr_fields_exposer().constr_index_exposer();
let mut program: Program<Name> = Program {
version: (1, 0, 0),
term: self.special_functions.apply_used_functions(term),
};
let mut interner = CodeGenInterner::new();
interner.program(&mut program);
let eval_program: Program<NamedDeBruijn> =
program.remove_no_inlines().try_into().unwrap();
let result = eval_program.eval(ExBudget::max()).result();
let evaluated_term: Term<NamedDeBruijn> = result.unwrap_or_else(|e| {
panic!("Evaluated a zero argument function and received this error: {e:#?}")
});
Some(evaluated_term.try_into().unwrap())
} else {
Some(term.force())
}
}
Term::Var(_) => Some(term.force()),
Term::Delay(inner_term) => Some(inner_term.as_ref().clone()),
Term::Apply { .. } => Some(term.force()),
_ => unreachable!(
@ -4826,6 +4791,10 @@ impl<'a> CodeGenerator<'a> {
func_body = func_body.lambda(param.clone());
}
if params.is_empty() {
func_body = func_body.delay();
}
if !recursive {
term = term.lambda(func_name).apply(func_body.lambda(NO_INLINE));
@ -4942,10 +4911,7 @@ impl<'a> CodeGenerator<'a> {
};
if extract_constant(term.pierce_no_inlines()).is_some() {
let mut program: Program<Name> = Program {
version: (1, 0, 0),
term,
};
let mut program = self.new_program(term);
let mut interner = CodeGenInterner::new();
@ -4970,10 +4936,7 @@ impl<'a> CodeGenerator<'a> {
if extract_constant(term.pierce_no_inlines()).is_some() {
term = builder::convert_type_to_data(term, &tipo);
let mut program: Program<Name> = Program {
version: (1, 0, 0),
term,
};
let mut program = self.new_program(term);
let mut interner = CodeGenInterner::new();
@ -5419,10 +5382,7 @@ impl<'a> CodeGenerator<'a> {
let maybe_const = extract_constant(item.pierce_no_inlines());
maybe_const.is_some()
}) {
let mut program: Program<Name> = Program {
version: (1, 0, 0),
term,
};
let mut program = self.new_program(term);
let mut interner = CodeGenInterner::new();

View File

@ -4,8 +4,8 @@ use super::{
};
use crate::{
ast::{
Constant, DataTypeKey, FunctionAccessKey, Pattern, Span, TraceLevel, TypedArg,
TypedAssignmentKind, TypedClause, TypedDataType, TypedPattern,
DataTypeKey, FunctionAccessKey, Pattern, Span, TraceLevel, TypedArg, TypedAssignmentKind,
TypedClause, TypedDataType, TypedPattern,
},
expr::TypedExpr,
line_numbers::{LineColumn, LineNumbers},
@ -287,15 +287,6 @@ impl Default for CodeGenSpecialFuncs {
}
}
pub fn constants_ir(literal: &Constant) -> AirTree {
match literal {
Constant::Int { value, .. } => AirTree::int(value),
Constant::String { value, .. } => AirTree::string(value),
Constant::ByteArray { bytes, .. } => AirTree::byte_array(bytes.clone()),
Constant::CurvePoint { point, .. } => AirTree::curve(*point.as_ref()),
}
}
pub fn get_generic_variant_name(t: &Rc<Type>) -> String {
let uplc_type = t.get_uplc_type();

View File

@ -1,10 +1,8 @@
use chumsky::prelude::*;
use uplc::machine::runtime::Compressable;
use crate::{
ast,
parser::{annotation, error::ParseError, literal, token::Token, utils},
parser::{annotation, error::ParseError, expr::pure_expression, token::Token, utils},
};
use chumsky::prelude::*;
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
utils::optional_flag(Token::Pub)
@ -16,7 +14,11 @@ pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError
.or_not(),
)
.then_ignore(just(Token::Equal))
.then(value())
.then(recursive(|sequence| {
recursive(|expression| pure_expression(sequence.clone(), expression))
.then(sequence.repeated())
.foldl(|current, next| current.append_in_sequence(next))
}))
.map_with_span(|(((public, name), annotation), value), span| {
ast::UntypedDefinition::ModuleConstant(ast::ModuleConstant {
doc: None,
@ -24,67 +26,11 @@ pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError
public,
name,
annotation,
value: Box::new(value),
tipo: (),
value,
})
})
}
pub fn value() -> impl Parser<Token, ast::Constant, Error = ParseError> {
let constant_string_parser =
select! {Token::String {value} => value}.map_with_span(|value, span| {
ast::Constant::String {
location: span,
value,
}
});
let constant_int_parser =
literal::int().map_with_span(|(value, base), location| ast::Constant::Int {
location,
value,
base,
});
let constant_bytearray_parser = literal::bytearray(
|bytes, preferred_format, curve, location, emit| match curve {
Some(curve @ ast::CurveType::Bls12_381(point)) => {
let point = match point {
ast::Bls12_381PointType::G1 => {
blst::blst_p1::uncompress(&bytes).map(ast::Bls12_381Point::G1)
}
ast::Bls12_381PointType::G2 => {
blst::blst_p2::uncompress(&bytes).map(ast::Bls12_381Point::G2)
}
};
let point = point.unwrap_or_else(|_err| {
emit(ParseError::point_not_on_curve(curve, location));
ast::Bls12_381Point::default()
});
ast::Constant::CurvePoint {
location,
point: ast::Curve::Bls12_381(point).into(),
preferred_format,
}
}
None => ast::Constant::ByteArray {
location,
bytes,
preferred_format,
},
},
);
choice((
constant_string_parser,
constant_int_parser,
constant_bytearray_parser,
))
}
#[cfg(test)]
mod tests {
use crate::assert_definition;

View File

@ -49,6 +49,5 @@ ModuleConstant(
),
preferred_format: HexadecimalString,
},
tipo: (),
},
)

View File

@ -91,6 +91,5 @@ ModuleConstant(
),
preferred_format: HexadecimalString,
},
tipo: (),
},
)

View File

@ -1,9 +1,8 @@
use chumsky::prelude::*;
use crate::{
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
use chumsky::prelude::*;
pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
recursive(|sequence| {

View File

@ -1,6 +1,6 @@
use crate::{
ast,
parser::{definition, error::ParseError, token::Token},
parser::{error::ParseError, literal, token::Token},
};
use chumsky::prelude::*;
@ -12,13 +12,11 @@ pub fn parser() -> impl Parser<Token, ast::UntypedClauseGuard, Error = ParseErro
}
.map_with_span(|_name, _span| ast::UntypedClauseGuard {});
let constant_parser = definition::constant::value().map(|_| ast::UntypedClauseGuard {});
let block_parser = expression
.clone()
.delimited_by(just(Token::LeftParen), just(Token::RightParen));
let leaf_parser = choice((var_parser, constant_parser, block_parser)).boxed();
let leaf_parser = choice((var_parser, constant(), block_parser)).boxed();
let unary_op = just(Token::Bang);
@ -57,3 +55,20 @@ pub fn parser() -> impl Parser<Token, ast::UntypedClauseGuard, Error = ParseErro
.foldl(|_left, (_tok, _right)| ast::UntypedClauseGuard {})
})
}
// NOTE: Kept only for backward-compatibility, so that we can produce a nicer
// error message when a clause guard is encountered; Aiken no longer supports
// clause guards.
pub fn constant() -> impl Parser<Token, ast::UntypedClauseGuard, Error = ParseError> {
    // Each literal kind is recognised but its payload is discarded: the sole
    // purpose here is to match the old grammar, not to build a value.
    let string = select! {Token::String {value} => value}.map(|_| ast::UntypedClauseGuard {});
    let int = literal::int().map(|_| ast::UntypedClauseGuard {});
    let bytes = literal::bytearray(|_, _, _, _, _| ast::UntypedClauseGuard {});

    choice((string, int, bytes))
}

View File

@ -5,8 +5,8 @@ use serde::{Deserialize, Serialize};
#[serde(rename_all = "camelCase")]
pub enum PlutusVersion {
V1,
#[default]
V2,
#[default]
V3,
}

View File

@ -1,8 +1,8 @@
use self::{environment::Environment, pretty::Printer};
use crate::{
ast::{
well_known, Annotation, Constant, DataType, DataTypeKey, DefinitionLocation, ModuleKind,
Span, TypedDataType,
well_known, Annotation, DataType, DataTypeKey, DefinitionLocation, ModuleKind, Span,
TypedDataType,
},
tipo::fields::FieldMap,
};
@ -1216,7 +1216,7 @@ pub enum ValueConstructorVariant {
ModuleConstant {
location: Span,
module: String,
literal: Constant,
name: String,
},
/// A function belonging to the module
@ -1262,11 +1262,14 @@ impl ValueConstructorVariant {
location: *location,
},
// TODO: remove this clone with an rc clone
Self::ModuleConstant {
literal, location, ..
name,
module,
location,
..
} => ModuleValueConstructor::Constant {
literal: literal.clone(),
name: name.clone(),
module: module.clone(),
location: *location,
},
@ -1400,8 +1403,9 @@ pub enum ModuleValueConstructor {
},
Constant {
literal: Constant,
location: Span,
module: String,
name: String,
},
}

View File

@ -11,15 +11,16 @@ use super::{
use crate::{
ast::{
self, Annotation, ArgName, AssignmentKind, AssignmentPattern, BinOp, Bls12_381Point,
ByteArrayFormatPreference, CallArg, Constant, Curve, Function, IfBranch,
LogicalOpChainKind, Pattern, RecordUpdateSpread, Span, TraceKind, TraceLevel, Tracing,
TypedArg, TypedCallArg, TypedClause, TypedIfBranch, TypedPattern, TypedRecordUpdateArg,
TypedValidator, UnOp, UntypedArg, UntypedAssignmentKind, UntypedClause, UntypedFunction,
UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg,
ByteArrayFormatPreference, CallArg, Curve, Function, IfBranch, LogicalOpChainKind, Pattern,
RecordUpdateSpread, Span, TraceKind, TraceLevel, Tracing, TypedArg, TypedCallArg,
TypedClause, TypedIfBranch, TypedPattern, TypedRecordUpdateArg, TypedValidator, UnOp,
UntypedArg, UntypedAssignmentKind, UntypedClause, UntypedFunction, UntypedIfBranch,
UntypedPattern, UntypedRecordUpdateArg,
},
builtins::{from_default_function, BUILTIN},
expr::{FnStyle, TypedExpr, UntypedExpr},
format,
parser::token::Base,
tipo::{fields::FieldMap, DefaultFunction, PatternConstructor, TypeVar},
IdGenerator,
};
@ -434,8 +435,8 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
UntypedExpr::UInt {
location,
value,
base: _,
} => Ok(self.infer_uint(value, location)),
base,
} => Ok(self.infer_uint(value, base, location)),
UntypedExpr::Sequence {
expressions,
@ -550,8 +551,8 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
UntypedExpr::CurvePoint {
location,
point,
preferred_format: _,
} => self.infer_curve_point(*point, location),
preferred_format,
} => self.infer_curve_point(*point, preferred_format, location),
UntypedExpr::RecordUpdate {
location,
@ -592,10 +593,16 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
location,
bytes,
tipo: Type::byte_array(),
preferred_format,
})
}
fn infer_curve_point(&mut self, curve: Curve, location: Span) -> Result<TypedExpr, Error> {
fn infer_curve_point(
&mut self,
curve: Curve,
preferred_format: ByteArrayFormatPreference,
location: Span,
) -> Result<TypedExpr, Error> {
let tipo = match curve {
Curve::Bls12_381(point) => match point {
Bls12_381Point::G1(_) => Type::g1_element(),
@ -607,6 +614,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
location,
point: curve.into(),
tipo,
preferred_format,
})
}
@ -1176,7 +1184,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
Ok((typed_arg, extra_assignment))
}
fn infer_assignment(
pub fn infer_assignment(
&mut self,
untyped_pattern: UntypedPattern,
untyped_value: UntypedExpr,
@ -1466,64 +1474,6 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
Ok(typed_patterns)
}
// TODO: extract the type annotation checking into a infer_module_const
// function that uses this function internally
pub fn infer_const(
&mut self,
annotation: &Option<Annotation>,
value: Constant,
) -> Result<Constant, Error> {
let inferred = match value {
Constant::Int {
location,
value,
base,
} => Ok(Constant::Int {
location,
value,
base,
}),
Constant::String { location, value } => Ok(Constant::String { location, value }),
Constant::ByteArray {
location,
bytes,
preferred_format,
} => {
let _ = self.infer_bytearray(bytes.clone(), preferred_format, location)?;
Ok(Constant::ByteArray {
location,
bytes,
preferred_format,
})
}
Constant::CurvePoint {
location,
point,
preferred_format,
} => Ok(Constant::CurvePoint {
location,
point,
preferred_format,
}),
}?;
// Check type annotation is accurate.
if let Some(ann) = annotation {
let const_ann = self.type_from_annotation(ann)?;
self.unify(
const_ann.clone(),
inferred.tipo(),
inferred.location(),
const_ann.is_data(),
)?;
};
Ok(inferred)
}
fn infer_if(
&mut self,
branches: Vec1<UntypedIfBranch>,
@ -1765,11 +1715,12 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
Ok((args, body, return_type))
}
fn infer_uint(&mut self, value: String, location: Span) -> TypedExpr {
fn infer_uint(&mut self, value: String, base: Base, location: Span) -> TypedExpr {
TypedExpr::UInt {
location,
value,
tipo: Type::int(),
base,
}
}
@ -2778,6 +2729,7 @@ fn diagnose_expr(expr: TypedExpr) -> TypedExpr {
tipo: Type::byte_array(),
bytes: vec![],
location,
preferred_format: ByteArrayFormatPreference::HexadecimalString,
},
},
],

View File

@ -9,9 +9,10 @@ use crate::{
ast::{
Annotation, ArgName, ArgVia, DataType, Definition, Function, ModuleConstant, ModuleKind,
RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedArg, TypedDefinition,
TypedModule, TypedValidator, UntypedArg, UntypedDefinition, UntypedModule,
TypedModule, TypedValidator, UntypedArg, UntypedDefinition, UntypedModule, UntypedPattern,
UntypedValidator, Use, Validator,
},
expr::{TypedExpr, UntypedAssignmentKind},
tipo::{expr::infer_function, Span, Type, TypeVar},
IdGenerator,
};
@ -619,10 +620,22 @@ fn infer_definition(
annotation,
public,
value,
tipo: _,
}) => {
let typed_expr =
ExprTyper::new(environment, tracing).infer_const(&annotation, *value)?;
let typed_assignment = ExprTyper::new(environment, tracing).infer_assignment(
UntypedPattern::Var {
location,
name: name.clone(),
},
value,
UntypedAssignmentKind::Let { backpassing: false },
&annotation,
location,
)?;
let typed_expr = match typed_assignment {
TypedExpr::Assignment { value, .. } => value,
_ => unreachable!("infer_assignment inferred something else than an assignment?"),
};
let tipo = typed_expr.tipo();
@ -630,7 +643,7 @@ fn infer_definition(
public,
variant: ValueConstructorVariant::ModuleConstant {
location,
literal: typed_expr.clone(),
name: name.to_owned(),
module: module_name.to_owned(),
},
tipo: tipo.clone(),
@ -650,8 +663,7 @@ fn infer_definition(
name,
annotation,
public,
value: Box::new(typed_expr),
tipo,
value: *typed_expr,
}))
}
}

View File

@ -1,8 +1,6 @@
use crate::{github::repo::LatestRelease, package_name::PackageName, paths, Error};
use aiken_lang::{
ast::{
Annotation, ByteArrayFormatPreference, Constant, ModuleConstant, Span, UntypedDefinition,
},
ast::{Annotation, ByteArrayFormatPreference, ModuleConstant, Span, UntypedDefinition},
expr::UntypedExpr,
parser::token::Base,
};
@ -47,7 +45,7 @@ pub enum SimpleExpr {
}
impl SimpleExpr {
pub fn as_untyped_expr(&self) -> UntypedExpr {
pub fn as_untyped_expr(&self, annotation: &Annotation) -> UntypedExpr {
match self {
SimpleExpr::Bool(b) => UntypedExpr::Var {
location: Span::empty(),
@ -65,50 +63,74 @@ impl SimpleExpr {
bytes: bs.to_vec(),
preferred_format: *preferred_format,
},
SimpleExpr::List(es) => UntypedExpr::List {
location: Span::empty(),
elements: es.iter().map(|e| e.as_untyped_expr()).collect(),
tail: None,
SimpleExpr::List(es) => match annotation {
Annotation::Tuple { elems, .. } => UntypedExpr::Tuple {
location: Span::empty(),
elems: es
.iter()
.zip(elems)
.map(|(e, ann)| e.as_untyped_expr(ann))
.collect(),
},
Annotation::Constructor {
module,
name,
arguments,
..
} if name == "List" && module.is_none() => UntypedExpr::List {
location: Span::empty(),
elements: es
.iter()
.map(|e| e.as_untyped_expr(arguments.first().unwrap()))
.collect(),
tail: None,
},
_ => unreachable!(
"unexpected annotation for simple list expression: {annotation:#?}"
),
},
}
}
pub fn as_definition(&self, identifier: &str) -> UntypedDefinition {
pub fn as_annotation(&self) -> Annotation {
let location = Span::empty();
match self {
SimpleExpr::Bool(..) => Annotation::boolean(location),
SimpleExpr::Int(_) => Annotation::int(location),
SimpleExpr::ByteArray(_, _) => Annotation::bytearray(location),
SimpleExpr::List(elems) => {
let elems = elems.iter().map(|e| e.as_annotation()).collect::<Vec<_>>();
let (value, annotation) = match self {
SimpleExpr::Bool(..) => todo!("requires https://github.com/aiken-lang/aiken/pull/992"),
SimpleExpr::Int(i) => (
// TODO: Replace with 'self.as_untyped_expr()' after https://github.com/aiken-lang/aiken/pull/992
Constant::Int {
location,
value: format!("{i}"),
base: Base::Decimal {
numeric_underscore: false,
},
},
Some(Annotation::int(location)),
),
SimpleExpr::ByteArray(bs, preferred_format) => (
// TODO: Replace with 'self.as_untyped_expr()' after https://github.com/aiken-lang/aiken/pull/992
Constant::ByteArray {
location,
bytes: bs.to_vec(),
preferred_format: *preferred_format,
},
Some(Annotation::bytearray(location)),
),
SimpleExpr::List(..) => todo!("requires https://github.com/aiken-lang/aiken/pull/992"),
};
let (is_uniform, inner) =
elems
.iter()
.fold((true, None), |(matches, ann), a| match ann {
None => (matches, Some(a)),
Some(b) => (matches && a == b, ann),
});
if is_uniform {
Annotation::list(
inner.cloned().unwrap_or_else(|| Annotation::data(location)),
location,
)
} else {
Annotation::tuple(elems, location)
}
}
}
}
pub fn as_definition(&self, identifier: &str) -> UntypedDefinition {
let annotation = self.as_annotation();
let value = self.as_untyped_expr(&annotation);
UntypedDefinition::ModuleConstant(ModuleConstant {
location: Span::empty(),
doc: None,
public: true,
name: identifier.to_string(),
annotation,
value: Box::new(value),
tipo: (),
annotation: Some(annotation),
value,
})
}
}

View File

@ -36,7 +36,7 @@ use aiken_lang::{
TypedFunction, UntypedDefinition,
},
builtins,
expr::UntypedExpr,
expr::{TypedExpr, UntypedExpr},
format::{Formatter, MAX_COLUMNS},
gen_uplc::CodeGenerator,
line_numbers::LineNumbers,
@ -98,6 +98,7 @@ where
checks_count: Option<usize>,
event_listener: T,
functions: IndexMap<FunctionAccessKey, TypedFunction>,
constants: IndexMap<FunctionAccessKey, TypedExpr>,
data_types: IndexMap<DataTypeKey, TypedDataType>,
module_sources: HashMap<String, (String, LineNumbers)>,
}
@ -149,6 +150,7 @@ where
checks_count: None,
event_listener,
functions,
constants: IndexMap::new(),
data_types,
module_sources: HashMap::new(),
}
@ -158,6 +160,7 @@ where
CodeGenerator::new(
self.config.plutus,
utils::indexmap::as_ref_values(&self.functions),
utils::indexmap::as_ref_values(&self.constants),
utils::indexmap::as_ref_values(&self.data_types),
utils::indexmap::as_str_ref_values(&self.module_types),
utils::indexmap::as_str_ref_values(&self.module_sources),
@ -805,10 +808,13 @@ where
&mut self.module_sources,
&mut self.module_types,
&mut self.functions,
&mut self.constants,
&mut self.data_types,
)?;
if our_modules.contains(checked_module.name.as_str()) {
if our_modules.contains(checked_module.name.as_str())
&& checked_module.name.as_str() != ast::CONFIG_MODULE
{
self.warnings.extend(warnings);
}

View File

@ -5,6 +5,7 @@ use aiken_lang::{
Tracing, TypedDataType, TypedFunction, TypedModule, TypedValidator, UntypedModule,
Validator,
},
expr::TypedExpr,
line_numbers::LineNumbers,
parser::extra::{comments_before, Comment, ModuleExtra},
tipo::TypeInfo,
@ -49,6 +50,7 @@ impl ParsedModule {
module_sources: &mut HashMap<String, (String, LineNumbers)>,
module_types: &mut HashMap<String, TypeInfo>,
functions: &mut IndexMap<FunctionAccessKey, TypedFunction>,
constants: &mut IndexMap<FunctionAccessKey, TypedExpr>,
data_types: &mut IndexMap<DataTypeKey, TypedDataType>,
) -> Result<(CheckedModule, Vec<Warning>), Error> {
let mut warnings = Vec::new();
@ -92,7 +94,7 @@ impl ParsedModule {
module_types.insert(self.name.clone(), ast.type_info.clone());
// Register function definitions & data-types for easier access later.
ast.register_definitions(functions, data_types);
ast.register_definitions(functions, constants, data_types);
Ok((
CheckedModule {

View File

@ -18,8 +18,8 @@ description: "Code:\n\npub fn add(a: Int, b: Int) -> Int {\n a + b\n}\n"
}
}
],
"compiledCode": "500100002322337000046eb4004dd68009",
"hash": "bcd6700b4dba798a7d19c5769ef3deb21423f8809594a6942860dd1f",
"compiledCode": "500101002322337000046eb4004dd68009",
"hash": "b8374597a772cef80d891b7f6a03588e10cc19b780251228ba4ce9c6",
"definitions": {
"Int": {
"dataType": "integer"

View File

@ -18,8 +18,8 @@ description: "Code:\n\npub type Foo<a> {\n Empty\n Bar(a, Foo<a>)\n}\n\npub fn
}
}
],
"compiledCode": "5901d501000032323232323222323232323253330083002300937540062a666010600460126ea801052000001001132323232533300b3004300c375400c2646464a66601c600e601e6ea80284c8cdc019b80003375a60260026600c0046026602800260206ea8028010c044c048008dd6980800098069baa006001132533300b3005300c375400c2a666016600860186ea801c4c8cdc01bad3010001330034c103d879800030103011001300d375400e00200226466e00dd698078009980118079808000a60103d8798000300c375400a600200244464646464a66601e601260206ea800854ccc03cc024c040dd50018a4000002002264a66601e601060206ea80084c8c8c94ccc048c02cc04cdd500309919b80337000066eb4c05c004ccc02c02c008c05cc060004c050dd5003002180a980b0011bad301400130113754004002264a66601e601260206ea800854ccc03cc020c040dd500189919b80375a6028002666010010980103d8798000301430150013011375400600200226466e00dd698098009998038039809980a000a60103d8798000301037540026022004602060220026601c0046601c00297ae0370e90011b8748000c024008c020c024004cc018008cc0180052f5c0ae6955ceaab9e5740ae855d101",
"hash": "dca86b6e092019b67ef310ba8360682d7bf8284cc728c6b525fb0b0d",
"compiledCode": "5901d501010032323232323222323232323253330083002300937540062a666010600460126ea801052000001001132323232533300b3004300c375400c2646464a66601c600e601e6ea80284c8cdc019b80003375a60260026600c0046026602800260206ea8028010c044c048008dd6980800098069baa006001132533300b3005300c375400c2a666016600860186ea801c4c8cdc01bad3010001330034c103d879800030103011001300d375400e00200226466e00dd698078009980118079808000a60103d8798000300c375400a600200244464646464a66601e601260206ea800854ccc03cc024c040dd50018a4000002002264a66601e601060206ea80084c8c8c94ccc048c02cc04cdd500309919b80337000066eb4c05c004ccc02c02c008c05cc060004c050dd5003002180a980b0011bad301400130113754004002264a66601e601260206ea800854ccc03cc020c040dd500189919b80375a6028002666010010980103d8798000301430150013011375400600200226466e00dd698098009998038039809980a000a60103d8798000301037540026022004602060220026601c0046601c00297ae0370e90011b8748000c024008c020c024004cc018008cc0180052f5c0ae6955ceaab9e5740ae855d101",
"hash": "46254b889c878fb25e55ad7e9eb3d331770a55defec65749c9504581",
"definitions": {
"Int": {
"dataType": "integer"

View File

@ -62,7 +62,8 @@ mod test {
let mut functions = builtins::prelude_functions(&id_gen, &module_types);
let mut data_types = builtins::prelude_data_types(&id_gen);
ast.register_definitions(&mut functions, &mut data_types);
let mut constants = IndexMap::new();
ast.register_definitions(&mut functions, &mut constants, &mut data_types);
let mut module_sources = HashMap::new();
module_sources.insert(
@ -87,6 +88,7 @@ mod test {
let mut generator = CodeGenerator::new(
PlutusVersion::default(),
utils::indexmap::as_ref_values(&functions),
utils::indexmap::as_ref_values(&constants),
utils::indexmap::as_ref_values(&data_types),
utils::indexmap::as_str_ref_values(&module_types),
utils::indexmap::as_str_ref_values(&module_sources),

View File

@ -57,7 +57,7 @@ fn assert_uplc(source_code: &str, expected: Term<Name>, should_fail: bool) {
let debruijn_program: Program<DeBruijn> = program.try_into().unwrap();
let expected = Program {
version: (1, 0, 0),
version: (1, 1, 0),
term: expected,
};
@ -97,7 +97,7 @@ fn assert_uplc(source_code: &str, expected: Term<Name>, should_fail: bool) {
let debruijn_program: Program<DeBruijn> = program.try_into().unwrap();
let expected = Program {
version: (1, 0, 0),
version: (1, 1, 0),
term: expected,
};
@ -2184,23 +2184,101 @@ fn acceptance_test_23_to_list() {
}
"#;
let do_insert = Term::var("elems")
.delayed_choose_list(
Term::mk_cons()
.apply(
Term::mk_pair_data()
.apply(Term::b_data().apply(Term::var("k")))
.apply(Term::i_data().apply(Term::var("v"))),
)
.apply(Term::empty_map()),
Term::head_list()
.apply(Term::var("elems"))
.as_var("elem_0", |elem_0| {
Term::tail_list()
.apply(Term::var("elems"))
.as_var("rest", |rest| {
Term::un_b_data()
.apply(Term::fst_pair().apply(Term::Var(elem_0.clone())))
.as_var("k2", |k2| {
Term::un_i_data()
.apply(Term::snd_pair().apply(Term::Var(elem_0.clone())))
.as_var("v2", |v2| {
Term::equals_bytestring()
.apply(Term::var("k"))
.apply(Term::Var(k2.clone()))
.delayed_if_then_else(
Term::mk_cons()
.apply(
Term::mk_pair_data()
.apply(
Term::b_data()
.apply(Term::var("k")),
)
.apply(
Term::i_data()
.apply(Term::var("v")),
),
)
.apply(Term::Var(rest.clone())),
Term::mk_cons()
.apply(
Term::mk_pair_data()
.apply(
Term::b_data()
.apply(Term::Var(k2)),
)
.apply(
Term::i_data()
.apply(Term::Var(v2)),
),
)
.apply(
Term::var("do_insert")
.apply(Term::var("do_insert"))
.apply(Term::Var(rest)),
),
)
})
})
})
}),
)
.lambda("elems")
.lambda("do_insert");
let insert = do_insert
.as_var("do_insert", |do_insert| {
Term::Var(do_insert.clone())
.apply(Term::Var(do_insert))
.apply(Term::var("m"))
})
.lambda("v")
.lambda("k")
.lambda("m");
assert_uplc(
src,
Term::equals_data()
.apply(Term::map_data().apply(Term::map_values(vec![
Constant::ProtoPair(
Type::Data,
Type::Data,
Constant::Data(Data::bytestring("foo".as_bytes().to_vec())).into(),
Constant::Data(Data::integer(42.into())).into(),
.apply(
Term::map_data().apply(
insert
.as_var("insert", |insert| {
Term::Var(insert.clone())
.apply(
Term::Var(insert)
.apply(Term::empty_map())
.apply(Term::byte_string("foo".as_bytes().to_vec()))
.apply(Term::integer(42.into())),
)
.apply(Term::byte_string("bar".as_bytes().to_vec()))
.apply(Term::integer(14.into()))
.delay()
})
.force(),
),
Constant::ProtoPair(
Type::Data,
Type::Data,
Constant::Data(Data::bytestring("bar".as_bytes().to_vec())).into(),
Constant::Data(Data::integer(14.into())).into(),
),
])))
)
.apply(Term::map_data().apply(Term::map_values(vec![
Constant::ProtoPair(
Type::Data,
@ -3021,9 +3099,7 @@ fn acceptance_test_29_union_pair() {
inner: Pairs<key, value>,
}
pub fn new() -> AssocList<key, value> {
AssocList { inner: [] }
}
const empty_list: AssocList<key, value> = AssocList { inner: [] }
pub fn from_list(xs: Pairs<key, value>) -> AssocList<key, value> {
AssocList { inner: do_from_list(xs) }
@ -3078,88 +3154,42 @@ fn acceptance_test_29_union_pair() {
}
}
fn fixture_1() {
new()
const fixture_1 = {
empty_list
|> insert("foo", 42)
|> insert("bar", 14)
}
test union_1() {
union(fixture_1(), new()) == fixture_1()
union(fixture_1, empty_list) == fixture_1
}
"#;
assert_uplc(
src,
Term::equals_data()
.apply(
Term::map_data().apply(
Term::var("union")
.lambda("union")
.apply(
Term::var("do_union")
.apply(Term::var("left"))
.apply(Term::var("right"))
.lambda("right")
.lambda("left"),
)
.lambda("do_union")
.apply(Term::var("do_union").apply(Term::var("do_union")))
.lambda("do_union")
.apply(
Term::var("left")
.delayed_choose_list(
Term::var("right"),
Term::var("do_union")
.apply(Term::var("do_union"))
.apply(Term::var("rest"))
.apply(
Term::var("do_insert")
.apply(Term::var("right"))
.apply(Term::var("k"))
.apply(Term::var("v")),
)
.lambda("v")
.apply(
Term::un_i_data()
.apply(Term::snd_pair().apply(Term::var("pair"))),
)
.lambda("k")
.apply(
Term::un_b_data()
.apply(Term::fst_pair().apply(Term::var("pair"))),
)
.lambda("rest")
.apply(Term::tail_list().apply(Term::var("left")))
.lambda("pair")
.apply(Term::head_list().apply(Term::var("left"))),
)
.lambda("right")
.lambda("left")
.lambda("do_union"),
)
.lambda("do_insert")
.apply(
Term::var("do_insert")
.apply(Term::var("do_insert"))
.apply(Term::var("elems"))
.lambda("do_insert")
.apply(
Term::var("elems")
.delayed_choose_list(
Term::mk_cons()
.apply(
Term::mk_pair_data()
.apply(Term::b_data().apply(Term::var("k")))
.apply(
Term::i_data().apply(Term::var("v")),
),
)
.apply(Term::empty_map()),
let do_insert = Term::var("elems")
.delayed_choose_list(
Term::mk_cons()
.apply(
Term::mk_pair_data()
.apply(Term::b_data().apply(Term::var("k")))
.apply(Term::i_data().apply(Term::var("v"))),
)
.apply(Term::empty_map()),
Term::head_list()
.apply(Term::var("elems"))
.as_var("elem_0", |elem_0| {
Term::tail_list()
.apply(Term::var("elems"))
.as_var("rest", |rest| {
Term::un_b_data()
.apply(Term::fst_pair().apply(Term::Var(elem_0.clone())))
.as_var("k2", |k2| {
Term::un_i_data()
.apply(Term::snd_pair().apply(Term::Var(elem_0.clone())))
.as_var("v2", |v2| {
Term::equals_bytestring()
.apply(Term::var("k"))
.apply(Term::var("k2"))
.apply(Term::Var(k2.clone()))
.delayed_if_then_else(
Term::mk_cons()
.apply(
@ -3173,72 +3203,128 @@ fn acceptance_test_29_union_pair() {
.apply(Term::var("v")),
),
)
.apply(Term::var("rest")),
.apply(Term::Var(rest.clone())),
Term::mk_cons()
.apply(
Term::mk_pair_data()
.apply(
Term::b_data()
.apply(Term::var("k2")),
.apply(Term::Var(k2)),
)
.apply(
Term::i_data()
.apply(Term::var("v2")),
.apply(Term::Var(v2)),
),
)
.apply(
Term::var("do_insert")
.apply(Term::var("do_insert"))
.apply(Term::var("rest")),
.apply(Term::Var(rest)),
),
)
.lambda("v2")
.apply(Term::un_i_data().apply(
Term::snd_pair().apply(Term::var("pair")),
))
.lambda("k2")
.apply(Term::un_b_data().apply(
Term::fst_pair().apply(Term::var("pair")),
))
.lambda("rest")
.apply(Term::tail_list().apply(Term::var("elems")))
.lambda("pair")
.apply(Term::head_list().apply(Term::var("elems"))),
)
.lambda("elems")
.lambda("do_insert"),
})
})
})
}),
)
.lambda("elems")
.lambda("do_insert");
let do_insert_recurse = do_insert
.as_var("do_insert", |do_insert| {
Term::Var(do_insert.clone())
.apply(Term::Var(do_insert))
.apply(Term::var("elems"))
})
.lambda("v")
.lambda("k")
.lambda("elems");
let insert = Term::var("do_insert")
.apply(Term::var("m"))
.apply(Term::var("k"))
.apply(Term::var("v"))
.lambda("v")
.lambda("k")
.lambda("m");
let empty_list = Term::empty_map();
let fixture = Term::data(Data::map(vec![
(
Data::bytestring(vec![0x66, 0x6f, 0x6f]),
Data::integer(42.into()),
),
(
Data::bytestring(vec![0x62, 0x61, 0x72]),
Data::integer(14.into()),
),
]));
let fixture_unwrapped = Term::Constant(
Constant::ProtoList(
Type::Pair(Type::Data.into(), Type::Data.into()),
vec![
Constant::ProtoPair(
Type::Data,
Type::Data,
Constant::Data(Data::bytestring(vec![0x66, 0x6f, 0x6f])).into(),
Constant::Data(Data::integer(42.into())).into(),
),
Constant::ProtoPair(
Type::Data,
Type::Data,
Constant::Data(Data::bytestring(vec![0x62, 0x61, 0x72])).into(),
Constant::Data(Data::integer(14.into())).into(),
),
],
)
.into(),
);
let do_union = Term::var("left")
.delayed_choose_list(
Term::var("right"),
Term::head_list()
.apply(Term::var("left"))
.as_var("elem_0", |elem_0| {
Term::var("do_union")
.apply(Term::var("do_union"))
.apply(Term::tail_list().apply(Term::var("left")))
.apply(
Term::var("do_insert")
.apply(Term::var("right"))
.apply(
Term::un_b_data()
.apply(Term::fst_pair().apply(Term::Var(elem_0.clone()))),
)
.lambda("v")
.lambda("k")
.lambda("elems"),
.apply(
Term::un_i_data()
.apply(Term::snd_pair().apply(Term::Var(elem_0))),
),
)
.apply(Term::map_values(vec![
Constant::ProtoPair(
Type::Data,
Type::Data,
Constant::Data(Data::bytestring("foo".as_bytes().to_vec())).into(),
Constant::Data(Data::integer(42.into())).into(),
),
Constant::ProtoPair(
Type::Data,
Type::Data,
Constant::Data(Data::bytestring("bar".as_bytes().to_vec())).into(),
Constant::Data(Data::integer(14.into())).into(),
),
]))
.apply(Term::empty_map()),
),
}),
)
.lambda("right")
.lambda("left")
.lambda("do_union");
assert_uplc(
src,
Term::equals_data()
.apply(
Term::map_data().apply(do_union.as_var("do_union", |do_union| {
Term::Var(do_union.clone())
.apply(Term::Var(do_union))
.apply(fixture_unwrapped)
.apply(empty_list)
})),
)
.apply(Term::data(Data::map(vec![
(
Data::bytestring("foo".as_bytes().to_vec()),
Data::integer(42.into()),
),
(
Data::bytestring("bar".as_bytes().to_vec()),
Data::integer(14.into()),
),
]))),
.apply(fixture)
.lambda("insert")
.apply(insert)
.lambda("do_insert")
.apply(do_insert_recurse),
false,
);
}
@ -3250,9 +3336,7 @@ fn acceptance_test_29_union_tuple() {
inner: List<(key, value)>,
}
pub fn new() -> AssocList<key, value> {
AssocList { inner: [] }
}
const empty_list = AssocList { inner: [] }
pub fn from_list(xs: List<(key, value)>) -> AssocList<key, value> {
AssocList { inner: do_from_list(xs) }
@ -3307,14 +3391,14 @@ fn acceptance_test_29_union_tuple() {
}
}
fn fixture_1() {
new()
const fixture_1 = {
empty_list
|> insert("foo", 42)
|> insert("bar", 14)
}
test union_1() {
union(fixture_1(), new()) == fixture_1()
union(fixture_1, empty_list) == fixture_1
}
"#;
@ -5442,6 +5526,14 @@ fn list_clause_with_assign() {
#[test]
fn opaque_value_in_test() {
let src = r#"
const dat: Dat = {
let v = Value { inner: Dict { inner: [Pair("", [Pair(#"aa", 4)] |> Dict)] } }
Dat {
c: 0,
a: v
}
}
pub opaque type Value {
inner: Dict<Dict<Int>>
}
@ -5455,18 +5547,7 @@ fn opaque_value_in_test() {
a: Value
}
pub fn dat_new() -> Dat {
let v = Value { inner: Dict { inner: [Pair("", [Pair(#"aa", 4)] |> Dict)] } }
Dat {
c: 0,
a: v
}
}
test spend() {
let dat = dat_new()
let val = dat.a
expect [Pair(_, amount)] = val.inner.inner

View File

@ -9,6 +9,7 @@ use aiken_lang::{
DataTypeKey, FunctionAccessKey, ModuleKind, TraceLevel, Tracing, TypedDataType,
TypedFunction,
},
expr::TypedExpr,
gen_uplc::CodeGenerator,
line_numbers::LineNumbers,
parser,
@ -28,6 +29,7 @@ pub struct TestProject {
pub package: PackageName,
pub id_gen: IdGenerator,
pub functions: IndexMap<FunctionAccessKey, TypedFunction>,
pub constants: IndexMap<FunctionAccessKey, TypedExpr>,
pub data_types: IndexMap<DataTypeKey, TypedDataType>,
pub module_types: HashMap<String, TypeInfo>,
pub module_sources: HashMap<String, (String, LineNumbers)>,
@ -48,12 +50,14 @@ impl TestProject {
let functions = builtins::prelude_functions(&id_gen, &module_types);
let data_types = builtins::prelude_data_types(&id_gen);
let constants = IndexMap::new();
TestProject {
package,
id_gen,
module_types,
functions,
constants,
data_types,
module_sources: HashMap::new(),
}
@ -63,6 +67,7 @@ impl TestProject {
CodeGenerator::new(
PlutusVersion::default(),
utils::indexmap::as_ref_values(&self.functions),
utils::indexmap::as_ref_values(&self.constants),
utils::indexmap::as_ref_values(&self.data_types),
utils::indexmap::as_str_ref_values(&self.module_types),
utils::indexmap::as_str_ref_values(&self.module_sources),
@ -104,7 +109,11 @@ impl TestProject {
.expect("Failed to type-check module");
// Register function definitions & data-types for easier access later.
ast.register_definitions(&mut self.functions, &mut self.data_types);
ast.register_definitions(
&mut self.functions,
&mut self.constants,
&mut self.data_types,
);
// Register module sources for an easier access later.
self.module_sources.insert(

View File

@ -19,7 +19,7 @@ pub enum Error {
format!(
"\n{:>13} {}",
"Trace",
if trace.contains("\n") {
if trace.contains('\n') {
trace.lines()
.enumerate()
.map(|(ix, row)| {

View File

@ -1 +1,3 @@
build/
aiken.lock
**/plutus.json

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,12 +0,0 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_006",
"version": "0.0.0",
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.29-alpha+06ac851"
}
},
"validators": []
}

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,16 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
[[requirements]]
name = "aiken-lang/stdlib"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "main"
requirements = []
source = "github"
[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1724859223, nanos_since_epoch = 234656000 }, "2d9c3cdf2746778c8bbe6d49721186b66a33e0ce9cf295ac4cb2976b887a971c"]

View File

@ -1,111 +0,0 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_036",
"version": "0.0.0",
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.31-alpha+745f14c"
}
},
"validators": [
{
"title": "spend.bar.mint",
"redeemer": {
"title": "_redeemer",
"schema": {
"$ref": "#/definitions/Void"
}
},
"parameters": [
{
"title": "output_reference",
"schema": {
"$ref": "#/definitions/cardano~1transaction~1OutputReference"
}
}
],
"compiledCode": "58c90100003232323232323222533300332323232325332330093001300a375400426464a666016600660186ea8c8c8cc004004dd6180118079baa00922533301100114c0103d87a80001323253330103375e600a60246ea800803c4cdd2a40006602800497ae0133004004001301500230130012301100114a229414ccc028c008c02cdd50020a99980698061baa00414985858dd7180718059baa002370e90000b1806180680118058009805801180480098029baa00114984d9595cd2ab9d5573caae7d5d02ba157441",
"hash": "2edaecec5a072cd65b12395410ce6da7cafaffdf70506cbcb40b4df8"
},
{
"title": "spend.bar.else",
"parameters": [
{
"title": "output_reference",
"schema": {
"$ref": "#/definitions/cardano~1transaction~1OutputReference"
}
}
],
"compiledCode": "58c90100003232323232323222533300332323232325332330093001300a375400426464a666016600660186ea8c8c8cc004004dd6180118079baa00922533301100114c0103d87a80001323253330103375e600a60246ea800803c4cdd2a40006602800497ae0133004004001301500230130012301100114a229414ccc028c008c02cdd50020a99980698061baa00414985858dd7180718059baa002370e90000b1806180680118058009805801180480098029baa00114984d9595cd2ab9d5573caae7d5d02ba157441",
"hash": "2edaecec5a072cd65b12395410ce6da7cafaffdf70506cbcb40b4df8"
},
{
"title": "spend.foo.spend",
"datum": {
"title": "_datum",
"schema": {
"$ref": "#/definitions/Data"
}
},
"redeemer": {
"title": "_redeemer",
"schema": {
"$ref": "#/definitions/Data"
}
},
"compiledCode": "5901230100003232323232323225333002323232323253330073370e900118041baa001132323232330010013758602060226022601a6ea8020894ccc03c0045280992999806999806a99980819299980719b8748008c03cdd50008a5eb7bdb1804dd5980998081baa001323300100137566026602860206ea8c04c00c894ccc048004530103d87a800013232323253330133372291105000000000000002153330133371e91010500000000000000213374a90001980b9ba60014bd700a6103d87a8000133006006003375660280066eb8c048008c058008c0500045288a504a0944528899801801800980900098071807801180680098049baa00116300b300c002300a001300a00230080013004375400229309b2b2b9a5573aaae7955cfaba05742ae881",
"hash": "cf024265a1ff4ab129cef178c64b8c4cab25d62129242e01e29bb3d1"
},
{
"title": "spend.foo.else",
"compiledCode": "5901230100003232323232323225333002323232323253330073370e900118041baa001132323232330010013758602060226022601a6ea8020894ccc03c0045280992999806999806a99980819299980719b8748008c03cdd50008a5eb7bdb1804dd5980998081baa001323300100137566026602860206ea8c04c00c894ccc048004530103d87a800013232323253330133372291105000000000000002153330133371e91010500000000000000213374a90001980b9ba60014bd700a6103d87a8000133006006003375660280066eb8c048008c058008c0500045288a504a0944528899801801800980900098071807801180680098049baa00116300b300c002300a001300a00230080013004375400229309b2b2b9a5573aaae7955cfaba05742ae881",
"hash": "cf024265a1ff4ab129cef178c64b8c4cab25d62129242e01e29bb3d1"
}
],
"definitions": {
"ByteArray": {
"dataType": "bytes"
},
"Data": {
"title": "Data",
"description": "Any Plutus data."
},
"Int": {
"dataType": "integer"
},
"Void": {
"title": "Unit",
"description": "The nullary constructor.",
"anyOf": [
{
"dataType": "constructor",
"index": 0,
"fields": []
}
]
},
"cardano/transaction/OutputReference": {
"title": "OutputReference",
"description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output",
"anyOf": [
{
"title": "OutputReference",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "transaction_id",
"$ref": "#/definitions/ByteArray"
},
{
"title": "output_index",
"$ref": "#/definitions/Int"
}
]
}
]
}
}
}

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,48 +0,0 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_047",
"version": "0.0.0",
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.31-alpha+745f14c"
}
},
"validators": [
{
"title": "foo.foo.spend",
"datum": {
"title": "_datum",
"schema": {
"$ref": "#/definitions/Void"
}
},
"redeemer": {
"title": "_redeemer",
"schema": {
"$ref": "#/definitions/Void"
}
},
"compiledCode": "587601000032323232323225333002323232323253330073370e900118041baa0011323232324a2a66601466e1d2000300b375400a2a66601a60186ea80145261616300d300e002300c001300937540022c6014601600460120026012004600e00260086ea8004526136565734aae7555cf2ab9f5742ae89",
"hash": "c613c8326fea00dff179b7108f248b60c155881bbce544d84fe573e1"
},
{
"title": "foo.foo.else",
"compiledCode": "587601000032323232323225333002323232323253330073370e900118041baa0011323232324a2a66601466e1d2000300b375400a2a66601a60186ea80145261616300d300e002300c001300937540022c6014601600460120026012004600e00260086ea8004526136565734aae7555cf2ab9f5742ae89",
"hash": "c613c8326fea00dff179b7108f248b60c155881bbce544d84fe573e1"
}
],
"definitions": {
"Void": {
"title": "Unit",
"description": "The nullary constructor.",
"anyOf": [
{
"dataType": "constructor",
"index": 0,
"fields": []
}
]
}
}
}

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,16 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
[[requirements]]
name = "aiken-lang/stdlib"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "main"
requirements = []
source = "github"
[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1724858661, nanos_since_epoch = 121355000 }, "2d9c3cdf2746778c8bbe6d49721186b66a33e0ce9cf295ac4cb2976b887a971c"]

View File

@ -1,15 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
[[requirements]]
name = "aiken-lang/stdlib"
version = "1.9.0"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "1.9.0"
requirements = []
source = "github"
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,15 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
[[requirements]]
name = "aiken-lang/stdlib"
version = "1.9.0"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "1.9.0"
requirements = []
source = "github"
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,8 +0,0 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_062",
"version": "0.0.0",
"plutusVersion": "v2"
},
"validators": []
}

View File

@ -1,16 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
[[requirements]]
name = "aiken-lang/stdlib"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "main"
requirements = []
source = "github"
[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1724858662, nanos_since_epoch = 117929000 }, "2d9c3cdf2746778c8bbe6d49721186b66a33e0ce9cf295ac4cb2976b887a971c"]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

View File

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file
requirements = []
packages = []
[etags]

Some files were not shown because too many files have changed in this diff Show More