feat: add end-to-end tests to replace acceptance tests with strict UPLC comparison
Add acceptance tests 1, 2, and 6 as end-to-end tests.
Parent: 661a9a7ab8
Commit: 7dd13f8d73
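The new end-to-end tests compile a small Aiken source with the project's code generator and compare the optimized, DeBruijn-converted program against a hand-built UPLC term, then evaluate it to check it succeeds. A minimal sketch of the pattern, reassembled from the `assert_uplc` helper and the `acceptance_test_6_if_else` test added in this diff:

    #[test]
    fn acceptance_test_6_if_else() {
        // Aiken source under test; compiled to UPLC by the code generator.
        let src = r#"
            test bar() {
              let x = 1
              if x == 1 {
                True
              } else {
                False
              }
            }
        "#;

        // The expected program is built by hand from uplc Term constructors and compared,
        // after optimization, against the generated program's pretty-printed form.
        assert_uplc(
            src,
            Term::equals_integer()
                .apply(Term::integer(1.into()))
                .apply(Term::integer(1.into()))
                .delayed_if_else(Term::bool(true), Term::bool(false)),
        );
    }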
@@ -134,6 +134,7 @@ dependencies = [
"pallas",
"pallas-traverse",
"petgraph",
"pretty_assertions",
"proptest",
"pulldown-cmark",
"rayon",

@ -3035,7 +3035,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
} = ir else {
|
||||
let scope = ir.scope();
|
||||
|
||||
process_scope_updates(&mut to_be_defined_map, scope, func_index_map);
|
||||
process_scope_updates(&mut to_be_defined_map, &scope, func_index_map);
|
||||
continue;
|
||||
};
|
||||
|
||||
|
@@ -3043,7 +3043,7 @@ impl<'a> CodeGenerator<'a> {
let ValueConstructorVariant::ModuleFn {name, module, builtin: None, ..} = &constructor.variant else {
let scope = ir.scope();

process_scope_updates(&mut to_be_defined_map, scope, func_index_map);
process_scope_updates(&mut to_be_defined_map, &scope, func_index_map);
continue;
};

@@ -3263,6 +3263,7 @@ impl<'a> CodeGenerator<'a> {
} else {
unreachable!("We found a function with no definitions");
}
process_scope_updates(&mut to_be_defined_map, scope, func_index_map);
}

// Still to be defined

@@ -5023,13 +5024,13 @@ impl<'a> CodeGenerator<'a> {

fn process_scope_updates(
to_be_defined_map: &mut IndexMap<FunctionAccessKey, Scope>,
scope: Scope,
scope: &Scope,
func_index_map: &mut IndexMap<FunctionAccessKey, Scope>,
) {
for func in to_be_defined_map.clone().iter() {
if scope.common_ancestor(func.1) == scope.clone() {
if &scope.common_ancestor(func.1) == scope {
if let Some(index_scope) = func_index_map.get(func.0) {
if index_scope.common_ancestor(func.1) == scope.clone() {
if &index_scope.common_ancestor(func.1) == scope {
func_index_map.insert(func.0.clone(), scope.clone());
to_be_defined_map.shift_remove(func.0);
} else {

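For readability, a sketch of the refactored helper reassembled from the hunks above: `process_scope_updates` now borrows the scope instead of taking it by value, so borrow-based comparisons replace the `scope.clone()` calls. The else-branch body is elided because the hunk cuts off at that point.

    fn process_scope_updates(
        to_be_defined_map: &mut IndexMap<FunctionAccessKey, Scope>,
        scope: &Scope,
        func_index_map: &mut IndexMap<FunctionAccessKey, Scope>,
    ) {
        for func in to_be_defined_map.clone().iter() {
            // Compare against the borrowed scope rather than cloning it on every iteration.
            if &scope.common_ancestor(func.1) == scope {
                if let Some(index_scope) = func_index_map.get(func.0) {
                    if &index_scope.common_ancestor(func.1) == scope {
                        func_index_map.insert(func.0.clone(), scope.clone());
                        to_be_defined_map.shift_remove(func.0);
                    }
                    // else-branch elided: not visible in this hunk.
                }
            }
        }
    }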
@@ -6,7 +6,11 @@ edition = "2021"
repository = "https://github.com/aiken-lang/aiken/crates/project"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
authors = ["Lucas Rosa <x@rvcas.dev>", "Kasey White <kwhitemsg@gmail.com>", "KtorZ <matthias.benkort@gmail.com>"]
authors = [
"Lucas Rosa <x@rvcas.dev>",
"Kasey White <kwhitemsg@gmail.com>",
"KtorZ <matthias.benkort@gmail.com>",
]
rust-version = "1.66.1"

[dependencies]

@@ -41,5 +45,11 @@ zip = "0.6.4"
aiken-lang = { path = "../aiken-lang", version = "1.0.2-alpha" }
uplc = { path = '../uplc', version = "1.0.2-alpha" }

[dev-dependencies]
proptest = "1.1.0"
pretty_assertions = "1.3.0"

[features]
default = ["uplc/default", "aiken-lang/default"]
native-secp256k1 = ["uplc/native-secp256k1", "aiken-lang/native-secp256k1"]

@@ -175,6 +175,27 @@ impl Validator {

#[cfg(test)]
mod test {
use assert_json_diff::assert_json_eq;
use indexmap::IndexMap;
use pretty_assertions::assert_eq;
use serde_json::{self, json};
use std::{collections::HashMap, path::PathBuf};

use aiken_lang::{
self,
ast::{Definition, Function, ModuleKind, Tracing, TypedDataType, TypedFunction},
builtins,
gen_uplc::builder::{DataTypeKey, FunctionAccessKey},
parser,
tipo::TypeInfo,
IdGenerator,
};
use uplc::{
ast::{self as uplc_ast, Constant, Name},
machine::cost_model::ExBudget,
optimize, BigInt, PlutusData,
};

use super::{
super::{
definitions::{Definitions, Reference},

@@ -184,20 +205,6 @@ mod test {
*,
};
use crate::{module::ParsedModule, PackageName};
use aiken_lang::{
self,
ast::{ModuleKind, Tracing, TypedDataType, TypedFunction},
builtins,
gen_uplc::builder::{DataTypeKey, FunctionAccessKey},
parser,
tipo::TypeInfo,
IdGenerator,
};
use assert_json_diff::assert_json_eq;
use indexmap::IndexMap;
use serde_json::{self, json};
use std::{collections::HashMap, path::PathBuf};
use uplc::ast as uplc;

// TODO: Possible refactor this out of the module and have it used by `Project`. The idea would
// be to make this struct below the actual project, and wrap it in another metadata struct

@@ -319,6 +326,58 @@ mod test {
assert_json_eq!(serde_json::to_value(validator).unwrap(), expected);
}

fn assert_uplc(source_code: &str, expected: Term<Name>) {
let mut project = TestProject::new();

let modules = CheckedModules::singleton(project.check(project.parse(source_code)));
let mut generator = modules.new_generator(
&project.functions,
&project.data_types,
&project.module_types,
);

let Some(checked_module) = modules.values().next()
else {
unreachable!("There's got to be one right?")
};

let mut scripts = vec![];

for def in checked_module.ast.definitions() {
if let Definition::Test(func) = def {
scripts.push((
checked_module.input_path.clone(),
checked_module.name.clone(),
func,
));
}
}

assert_eq!(scripts.len(), 1);

let script = &scripts[0];

let Function { body, .. } = script.2;

let program = generator.generate_test(body);

let debruijn_program: Program<DeBruijn> = program.try_into().unwrap();

let expected = Program {
version: (1, 0, 0),
term: expected,
};

let expected = optimize::aiken_optimize_and_intern(expected);
let expected: Program<DeBruijn> = expected.try_into().unwrap();

assert_eq!(debruijn_program.to_pretty(), expected.to_pretty());

let eval = debruijn_program.eval(ExBudget::default());

assert!(!eval.failed())
}

fn fixture_definitions() -> Definitions<Annotated<Schema>> {
let mut definitions = Definitions::new();

@@ -412,6 +471,139 @@ mod test {
);
}

#[test]
fn acceptance_test_6_if_else() {
let src = r#"
test bar() {
let x = 1
if x == 1 {
True
} else {
False
}
}
"#;

assert_uplc(
src,
Term::equals_integer()
.apply(Term::integer(1.into()))
.apply(Term::integer(1.into()))
.delayed_if_else(Term::bool(true), Term::bool(false)),
);
}

#[test]
fn acceptance_test_1_length() {
let src = r#"
pub fn length(xs: List<a>) -> Int {
when xs is {
[] ->
0
[_, ..rest] ->
1 + length(rest)
}
}

test length_1() {
length([1, 2, 3]) == 3
}
"#;

assert_uplc(
src,
Term::equals_integer()
.apply(
Term::var("length")
.lambda("length")
.apply(Term::var("length").apply(Term::var("length")))
.lambda("length")
.apply(
Term::var("xs")
.delayed_choose_list(
Term::integer(0.into()),
Term::add_integer()
.apply(Term::integer(1.into()))
.apply(
Term::var("length")
.apply(Term::var("length"))
.apply(Term::var("rest")),
)
.lambda("rest")
.apply(Term::tail_list().apply(Term::var("xs"))),
)
.lambda("xs")
.lambda("length"),
)
.apply(Term::list_values(vec![
Constant::Data(PlutusData::BigInt(BigInt::Int(1.into()))),
Constant::Data(PlutusData::BigInt(BigInt::Int(2.into()))),
Constant::Data(PlutusData::BigInt(BigInt::Int(3.into()))),
])),
)
.apply(Term::integer(3.into())),
);
}

#[test]
fn acceptance_test_2_repeat() {
let src = r#"
pub fn repeat(x: a, n: Int) -> List<a> {
if n <= 0 {
[]
} else {
[x, ..repeat(x, n - 1)]
}
}

test repeat_1() {
repeat("aiken", 2) == ["aiken", "aiken"]
}
"#;

assert_uplc(
src,
Term::equals_data()
.apply(
Term::list_data().apply(
Term::var("repeat")
.lambda("repeat")
.apply(Term::var("repeat").apply(Term::var("repeat")))
.lambda("repeat")
.apply(
Term::less_than_equals_integer()
.apply(Term::var("n"))
.apply(Term::integer(0.into()))
.delayed_if_else(
Term::empty_list(),
Term::mk_cons()
.apply(Term::b_data().apply(Term::var("x")))
.apply(
Term::var("repeat")
.apply(Term::var("repeat"))
.apply(Term::var("x"))
.apply(
Term::sub_integer()
.apply(Term::var("n"))
.apply(Term::integer(1.into())),
),
),
)
.lambda("n")
.lambda("x")
.lambda("repeat"),
)
.apply(Term::byte_string("aiken".as_bytes().to_vec()))
.apply(Term::integer(3.into())),
),
)
.apply(Term::list_data().apply(Term::list_values(vec![
Constant::Data(PlutusData::BoundedBytes("aiken".as_bytes().to_vec().into())),
Constant::Data(PlutusData::BoundedBytes("aiken".as_bytes().to_vec().into())),
]))),
);
}

#[test]
fn mint_parameterized() {
assert_validator(

@@ -1165,7 +1357,7 @@ mod test {
fn validate_arguments_integer() {
let definitions = fixture_definitions();

let term = Term::data(uplc::Data::integer(42.into()));
let term = Term::data(uplc_ast::Data::integer(42.into()));

let param = Parameter {
title: None,

@@ -1179,7 +1371,7 @@ mod test {
fn validate_arguments_bytestring() {
let definitions = fixture_definitions();

let term = Term::data(uplc::Data::bytestring(vec![102, 111, 111]));
let term = Term::data(uplc_ast::Data::bytestring(vec![102, 111, 111]));

let param = Parameter {
title: None,

@@ -1208,9 +1400,9 @@ mod test {
.into(),
);

let term = Term::data(uplc::Data::list(vec![
uplc::Data::integer(42.into()),
uplc::Data::integer(14.into()),
let term = Term::data(uplc_ast::Data::list(vec![
uplc_ast::Data::integer(42.into()),
uplc_ast::Data::integer(14.into()),
]));

let param: Parameter = schema.into();

@@ -1237,9 +1429,9 @@ mod test {
.into(),
);

let term = Term::data(uplc::Data::list(vec![uplc::Data::bytestring(vec![
102, 111, 111,
])]));
let term = Term::data(uplc_ast::Data::list(vec![uplc_ast::Data::bytestring(
vec![102, 111, 111],
)]));

let param: Parameter = schema.into();

@@ -1269,9 +1461,9 @@ mod test {
.into(),
);

let term = Term::data(uplc::Data::list(vec![
uplc::Data::integer(42.into()),
uplc::Data::bytestring(vec![102, 111, 111]),
let term = Term::data(uplc_ast::Data::list(vec![
uplc_ast::Data::integer(42.into()),
uplc_ast::Data::bytestring(vec![102, 111, 111]),
]));

let param: Parameter = schema.into();

@@ -1300,9 +1492,9 @@ mod test {
.into(),
);

let term = Term::data(uplc::Data::map(vec![(
uplc::Data::bytestring(vec![102, 111, 111]),
uplc::Data::integer(42.into()),
let term = Term::data(uplc_ast::Data::map(vec![(
uplc_ast::Data::bytestring(vec![102, 111, 111]),
uplc_ast::Data::integer(42.into()),
)]));

let param: Parameter = schema.into();

@@ -1316,7 +1508,7 @@ mod test {

let definitions = fixture_definitions();

let term = Term::data(uplc::Data::constr(1, vec![]));
let term = Term::data(uplc_ast::Data::constr(1, vec![]));

let param: Parameter = schema.into();

@@ -1351,7 +1543,10 @@ mod test {
.into(),
);

let term = Term::data(uplc::Data::constr(0, vec![uplc::Data::constr(0, vec![])]));
let term = Term::data(uplc_ast::Data::constr(
0,
vec![uplc_ast::Data::constr(0, vec![])],
));

let param: Parameter = schema.into();

@@ -1404,15 +1599,15 @@ mod test {
.into(),
);

let term = Term::data(uplc::Data::constr(
let term = Term::data(uplc_ast::Data::constr(
1,
vec![
uplc::Data::integer(14.into()),
uplc::Data::constr(
uplc_ast::Data::integer(14.into()),
uplc_ast::Data::constr(
1,
vec![
uplc::Data::integer(42.into()),
uplc::Data::constr(0, vec![]),
uplc_ast::Data::integer(42.into()),
uplc_ast::Data::constr(0, vec![]),
],
),
],

@@ -404,7 +404,7 @@ impl PartialEq for NamedDeBruijn {
/// It allows for injecting fake textual names while also using Debruijn for decoding
/// without having to loop through twice.
#[derive(Debug, Clone)]
pub struct FakeNamedDeBruijn(pub NamedDeBruijn);
pub struct FakeNamedDeBruijn(pub(crate) NamedDeBruijn);

impl From<DeBruijn> for FakeNamedDeBruijn {
fn from(d: DeBruijn) -> Self {

@@ -53,6 +53,10 @@ impl<T> Term<T> {
Term::Constant(Constant::ProtoList(Type::Data, vec![]).into())
}

pub fn list_values(vals: Vec<Constant>) -> Self {
Term::Constant(Constant::ProtoList(Type::Data, vals).into())
}

pub fn empty_map() -> Self {
Term::Constant(
Constant::ProtoList(Type::Pair(Type::Data.into(), Type::Data.into()), vec![]).into(),

@@ -103,6 +107,14 @@ impl<T> Term<T> {
Term::Builtin(DefaultFunction::EqualsInteger)
}

pub fn less_than_integer() -> Self {
Term::Builtin(DefaultFunction::LessThanInteger)
}

pub fn less_than_equals_integer() -> Self {
Term::Builtin(DefaultFunction::LessThanEqualsInteger)
}

pub fn equals_string() -> Self {
Term::Builtin(DefaultFunction::EqualsString)
}

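For illustration, a minimal sketch of how the new `Term::list_values` and `less_than_equals_integer` builders are used when hand-writing expected programs (mirroring the acceptance tests above; the function name here is illustrative only):

    // Expected UPLC fragment: `n <= 0` selects between an empty list and the
    // constant Data-encoded list [1, 2, 3].
    fn example_expected_term() -> Term<Name> {
        Term::less_than_equals_integer()
            .apply(Term::var("n"))
            .apply(Term::integer(0.into()))
            .delayed_if_else(
                Term::empty_list(),
                Term::list_values(vec![
                    Constant::Data(PlutusData::BigInt(BigInt::Int(1.into()))),
                    Constant::Data(PlutusData::BigInt(BigInt::Int(2.into()))),
                    Constant::Data(PlutusData::BigInt(BigInt::Int(3.into()))),
                ]),
            )
    }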
@@ -20,7 +20,7 @@ const TERM_TAG_WIDTH: u32 = 4;
pub trait Binder<'b>: Encode + Decode<'b> {
fn binder_encode(&self, e: &mut Encoder) -> Result<(), en::Error>;
fn binder_decode(d: &mut Decoder) -> Result<Self, de::Error>;
fn text(&self) -> &str;
fn text(&self) -> String;
}

impl<'b, T> Flat<'b> for Program<T> where T: Binder<'b> + Debug {}

@@ -255,7 +255,7 @@
let var_option = T::binder_decode(d);
match var_option {
Ok(var) => {
state_log.push(var.text().to_string());
state_log.push(var.text());
let term_option = Term::decode_debug(d, state_log);
match term_option {
Ok(term) => {

@@ -650,8 +650,8 @@ impl<'b> Binder<'b> for Name {
Name::decode(d)
}

fn text(&self) -> &str {
&self.text
fn text(&self) -> String {
self.text.clone()
}
}

@@ -687,8 +687,8 @@ impl<'b> Binder<'b> for NamedDeBruijn {
})
}

fn text(&self) -> &str {
&self.text
fn text(&self) -> String {
format!("{}_{}", &self.text, self.index)
}
}

@@ -715,8 +715,8 @@ impl<'b> Binder<'b> for DeBruijn {
Ok(DeBruijn::new(0))
}

fn text(&self) -> &str {
"i"
fn text(&self) -> String {
format!("i_{}", self)
}
}

@@ -749,8 +749,8 @@ impl<'b> Binder<'b> for FakeNamedDeBruijn {
Ok(index.into())
}

fn text(&self) -> &str {
&self.0.text
fn text(&self) -> String {
format!("{}_{}", self.0.text, self.0.index)
}
}

@@ -1,3 +1,13 @@
test foo() {
(1, []) == (1, [])
}

test bar() {
let x = 1
if x == 1 {
True
} else {
False
}
}

@@ -6,21 +6,29 @@
},
"validators": [
{
"title": "spend.staking",
"datum": {
"title": "_datum",
"schema": {
"$ref": "#/definitions/Void"
}
},
"title": "spend.gift_card",
"redeemer": {
"title": "_redeemer",
"title": "rdmr",
"schema": {
"$ref": "#/definitions/Void"
"$ref": "#/definitions/spend~1Action"
}
},
"compiledCode": "5904660100003232323232323232323232322223232323232323232323232325333014323232323232323232323375e6e98008dd300099980a1bac33019301b33019301b01648001200422533301f3375e66036603a00490000020998061bab3301b301d0024800800440052f5bded8c06660266eb0cc060c068cc060c068055200048000894ccc078cdd79980d180e1980d180e0012400490000018998059bab3301a301c3301a301c00248009200200110014bd6f7b6301980b980c800a400060400026040002602c002603a002602864a66602e66e1d20003016001100115330184912a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e0016323233001375866028602c66028602c022900024000466ebccc054c05c00520000043001001222533301d00213374a900125eb804c8c94ccc06cc00c0084cdd2a40006604000497ae013330050050010033021003301f002301b00130123253330153370e9001180a00088008a9980b2492a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e001633010301200d480085261622320013333008002001222325333019533301d00114a229404cdd2a400497ae013374a90001980f1ba60014bd70199980380100091119299980e19b87001480004cdd2a400497ae013374a9000198109ba80014bd7019b8000200100600322533301433720004002266e9520004bd700a99980a19b8f00200113374a900125eb804cdd2a400897ae030010012222253330190041003132323232333330090090033333300a007001002006005006005375c60340066eb4c068008c074014c06c010c0040048888894ccc0600144cc064cdd81ba9004375000697adef6c6013232323253330183375e6600a01000266e9520004bd7009980e99bb037520106ea001c02454ccc060cdc780400089919299980d19b87480000044c8c8cc084cdd81ba900c375000200e6eb4c084004c0600084014c060004ccc01802001c0084cc074cdd81ba9001375000466666601401400601000e00c00a6eb8c06400cdd6980c801180e003180d002980080091111299980a802080189919191919999804804801999998050038008010030028030029bae30160033756602c004603200a602e0086002002444444a66602800a26602a66ec0dd48021ba60034bd6f7b630099191919299980a19baf330050080013374a900025eb804cc064cdd81ba9008374c00e0122a66602866e3c0200044c8c94ccc058cdc3a4000002264646603a66ec0dd48061ba60010073756603a0026028004200a602800266600c01000e00426603266ec0dd48009ba600233333300a00a003008007006005375c602a0066eacc054008c060018c058014c00400488894ccc04000c40044c8c8cc010008cccc01801800401000cc050010c04800ccc0040052000222233330073370e00200601a4666600a00a66e000112002300f001002002230063754002460086ea80055cd2b9c5573aaae7955cfaba05742ae89",
"hash": "0e3d18f2acce2c1fc26b5c7ede74e05f944826f2d6a0c102707b4419"
"parameters": [
{
"title": "token_name",
"schema": {
"$ref": "#/definitions/ByteArray"
}
},
{
"title": "utxo_ref",
"schema": {
"$ref": "#/definitions/aiken~1transaction~1OutputReference"
}
}
],
"compiledCode": "5902df0100003232323232323232323232323223222232533300b323232323232323232323232323232533301f30220021323232533301d3370e90000008a99980e991919800806919baf3301c301e00148000064c0040048894ccc09400852809919299981118018010a511333005005001003302900330270021533301d3370e0049001099b8f00301714a02a6603e9201254578706563746564206f6e20696e636f727265637420626f6f6c65616e2076617269616e74001616301b012375a603e0046eb8c07400454cc0712401334c6973742f5475706c652f436f6e73747220636f6e7461696e73206d6f7265206974656d73207468616e206578706563746564001630200013200132323232533301c3370e90010008a5eb7bdb1804c8c8004dd59812000980d001180d0009980080180518008009112999810001099ba5480092f5c0264646464a66603e66e3c0140044cdd2a40006604a6e980092f5c0266600e00e00600a6eb8c08400cdd59810801181200198110011bab301e001301e001301d001301c001301b00237586032002601e00a6eb8c05c004c0354ccc03ccdc3a4000601c00220022a660229212a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e00163015001301500230130013009002149858cc02cc94ccc02ccdc3a40000022a66602060120062930a99806a491d4578706563746564206e6f206669656c647320666f7220436f6e73747200161533300b3370e90010008a99980818048018a4c2a6601a92011d4578706563746564206e6f206669656c647320666f7220436f6e7374720016153300d4912b436f6e73747220696e64657820646964206e6f74206d6174636820616e7920747970652076617269616e7400163009002002375c0026600200290001111199980399b8700100300e233330050053370000890011808000801001118039baa001230053754002ae695cdab9c5573aaae7955cfaba05742ae881",
"hash": "60a6357e1121370354de30ef09391b6b7c4a65539041b7fef805edf3"
},
{
"title": "spend2.backtrace",

@@ -41,6 +49,12 @@
}
],
"definitions": {
"ByteArray": {
"dataType": "bytes"
},
"Int": {
"dataType": "integer"
},
"Void": {
"title": "Unit",
"description": "The nullary constructor.",

@@ -51,6 +65,61 @@
"fields": []
}
]
},
"aiken/transaction/OutputReference": {
"title": "OutputReference",
"description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output",
"anyOf": [
{
"title": "OutputReference",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "transaction_id",
"$ref": "#/definitions/aiken~1transaction~1TransactionId"
},
{
"title": "output_index",
"$ref": "#/definitions/Int"
}
]
}
]
},
"aiken/transaction/TransactionId": {
"title": "TransactionId",
"description": "A unique transaction identifier, as the hash of a transaction body. Note that the transaction id\n isn't a direct hash of the `Transaction` as visible on-chain. Rather, they correspond to hash\n digests of transaction body as they are serialized on the network.",
"anyOf": [
{
"title": "TransactionId",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "hash",
"$ref": "#/definitions/ByteArray"
}
]
}
]
},
"spend/Action": {
"title": "Action",
"anyOf": [
{
"title": "Mint",
"dataType": "constructor",
"index": 0,
"fields": []
},
{
"title": "Burn",
"dataType": "constructor",
"index": 1,
"fields": []
}
]
}
}
}

@@ -1,44 +1,37 @@
use aiken/list.{find, foldr}
use aiken/transaction.{Input, ScriptContext, Spend}
use aiken/transaction.{Input, ScriptContext, Spend, OutputReference, Transaction} as tx
use aiken/transaction/value.{add, zero}
use aiken/dict

validator {
fn staking(_datum: Void, _redeemer: Void, context: ScriptContext) -> Bool {
expect Spend(ref) =
context.purpose

expect Some(i) =
find(context.transaction.inputs, fn(x) { x.output_reference == ref })
let Input { output, .. } =
i
let staking_addr =
output.address

let v_in =
foldr(
context.transaction.inputs,
fn(x, y) {
if x.output.address == staking_addr {
add(x.output.value, y)
} else {
y
}
},
zero(),
)

let v_out =
foldr(
context.transaction.outputs,
fn(x, y) {
if x.address == staking_addr {
add(x.value, y)
} else {
y
}
},
zero(),
)
v_in == v_out
}

type Action {
Mint
Burn
}

validator(token_name: ByteArray, utxo_ref: OutputReference) {
fn gift_card(rdmr: Action, ctx: ScriptContext) -> Bool {
let ScriptContext { transaction, purpose } =
ctx

expect tx.Mint(policy_id) =
purpose

let Transaction { inputs, mint, .. } =
transaction

expect [(asset_name, amount)] = mint
|> value.tokens(policy_id)
|> dict.to_list()

when rdmr is {
Mint -> {
expect True =
list.any(inputs, fn(input) { input.output_reference == utxo_ref })
amount == 1 && asset_name == token_name
}
Burn ->
todo @"burn"
}
}
}

@@ -1,18 +1,18 @@
use aiken/list
use aiken/transaction.{Output, ScriptContext}
// use aiken/list
// use aiken/transaction.{Output, ScriptContext}

validator {
fn backtrace(_datum: Void, _redeemer: Void, context: ScriptContext) -> Bool {
expect Some(_) =
list.find(context.transaction.outputs, fn(_) { True })
let _ =
find_stuff(context)
True
}
}
// validator {
// fn backtrace(_datum: Void, _redeemer: Void, context: ScriptContext) -> Bool {
// expect Some(_) =
// list.find(context.transaction.outputs, fn(_) { True })
// let _ =
// find_stuff(context)
// True
// }
// }

fn find_stuff(context) -> Output {
expect Some(stuff) =
list.find(context.transaction.outputs, fn(_) { True })
stuff
}
// fn find_stuff(context) -> Output {
// expect Some(stuff) =
// list.find(context.transaction.outputs, fn(_) { True })
// stuff
// }