Merge remote-tracking branch 'official/main' into waalge/bump-rust-1-76-0

commit 658984e157

@ -5,6 +5,10 @@
### Added

- **aiken-lang**: `Data` now has a generic argument that can be used to specify the blueprint type. @KtorZ
- **aiken-lang**: New types `PRNG` and `Fuzzer` in the prelude. @KtorZ
- **aiken-lang**: Test definitions now accept an (optional) argument alongside a new keyword `via` to specify fuzzers. @KtorZ
- **aiken-project**: Property-based testing framework with integrated shrinking. @KtorZ
- **aiken**: The `check` command now accepts an extra argument `--seed` to provide an initial seed for the pseudo-random generator of properties. @KtorZ

### Fixed

(File diff suppressed because it is too large.)

@ -1,5 +1,6 @@
#!/usr/bin/env bash
set -e
set -o pipefail

AIKEN_DIR=${AIKEN_DIR-"$HOME/.aiken"}
AIKEN_BIN_DIR="$AIKEN_DIR/bin"

@ -86,7 +87,9 @@ main() {

# Download the binaries tarball and unpack it into the .aiken bin directory.
say "downloading aiken"
ensure curl -# -L "$BIN_TARBALL_URL" | tar -xzC "$AIKEN_BIN_DIR"
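# `-f` makes curl exit non-zero on HTTP errors (e.g. a missing release), so the
# fallback below reports a clear error instead of piping an error page into tar.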
curl -f -# -L "$BIN_TARBALL_URL" | tar -xzC "$AIKEN_BIN_DIR" || {
err "failed to download aiken: version not found or network error"
}

for bin in "${BINS[@]}"; do
bin_path="$AIKEN_BIN_DIR/$bin"

@ -21,6 +21,7 @@ itertools = "0.10.5"
miette = "5.9.0"
ordinal = "0.3.2"
owo-colors = { version = "3.5.0", features = ["supports-colors"] }
pallas.workspace = true
strum = "0.24.1"
thiserror = "1.0.39"
vec1 = "1.10.1"

@ -5,6 +5,7 @@ use crate::{
|
|||
parser::token::{Base, Token},
|
||||
tipo::{PatternConstructor, Type, TypeInfo},
|
||||
};
|
||||
use indexmap::IndexMap;
|
||||
use miette::Diagnostic;
|
||||
use owo_colors::{OwoColorize, Stream::Stdout};
|
||||
use std::{
|
||||
|
@ -127,6 +128,62 @@ impl TypedModule {
|
|||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: Avoid cloning definitions here. This would likely require having a lifetime on
|
||||
// 'Project', so that we can enforce that those references live from the ast to here.
|
||||
pub fn register_definitions(
|
||||
&self,
|
||||
functions: &mut IndexMap<FunctionAccessKey, TypedFunction>,
|
||||
data_types: &mut IndexMap<DataTypeKey, TypedDataType>,
|
||||
) {
|
||||
for def in self.definitions() {
|
||||
match def {
|
||||
Definition::Fn(func) => {
|
||||
functions.insert(
|
||||
FunctionAccessKey {
|
||||
module_name: self.name.clone(),
|
||||
function_name: func.name.clone(),
|
||||
},
|
||||
func.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
Definition::Test(test) => {
|
||||
functions.insert(
|
||||
FunctionAccessKey {
|
||||
module_name: self.name.clone(),
|
||||
function_name: test.name.clone(),
|
||||
},
|
||||
test.clone().into(),
|
||||
);
|
||||
}
|
||||
|
||||
Definition::DataType(dt) => {
|
||||
data_types.insert(
|
||||
DataTypeKey {
|
||||
module_name: self.name.clone(),
|
||||
defined_type: dt.name.clone(),
|
||||
},
|
||||
dt.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
Definition::Validator(v) => {
|
||||
let module_name = self.name.as_str();
|
||||
|
||||
if let Some((k, v)) = v.into_function_definition(module_name, |f, _| Some(f)) {
|
||||
functions.insert(k, v);
|
||||
}
|
||||
|
||||
if let Some((k, v)) = v.into_function_definition(module_name, |_, f| f) {
|
||||
functions.insert(k, v);
|
||||
}
|
||||
}
|
||||
|
||||
Definition::TypeAlias(_) | Definition::ModuleConstant(_) | Definition::Use(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
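A minimal sketch of how a caller might pool definitions across modules with `register_definitions`; `collect_definitions` and `checked_modules` are hypothetical names, and the `aiken-lang` types (`TypedModule`, `FunctionAccessKey`, etc.) are assumed to be in scope:

```rust
use indexmap::IndexMap;

// Sketch: gather every function, test and validator handler plus every data
// type from a set of already type-checked modules, keyed by module name.
fn collect_definitions(
    checked_modules: &[TypedModule],
) -> (
    IndexMap<FunctionAccessKey, TypedFunction>,
    IndexMap<DataTypeKey, TypedDataType>,
) {
    let mut functions = IndexMap::new();
    let mut data_types = IndexMap::new();

    for module in checked_modules {
        module.register_definitions(&mut functions, &mut data_types);
    }

    (functions, data_types)
}
```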
|
||||
|
||||
fn str_to_keyword(word: &str) -> Option<Token> {
|
||||
|
@ -154,16 +211,20 @@ fn str_to_keyword(word: &str) -> Option<Token> {
|
|||
"and" => Some(Token::And),
|
||||
"or" => Some(Token::Or),
|
||||
"validator" => Some(Token::Validator),
|
||||
"via" => Some(Token::Via),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub type TypedFunction = Function<Rc<Type>, TypedExpr>;
|
||||
pub type UntypedFunction = Function<(), UntypedExpr>;
|
||||
pub type TypedFunction = Function<Rc<Type>, TypedExpr, TypedArg>;
|
||||
pub type UntypedFunction = Function<(), UntypedExpr, UntypedArg>;
|
||||
|
||||
pub type TypedTest = Function<Rc<Type>, TypedExpr, TypedArgVia>;
|
||||
pub type UntypedTest = Function<(), UntypedExpr, UntypedArgVia>;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct Function<T, Expr> {
|
||||
pub arguments: Vec<Arg<T>>,
|
||||
pub struct Function<T, Expr, Arg> {
|
||||
pub arguments: Vec<Arg>,
|
||||
pub body: Expr,
|
||||
pub doc: Option<String>,
|
||||
pub location: Span,
|
||||
|
@ -178,9 +239,47 @@ pub struct Function<T, Expr> {
|
|||
pub type TypedTypeAlias = TypeAlias<Rc<Type>>;
|
||||
pub type UntypedTypeAlias = TypeAlias<()>;
|
||||
|
||||
impl TypedFunction {
|
||||
impl From<UntypedTest> for UntypedFunction {
|
||||
fn from(f: UntypedTest) -> Self {
|
||||
Function {
|
||||
doc: f.doc,
|
||||
location: f.location,
|
||||
name: f.name,
|
||||
public: f.public,
|
||||
arguments: f.arguments.into_iter().map(|arg| arg.into()).collect(),
|
||||
return_annotation: f.return_annotation,
|
||||
return_type: f.return_type,
|
||||
body: f.body,
|
||||
can_error: f.can_error,
|
||||
end_position: f.end_position,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TypedTest> for TypedFunction {
|
||||
fn from(f: TypedTest) -> Self {
|
||||
Function {
|
||||
doc: f.doc,
|
||||
location: f.location,
|
||||
name: f.name,
|
||||
public: f.public,
|
||||
arguments: f.arguments.into_iter().map(|arg| arg.into()).collect(),
|
||||
return_annotation: f.return_annotation,
|
||||
return_type: f.return_type,
|
||||
body: f.body,
|
||||
can_error: f.can_error,
|
||||
end_position: f.end_position,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TypedTest {
|
||||
pub fn test_hint(&self) -> Option<(BinOp, Box<TypedExpr>, Box<TypedExpr>)> {
|
||||
do_test_hint(&self.body)
|
||||
if self.arguments.is_empty() {
|
||||
do_test_hint(&self.body)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -235,9 +334,77 @@ pub struct TypeAlias<T> {
|
|||
pub tipo: T,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct DataTypeKey {
|
||||
pub module_name: String,
|
||||
pub defined_type: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
|
||||
pub struct FunctionAccessKey {
|
||||
pub module_name: String,
|
||||
pub function_name: String,
|
||||
}
|
||||
|
||||
pub type TypedDataType = DataType<Rc<Type>>;
|
||||
|
||||
impl TypedDataType {
|
||||
pub fn bool() -> Self {
|
||||
DataType {
|
||||
constructors: vec![
|
||||
RecordConstructor {
|
||||
location: Span::empty(),
|
||||
name: "False".to_string(),
|
||||
arguments: vec![],
|
||||
doc: None,
|
||||
sugar: false,
|
||||
},
|
||||
RecordConstructor {
|
||||
location: Span::empty(),
|
||||
name: "True".to_string(),
|
||||
arguments: vec![],
|
||||
doc: None,
|
||||
sugar: false,
|
||||
},
|
||||
],
|
||||
doc: None,
|
||||
location: Span::empty(),
|
||||
name: "Bool".to_string(),
|
||||
opaque: false,
|
||||
parameters: vec![],
|
||||
public: true,
|
||||
typed_parameters: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn prng() -> Self {
|
||||
DataType {
|
||||
constructors: vec![
|
||||
RecordConstructor {
|
||||
location: Span::empty(),
|
||||
name: "Seeded".to_string(),
|
||||
arguments: vec![],
|
||||
doc: None,
|
||||
sugar: false,
|
||||
},
|
||||
RecordConstructor {
|
||||
location: Span::empty(),
|
||||
name: "Replayed".to_string(),
|
||||
arguments: vec![],
|
||||
doc: None,
|
||||
sugar: false,
|
||||
},
|
||||
],
|
||||
doc: None,
|
||||
location: Span::empty(),
|
||||
name: "PRNG".to_string(),
|
||||
opaque: false,
|
||||
parameters: vec![],
|
||||
public: true,
|
||||
typed_parameters: vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ordering() -> Self {
|
||||
DataType {
|
||||
constructors: vec![
|
||||
|
@ -358,18 +525,51 @@ pub type UntypedValidator = Validator<(), UntypedExpr>;
|
|||
pub struct Validator<T, Expr> {
|
||||
pub doc: Option<String>,
|
||||
pub end_position: usize,
|
||||
pub fun: Function<T, Expr>,
|
||||
pub other_fun: Option<Function<T, Expr>>,
|
||||
pub fun: Function<T, Expr, Arg<T>>,
|
||||
pub other_fun: Option<Function<T, Expr, Arg<T>>>,
|
||||
pub location: Span,
|
||||
pub params: Vec<Arg<T>>,
|
||||
}
|
||||
|
||||
impl TypedValidator {
|
||||
pub fn into_function_definition<'a, F>(
|
||||
&'a self,
|
||||
module_name: &str,
|
||||
select: F,
|
||||
) -> Option<(FunctionAccessKey, TypedFunction)>
|
||||
where
|
||||
F: Fn(&'a TypedFunction, Option<&'a TypedFunction>) -> Option<&'a TypedFunction> + 'a,
|
||||
{
|
||||
match select(&self.fun, self.other_fun.as_ref()) {
|
||||
None => None,
|
||||
Some(fun) => {
|
||||
let mut fun = fun.clone();
|
||||
|
||||
fun.arguments = self
|
||||
.params
|
||||
.clone()
|
||||
.into_iter()
|
||||
.chain(fun.arguments)
|
||||
.collect();
|
||||
|
||||
Some((
|
||||
FunctionAccessKey {
|
||||
module_name: module_name.to_string(),
|
||||
function_name: fun.name.clone(),
|
||||
},
|
||||
fun,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
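For reference, the selector argument decides which of the validator's (up to two) handlers to expose. A hedged sketch, assuming the `aiken_lang::ast` paths, mirroring the two calls made from `register_definitions` above:

```rust
use aiken_lang::ast::{FunctionAccessKey, TypedFunction, TypedValidator};

// Expose a validator's handlers as ordinary, keyed functions. The validator's
// params are prepended to each handler's own arguments by
// `into_function_definition`.
fn validator_functions(
    validator: &TypedValidator,
    module_name: &str,
) -> Vec<(FunctionAccessKey, TypedFunction)> {
    let mut out = Vec::new();

    // Always take the primary handler...
    if let Some(entry) = validator.into_function_definition(module_name, |f, _| Some(f)) {
        out.push(entry);
    }

    // ...and the optional second handler only when it is defined.
    if let Some(entry) = validator.into_function_definition(module_name, |_, f| f) {
        out.push(entry);
    }

    out
}
```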
|
||||
|
||||
pub type TypedDefinition = Definition<Rc<Type>, TypedExpr, String>;
|
||||
pub type UntypedDefinition = Definition<(), UntypedExpr, ()>;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum Definition<T, Expr, PackageName> {
|
||||
Fn(Function<T, Expr>),
|
||||
Fn(Function<T, Expr, Arg<T>>),
|
||||
|
||||
TypeAlias(TypeAlias<T>),
|
||||
|
||||
|
@ -379,7 +579,7 @@ pub enum Definition<T, Expr, PackageName> {
|
|||
|
||||
ModuleConstant(ModuleConstant<T>),
|
||||
|
||||
Test(Function<T, Expr>),
|
||||
Test(Function<T, Expr, ArgVia<T, Expr>>),
|
||||
|
||||
Validator(Validator<T, Expr>),
|
||||
}
|
||||
|
@ -634,6 +834,30 @@ impl<A> Arg<A> {
|
|||
}
|
||||
}
|
||||
|
||||
pub type TypedArgVia = ArgVia<Rc<Type>, TypedExpr>;
|
||||
pub type UntypedArgVia = ArgVia<(), UntypedExpr>;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ArgVia<T, Expr> {
|
||||
pub arg_name: ArgName,
|
||||
pub location: Span,
|
||||
pub via: Expr,
|
||||
pub tipo: T,
|
||||
pub annotation: Option<Annotation>,
|
||||
}
|
||||
|
||||
impl<T, Ann> From<ArgVia<T, Ann>> for Arg<T> {
|
||||
fn from(arg: ArgVia<T, Ann>) -> Arg<T> {
|
||||
Arg {
|
||||
arg_name: arg.arg_name,
|
||||
location: arg.location,
|
||||
tipo: arg.tipo,
|
||||
annotation: None,
|
||||
doc: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum ArgName {
|
||||
Discarded {
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
use crate::{
|
||||
ast::{Arg, ArgName, CallArg, Function, ModuleKind, Span, TypedDataType, TypedFunction, UnOp},
|
||||
ast::{
|
||||
Arg, ArgName, CallArg, DataTypeKey, Function, FunctionAccessKey, ModuleKind, Span,
|
||||
TypedDataType, TypedFunction, UnOp,
|
||||
},
|
||||
expr::TypedExpr,
|
||||
gen_uplc::builder::{DataTypeKey, FunctionAccessKey},
|
||||
tipo::{
|
||||
fields::FieldMap, Type, TypeConstructor, TypeInfo, TypeVar, ValueConstructor,
|
||||
ValueConstructorVariant,
|
||||
|
@ -26,6 +28,8 @@ pub const STRING: &str = "String";
|
|||
pub const OPTION: &str = "Option";
|
||||
pub const ORDERING: &str = "Ordering";
|
||||
pub const REDEEMER_WRAPPER: &str = "RedeemerWrapper";
|
||||
pub const PRNG: &str = "PRNG";
|
||||
pub const FUZZER: &str = "Fuzzer";
|
||||
|
||||
/// Build a prelude that can be injected
|
||||
/// into a compiler pipeline
|
||||
|
@ -80,22 +84,7 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
// Bool
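// NOTE: constructors listed in definition/index order (False = 0, True = 1)
// so positions line up with the constructor indices used elsewhere.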
|
||||
prelude.types_constructors.insert(
|
||||
BOOL.to_string(),
|
||||
vec!["True".to_string(), "False".to_string()],
|
||||
);
|
||||
|
||||
prelude.values.insert(
|
||||
"True".to_string(),
|
||||
ValueConstructor::public(
|
||||
bool(),
|
||||
ValueConstructorVariant::Record {
|
||||
module: "".into(),
|
||||
name: "True".to_string(),
|
||||
field_map: None::<FieldMap>,
|
||||
arity: 0,
|
||||
location: Span::empty(),
|
||||
constructors_count: 2,
|
||||
},
|
||||
),
|
||||
vec!["False".to_string(), "True".to_string()],
|
||||
);
|
||||
|
||||
prelude.values.insert(
|
||||
|
@ -113,6 +102,21 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
),
|
||||
);
|
||||
|
||||
prelude.values.insert(
|
||||
"True".to_string(),
|
||||
ValueConstructor::public(
|
||||
bool(),
|
||||
ValueConstructorVariant::Record {
|
||||
module: "".into(),
|
||||
name: "True".to_string(),
|
||||
field_map: None::<FieldMap>,
|
||||
arity: 0,
|
||||
location: Span::empty(),
|
||||
constructors_count: 2,
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
prelude.types.insert(
|
||||
BOOL.to_string(),
|
||||
TypeConstructor {
|
||||
|
@ -153,7 +157,7 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
MILLER_LOOP_RESULT.to_string(),
|
||||
TypeConstructor {
|
||||
parameters: vec![],
|
||||
tipo: int(),
|
||||
tipo: miller_loop_result(),
|
||||
location: Span::empty(),
|
||||
module: "".to_string(),
|
||||
public: true,
|
||||
|
@ -412,6 +416,90 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
),
|
||||
);
|
||||
|
||||
// PRNG
|
||||
//
|
||||
// pub type PRNG {
|
||||
// Seeded { seed: ByteArray, choices: ByteArray }
|
||||
// Replayed { cursor: Int, choices: ByteArray }
|
||||
// }
|
||||
|
||||
prelude.types.insert(
|
||||
PRNG.to_string(),
|
||||
TypeConstructor {
|
||||
location: Span::empty(),
|
||||
parameters: vec![],
|
||||
tipo: prng(),
|
||||
module: "".to_string(),
|
||||
public: true,
|
||||
},
|
||||
);
|
||||
|
||||
prelude.types_constructors.insert(
|
||||
PRNG.to_string(),
|
||||
vec!["Seeded".to_string(), "Replayed".to_string()],
|
||||
);
|
||||
|
||||
let mut seeded_fields = HashMap::new();
|
||||
seeded_fields.insert("seed".to_string(), (0, Span::empty()));
|
||||
seeded_fields.insert("choices".to_string(), (1, Span::empty()));
|
||||
prelude.values.insert(
|
||||
"Seeded".to_string(),
|
||||
ValueConstructor::public(
|
||||
function(vec![byte_array(), byte_array()], prng()),
|
||||
ValueConstructorVariant::Record {
|
||||
module: "".into(),
|
||||
name: "Seeded".to_string(),
|
||||
field_map: Some(FieldMap {
|
||||
arity: 2,
|
||||
fields: seeded_fields,
|
||||
is_function: false,
|
||||
}),
|
||||
arity: 2,
|
||||
location: Span::empty(),
|
||||
constructors_count: 2,
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
let mut replayed_fields = HashMap::new();
|
||||
replayed_fields.insert("cursor".to_string(), (0, Span::empty()));
|
||||
replayed_fields.insert("choices".to_string(), (1, Span::empty()));
|
||||
prelude.values.insert(
|
||||
"Replayed".to_string(),
|
||||
ValueConstructor::public(
|
||||
function(vec![int(), byte_array()], prng()),
|
||||
ValueConstructorVariant::Record {
|
||||
module: "".into(),
|
||||
name: "Replayed".to_string(),
|
||||
field_map: Some(FieldMap {
|
||||
arity: 2,
|
||||
fields: replayed_fields,
|
||||
is_function: false,
|
||||
}),
|
||||
arity: 2,
|
||||
location: Span::empty(),
|
||||
constructors_count: 2,
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
// Fuzzer
|
||||
//
|
||||
// pub type Fuzzer<a> =
|
||||
// fn(PRNG) -> Option<(PRNG, a)>
|
||||
|
||||
let fuzzer_value = generic_var(id_gen.next());
|
||||
prelude.types.insert(
|
||||
FUZZER.to_string(),
|
||||
TypeConstructor {
|
||||
location: Span::empty(),
|
||||
parameters: vec![fuzzer_value.clone()],
|
||||
tipo: fuzzer(fuzzer_value),
|
||||
module: "".to_string(),
|
||||
public: true,
|
||||
},
|
||||
);
|
||||
|
||||
prelude
|
||||
}
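The prelude above only declares `PRNG` and `Fuzzer`; the runtime that drives them (seeding, replay, shrinking) lives in `aiken-project` and is not part of this hunk. As a rough, illustrative model only (names and details are assumptions, not the actual implementation), a fuzzer takes a PRNG, returns the advanced PRNG together with a generated value, and records its byte choices so a failing case can be replayed and shrunk:

```rust
// Illustrative Rust model of the two prelude declarations:
//
//   pub type PRNG { Seeded { seed, choices }  Replayed { cursor, choices } }
//   pub type Fuzzer<a> = fn(PRNG) -> Option<(PRNG, a)>
enum Prng {
    // Fresh run: derive pseudo-random bytes from `seed`, appending every draw
    // to `choices` so the exact run can be reproduced later.
    Seeded { seed: Vec<u8>, choices: Vec<u8> },
    // Replay/shrinking: read back a (possibly simplified) `choices` sequence
    // at `cursor`; the fuzzer fails (None) once the sequence is exhausted.
    Replayed { cursor: usize, choices: Vec<u8> },
}

type Fuzzer<A> = Box<dyn Fn(Prng) -> Option<(Prng, A)>>;

// A toy byte fuzzer, only to show the shape of the contract.
fn any_byte() -> Fuzzer<u8> {
    Box::new(|prng| -> Option<(Prng, u8)> {
        match prng {
            Prng::Seeded { seed, mut choices } => {
                // Not a real PRNG step: fold the seed into a single byte.
                let byte = seed.iter().fold(0u8, |acc, b| acc.wrapping_add(*b));
                choices.push(byte);
                Some((Prng::Seeded { seed, choices }, byte))
            }
            Prng::Replayed { cursor, choices } => {
                let byte = *choices.get(cursor)?;
                Some((Prng::Replayed { cursor: cursor + 1, choices }, byte))
            }
        }
    })
}
```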
|
||||
|
||||
|
@ -1133,6 +1221,16 @@ pub fn prelude_data_types(id_gen: &IdGenerator) -> IndexMap<DataTypeKey, TypedDa
|
|||
ordering_data_type,
|
||||
);
|
||||
|
||||
// Bool
|
||||
let bool_data_type = TypedDataType::bool();
|
||||
data_types.insert(
|
||||
DataTypeKey {
|
||||
module_name: "".to_string(),
|
||||
defined_type: "Bool".to_string(),
|
||||
},
|
||||
bool_data_type,
|
||||
);
|
||||
|
||||
// Option
|
||||
let option_data_type = TypedDataType::option(generic_var(id_gen.next()));
|
||||
data_types.insert(
|
||||
|
@ -1143,6 +1241,16 @@ pub fn prelude_data_types(id_gen: &IdGenerator) -> IndexMap<DataTypeKey, TypedDa
|
|||
option_data_type,
|
||||
);
|
||||
|
||||
// PRNG
|
||||
let prng_data_type = TypedDataType::prng();
|
||||
data_types.insert(
|
||||
DataTypeKey {
|
||||
module_name: "".to_string(),
|
||||
defined_type: "PRNG".to_string(),
|
||||
},
|
||||
prng_data_type,
|
||||
);
|
||||
|
||||
data_types
|
||||
}
|
||||
|
||||
|
@ -1213,6 +1321,22 @@ pub fn bool() -> Rc<Type> {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn prng() -> Rc<Type> {
|
||||
Rc::new(Type::App {
|
||||
args: vec![],
|
||||
public: true,
|
||||
name: PRNG.to_string(),
|
||||
module: "".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
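/// The `Type` value for `fn(PRNG) -> Option<(PRNG, a)>`, i.e. the prelude's
/// `Fuzzer<a>`, built from the sibling helpers in this module.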
pub fn fuzzer(a: Rc<Type>) -> Rc<Type> {
|
||||
Rc::new(Type::Fn {
|
||||
args: vec![prng()],
|
||||
ret: option(tuple(vec![prng(), a])),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn list(t: Rc<Type>) -> Rc<Type> {
|
||||
Rc::new(Type::App {
|
||||
public: true,
|
||||
|
|
|
@ -1,18 +1,23 @@
|
|||
use std::rc::Rc;
|
||||
|
||||
use vec1::Vec1;
|
||||
|
||||
use crate::{
|
||||
ast::{
|
||||
self, Annotation, Arg, AssignmentKind, BinOp, ByteArrayFormatPreference, CallArg, Curve,
|
||||
DefinitionLocation, IfBranch, Located, LogicalOpChainKind, ParsedCallArg, Pattern,
|
||||
RecordUpdateSpread, Span, TraceKind, TypedClause, TypedRecordUpdateArg, UnOp,
|
||||
UntypedClause, UntypedRecordUpdateArg,
|
||||
DataType, DataTypeKey, DefinitionLocation, IfBranch, Located, LogicalOpChainKind,
|
||||
ParsedCallArg, Pattern, RecordConstructorArg, RecordUpdateSpread, Span, TraceKind,
|
||||
TypedClause, TypedDataType, TypedRecordUpdateArg, UnOp, UntypedClause,
|
||||
UntypedRecordUpdateArg,
|
||||
},
|
||||
builtins::void,
|
||||
gen_uplc::builder::{
|
||||
check_replaceable_opaque_type, convert_opaque_type, lookup_data_type_by_tipo,
|
||||
},
|
||||
parser::token::Base,
|
||||
tipo::{ModuleValueConstructor, PatternConstructor, Type, ValueConstructor},
|
||||
tipo::{ModuleValueConstructor, PatternConstructor, Type, TypeVar, ValueConstructor},
|
||||
};
|
||||
use indexmap::IndexMap;
|
||||
use pallas::ledger::primitives::alonzo::{Constr, PlutusData};
|
||||
use std::rc::Rc;
|
||||
use uplc::{machine::value::from_pallas_bigint, KeyValuePairs};
|
||||
use vec1::Vec1;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum TypedExpr {
|
||||
|
@ -573,6 +578,185 @@ pub const DEFAULT_TODO_STR: &str = "aiken::todo";
|
|||
pub const DEFAULT_ERROR_STR: &str = "aiken::error";
|
||||
|
||||
impl UntypedExpr {
|
||||
// Reify some opaque 'PlutusData' into an 'UntypedExpr', using a Type annotation. We also need
// an extra map to look up record & enum constructors' names, as they're completely erased in
// their PlutusData form, and the Type annotation only contains the type name.
//
// The function performs some sanity checks to ensure that the type does indeed somewhat
// correspond to the data being given.
|
||||
pub fn reify(
|
||||
data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
|
||||
data: PlutusData,
|
||||
tipo: &Type,
|
||||
) -> Result<Self, String> {
|
||||
if let Type::Var { tipo } = tipo {
|
||||
if let TypeVar::Link { tipo } = &*tipo.borrow() {
|
||||
return UntypedExpr::reify(data_types, data, tipo);
|
||||
}
|
||||
}
|
||||
|
||||
// NOTE: Opaque types are tricky. We can't tell from a type alone whether it is
// opaque or not. We have to look up its datatype definition.
//
// Also, we can't -- in theory -- peek into an opaque type. Moreover, if it
// has a single constructor with a single argument, it is a zero-cost
// wrapper. That means the underlying PlutusData has no residue of that
// wrapper. So we have to manually reconstruct it before crawling further
// down the type tree.
|
||||
if check_replaceable_opaque_type(tipo, data_types) {
|
||||
let DataType { name, .. } = lookup_data_type_by_tipo(data_types, tipo)
|
||||
.expect("Type just disappeared from known types? {tipo:?}");
|
||||
|
||||
let inner_type = convert_opaque_type(&tipo.clone().into(), data_types, false);
|
||||
|
||||
let value = UntypedExpr::reify(data_types, data, &inner_type)?;
|
||||
|
||||
return Ok(UntypedExpr::Call {
|
||||
location: Span::empty(),
|
||||
arguments: vec![CallArg {
|
||||
label: None,
|
||||
location: Span::empty(),
|
||||
value,
|
||||
}],
|
||||
fun: Box::new(UntypedExpr::Var {
|
||||
name,
|
||||
location: Span::empty(),
|
||||
}),
|
||||
});
|
||||
}
|
||||
|
||||
match data {
|
||||
PlutusData::BigInt(ref i) => Ok(UntypedExpr::UInt {
|
||||
location: Span::empty(),
|
||||
base: Base::Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
value: from_pallas_bigint(i).to_string(),
|
||||
}),
|
||||
PlutusData::BoundedBytes(bytes) => Ok(UntypedExpr::ByteArray {
|
||||
location: Span::empty(),
|
||||
bytes: bytes.into(),
|
||||
preferred_format: ByteArrayFormatPreference::HexadecimalString,
|
||||
}),
|
||||
PlutusData::Array(args) => {
|
||||
match tipo {
|
||||
Type::App {
|
||||
module,
|
||||
name,
|
||||
args: type_args,
|
||||
..
|
||||
} if module.is_empty() && name.as_str() == "List" => {
|
||||
if let [inner] = &type_args[..] {
|
||||
Ok(UntypedExpr::List {
|
||||
location: Span::empty(),
|
||||
elements: args
|
||||
.into_iter()
|
||||
.map(|arg| UntypedExpr::reify(data_types, arg, inner))
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
tail: None,
|
||||
})
|
||||
} else {
|
||||
Err("invalid List type annotation: the list has multiple type-parameters.".to_string())
|
||||
}
|
||||
}
|
||||
Type::Tuple { elems } => Ok(UntypedExpr::Tuple {
|
||||
location: Span::empty(),
|
||||
elems: args
|
||||
.into_iter()
|
||||
.zip(elems)
|
||||
.map(|(arg, arg_type)| UntypedExpr::reify(data_types, arg, arg_type))
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
}),
|
||||
_ => Err(format!(
|
||||
"invalid type annotation. expected List but got: {tipo:?}"
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
PlutusData::Constr(Constr {
|
||||
tag,
|
||||
any_constructor,
|
||||
fields,
|
||||
}) => {
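// Plutus Data constructor tags: 121..=127 encode constructor indices 0..=6
// directly, 1280..=1400 encode indices 7..=127, and tag 102 carries an
// explicit `any_constructor` index alongside the fields.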
|
||||
let ix = if tag == 102 {
|
||||
any_constructor.unwrap() as usize
|
||||
} else if tag < 128 {
|
||||
tag as usize - 121
|
||||
} else {
|
||||
tag as usize - 1280 + 7
|
||||
};
|
||||
|
||||
if let Type::App { .. } = tipo {
|
||||
if let Some(DataType { constructors, .. }) =
|
||||
lookup_data_type_by_tipo(data_types, tipo)
|
||||
{
|
||||
let constructor = &constructors[ix];
|
||||
|
||||
return if fields.is_empty() {
|
||||
Ok(UntypedExpr::Var {
|
||||
location: Span::empty(),
|
||||
name: constructor.name.to_string(),
|
||||
})
|
||||
} else {
|
||||
let arguments = fields
|
||||
.into_iter()
|
||||
.zip(constructor.arguments.iter())
|
||||
.map(
|
||||
|(
|
||||
field,
|
||||
RecordConstructorArg {
|
||||
ref label,
|
||||
ref tipo,
|
||||
..
|
||||
},
|
||||
)| {
|
||||
UntypedExpr::reify(data_types, field, tipo).map(|value| {
|
||||
CallArg {
|
||||
label: label.clone(),
|
||||
location: Span::empty(),
|
||||
value,
|
||||
}
|
||||
})
|
||||
},
|
||||
)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Ok(UntypedExpr::Call {
|
||||
location: Span::empty(),
|
||||
arguments,
|
||||
fun: Box::new(UntypedExpr::Var {
|
||||
name: constructor.name.to_string(),
|
||||
location: Span::empty(),
|
||||
}),
|
||||
})
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
Err(format!(
|
||||
"invalid type annotation {tipo:?} for constructor: {tag:?} with {fields:?}"
|
||||
))
|
||||
}
|
||||
|
||||
PlutusData::Map(indef_or_def) => {
|
||||
let kvs = match indef_or_def {
|
||||
KeyValuePairs::Def(kvs) => kvs,
|
||||
KeyValuePairs::Indef(kvs) => kvs,
|
||||
};
|
||||
|
||||
UntypedExpr::reify(
|
||||
data_types,
|
||||
PlutusData::Array(
|
||||
kvs.into_iter()
|
||||
.map(|(k, v)| PlutusData::Array(vec![k, v]))
|
||||
.collect(),
|
||||
),
|
||||
tipo,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
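A hedged sketch of a call-site for `reify` (the real caller is elsewhere in the project and not shown in this diff; presumably it renders values found by failing property tests back into source-level syntax). The `aiken_lang` imports and the pre-built `data_types` map are assumptions:

```rust
use aiken_lang::{
    ast::{DataTypeKey, TypedDataType},
    expr::UntypedExpr,
    tipo::Type,
};
use indexmap::IndexMap;
use pallas::ledger::primitives::alonzo::PlutusData;

// Turn an opaque PlutusData value back into readable, untyped syntax, guided
// by its expected type. For an `Int` this yields `UntypedExpr::UInt { .. }`.
fn render_value(
    data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
    tipo: &Type,
    data: PlutusData,
) -> Result<UntypedExpr, String> {
    UntypedExpr::reify(data_types, data, tipo)
}
```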
|
||||
|
||||
pub fn todo(reason: Option<Self>, location: Span) -> Self {
|
||||
UntypedExpr::Trace {
|
||||
location,
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
use crate::{
|
||||
ast::{
|
||||
Annotation, Arg, ArgName, AssignmentKind, BinOp, ByteArrayFormatPreference, CallArg,
|
||||
ClauseGuard, Constant, CurveType, DataType, Definition, Function, IfBranch,
|
||||
Annotation, Arg, ArgName, ArgVia, AssignmentKind, BinOp, ByteArrayFormatPreference,
|
||||
CallArg, ClauseGuard, Constant, CurveType, DataType, Definition, Function, IfBranch,
|
||||
LogicalOpChainKind, ModuleConstant, Pattern, RecordConstructor, RecordConstructorArg,
|
||||
RecordUpdateSpread, Span, TraceKind, TypeAlias, TypedArg, UnOp, UnqualifiedImport,
|
||||
UntypedArg, UntypedClause, UntypedClauseGuard, UntypedDefinition, UntypedFunction,
|
||||
UntypedModule, UntypedPattern, UntypedRecordUpdateArg, Use, Validator, CAPTURE_VARIABLE,
|
||||
UntypedArg, UntypedArgVia, UntypedClause, UntypedClauseGuard, UntypedDefinition,
|
||||
UntypedFunction, UntypedModule, UntypedPattern, UntypedRecordUpdateArg, Use, Validator,
|
||||
CAPTURE_VARIABLE,
|
||||
},
|
||||
docvec,
|
||||
expr::{FnStyle, UntypedExpr, DEFAULT_ERROR_STR, DEFAULT_TODO_STR},
|
||||
|
@ -231,16 +232,7 @@ impl<'comments> Formatter<'comments> {
|
|||
return_annotation,
|
||||
end_position,
|
||||
..
|
||||
}) => self.definition_fn(
|
||||
public,
|
||||
"fn",
|
||||
name,
|
||||
args,
|
||||
return_annotation,
|
||||
body,
|
||||
*end_position,
|
||||
false,
|
||||
),
|
||||
}) => self.definition_fn(public, name, args, return_annotation, body, *end_position),
|
||||
|
||||
Definition::Validator(Validator {
|
||||
end_position,
|
||||
|
@ -257,16 +249,7 @@ impl<'comments> Formatter<'comments> {
|
|||
end_position,
|
||||
can_error,
|
||||
..
|
||||
}) => self.definition_fn(
|
||||
&false,
|
||||
"test",
|
||||
name,
|
||||
args,
|
||||
&None,
|
||||
body,
|
||||
*end_position,
|
||||
*can_error,
|
||||
),
|
||||
}) => self.definition_test(name, args, body, *end_position, *can_error),
|
||||
|
||||
Definition::TypeAlias(TypeAlias {
|
||||
alias,
|
||||
|
@ -488,25 +471,38 @@ impl<'comments> Formatter<'comments> {
|
|||
commented(doc, comments)
|
||||
}
|
||||
|
||||
fn fn_arg_via<'a, A>(&mut self, arg: &'a ArgVia<A, UntypedExpr>) -> Document<'a> {
|
||||
let comments = self.pop_comments(arg.location.start);
|
||||
|
||||
let doc_comments = self.doc_comments(arg.location.start);
|
||||
|
||||
let doc = arg
|
||||
.arg_name
|
||||
.to_doc()
|
||||
.append(" via ")
|
||||
.append(self.expr(&arg.via, false))
|
||||
.group();
|
||||
|
||||
let doc = doc_comments.append(doc.group()).group();
|
||||
|
||||
commented(doc, comments)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn definition_fn<'a>(
|
||||
&mut self,
|
||||
public: &'a bool,
|
||||
keyword: &'a str,
|
||||
name: &'a str,
|
||||
args: &'a [UntypedArg],
|
||||
return_annotation: &'a Option<Annotation>,
|
||||
body: &'a UntypedExpr,
|
||||
end_location: usize,
|
||||
can_error: bool,
|
||||
) -> Document<'a> {
|
||||
// Fn name and args
|
||||
let head = pub_(*public)
|
||||
.append(keyword)
|
||||
.append(" ")
|
||||
.append("fn ")
|
||||
.append(name)
|
||||
.append(wrap_args(args.iter().map(|e| (self.fn_arg(e), false))))
|
||||
.append(if can_error { " fail" } else { "" });
|
||||
.append(wrap_args(args.iter().map(|e| (self.fn_arg(e), false))));
|
||||
|
||||
// Add return annotation
|
||||
let head = match return_annotation {
|
||||
|
@ -531,6 +527,39 @@ impl<'comments> Formatter<'comments> {
|
|||
.append("}")
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn definition_test<'a>(
|
||||
&mut self,
|
||||
name: &'a str,
|
||||
args: &'a [UntypedArgVia],
|
||||
body: &'a UntypedExpr,
|
||||
end_location: usize,
|
||||
can_error: bool,
|
||||
) -> Document<'a> {
|
||||
// Fn name and args
|
||||
let head = "test "
|
||||
.to_doc()
|
||||
.append(name)
|
||||
.append(wrap_args(args.iter().map(|e| (self.fn_arg_via(e), false))))
|
||||
.append(if can_error { " fail" } else { "" })
|
||||
.group();
|
||||
|
||||
// Format body
|
||||
let body = self.expr(body, true);
|
||||
|
||||
// Add any trailing comments
|
||||
let body = match printed_comments(self.pop_comments(end_location), false) {
|
||||
Some(comments) => body.append(line()).append(comments),
|
||||
None => body,
|
||||
};
|
||||
|
||||
// Stick it all together
|
||||
head.append(" {")
|
||||
.append(line().append(body).nest(INDENT).group())
|
||||
.append(line())
|
||||
.append("}")
|
||||
}
|
||||
|
||||
fn definition_validator<'a>(
|
||||
&mut self,
|
||||
params: &'a [UntypedArg],
|
||||
|
@ -550,13 +579,11 @@ impl<'comments> Formatter<'comments> {
|
|||
let first_fn = self
|
||||
.definition_fn(
|
||||
&false,
|
||||
"fn",
|
||||
&fun.name,
|
||||
&fun.arguments,
|
||||
&fun.return_annotation,
|
||||
&fun.body,
|
||||
fun.end_position,
|
||||
false,
|
||||
)
|
||||
.group();
|
||||
let first_fn = commented(fun_doc_comments.append(first_fn).group(), fun_comments);
|
||||
|
@ -570,13 +597,11 @@ impl<'comments> Formatter<'comments> {
|
|||
let other_fn = self
|
||||
.definition_fn(
|
||||
&false,
|
||||
"fn",
|
||||
&other.name,
|
||||
&other.arguments,
|
||||
&other.return_annotation,
|
||||
&other.body,
|
||||
other.end_position,
|
||||
false,
|
||||
)
|
||||
.group();
|
||||
|
||||
|
|
|
@ -2,25 +2,21 @@ pub mod air;
|
|||
pub mod builder;
|
||||
pub mod tree;
|
||||
|
||||
use petgraph::{algo, Graph};
|
||||
use std::collections::HashMap;
|
||||
use std::rc::Rc;
|
||||
|
||||
use indexmap::{IndexMap, IndexSet};
|
||||
use itertools::Itertools;
|
||||
use uplc::{
|
||||
ast::{Constant as UplcConstant, Name, NamedDeBruijn, Program, Term, Type as UplcType},
|
||||
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER, EXPECT_ON_LIST},
|
||||
builtins::DefaultFunction,
|
||||
machine::cost_model::ExBudget,
|
||||
optimize::aiken_optimize_and_intern,
|
||||
parser::interner::Interner,
|
||||
use self::{
|
||||
air::Air,
|
||||
builder::{
|
||||
air_holds_msg, cast_validator_args, constants_ir, convert_type_to_data, extract_constant,
|
||||
lookup_data_type_by_tipo, modify_cyclic_calls, modify_self_calls, rearrange_list_clauses,
|
||||
AssignmentProperties, ClauseProperties, CodeGenSpecialFuncs, CycleFunctionNames,
|
||||
HoistableFunction, Variant,
|
||||
},
|
||||
tree::{AirMsg, AirTree, TreePath},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
ast::{
|
||||
AssignmentKind, BinOp, Bls12_381Point, Curve, Pattern, Span, TraceLevel, TypedArg,
|
||||
TypedClause, TypedDataType, TypedFunction, TypedPattern, TypedValidator, UnOp,
|
||||
AssignmentKind, BinOp, Bls12_381Point, Curve, DataTypeKey, FunctionAccessKey, Pattern,
|
||||
Span, TraceLevel, Tracing, TypedArg, TypedClause, TypedDataType, TypedFunction,
|
||||
TypedPattern, TypedValidator, UnOp,
|
||||
},
|
||||
builtins::{bool, data, int, list, string, void},
|
||||
expr::TypedExpr,
|
||||
|
@ -41,25 +37,26 @@ use crate::{
|
|||
},
|
||||
IdGenerator,
|
||||
};
|
||||
|
||||
use self::{
|
||||
air::Air,
|
||||
builder::{
|
||||
air_holds_msg, cast_validator_args, constants_ir, convert_type_to_data, extract_constant,
|
||||
lookup_data_type_by_tipo, modify_cyclic_calls, modify_self_calls, rearrange_list_clauses,
|
||||
AssignmentProperties, ClauseProperties, CodeGenSpecialFuncs, CycleFunctionNames,
|
||||
DataTypeKey, FunctionAccessKey, HoistableFunction, Variant,
|
||||
},
|
||||
tree::{AirMsg, AirTree, TreePath},
|
||||
use indexmap::{IndexMap, IndexSet};
|
||||
use itertools::Itertools;
|
||||
use petgraph::{algo, Graph};
|
||||
use std::{collections::HashMap, rc::Rc};
|
||||
use uplc::{
|
||||
ast::{Constant as UplcConstant, Name, NamedDeBruijn, Program, Term, Type as UplcType},
|
||||
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER, EXPECT_ON_LIST},
|
||||
builtins::DefaultFunction,
|
||||
machine::cost_model::ExBudget,
|
||||
optimize::aiken_optimize_and_intern,
|
||||
parser::interner::Interner,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CodeGenerator<'a> {
|
||||
/// immutable index maps
|
||||
functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
|
||||
data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
|
||||
module_types: IndexMap<&'a String, &'a TypeInfo>,
|
||||
module_src: IndexMap<String, (String, LineNumbers)>,
|
||||
functions: IndexMap<&'a FunctionAccessKey, &'a TypedFunction>,
|
||||
data_types: IndexMap<&'a DataTypeKey, &'a TypedDataType>,
|
||||
module_types: IndexMap<&'a str, &'a TypeInfo>,
|
||||
module_src: IndexMap<&'a str, &'a (String, LineNumbers)>,
|
||||
/// immutable option
|
||||
tracing: TraceLevel,
|
||||
/// mutable index maps that are reset
|
||||
|
@ -74,19 +71,23 @@ pub struct CodeGenerator<'a> {
|
|||
}
|
||||
|
||||
impl<'a> CodeGenerator<'a> {
|
||||
pub fn data_types(&self) -> &IndexMap<&'a DataTypeKey, &'a TypedDataType> {
|
||||
&self.data_types
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
|
||||
data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
|
||||
module_types: IndexMap<&'a String, &'a TypeInfo>,
|
||||
module_src: IndexMap<String, (String, LineNumbers)>,
|
||||
tracing: TraceLevel,
|
||||
functions: IndexMap<&'a FunctionAccessKey, &'a TypedFunction>,
|
||||
data_types: IndexMap<&'a DataTypeKey, &'a TypedDataType>,
|
||||
module_types: IndexMap<&'a str, &'a TypeInfo>,
|
||||
module_src: IndexMap<&'a str, &'a (String, LineNumbers)>,
|
||||
tracing: Tracing,
|
||||
) -> Self {
|
||||
CodeGenerator {
|
||||
functions,
|
||||
data_types,
|
||||
module_types,
|
||||
module_src,
|
||||
tracing,
|
||||
tracing: tracing.trace_level(true),
|
||||
defined_functions: IndexMap::new(),
|
||||
special_functions: CodeGenSpecialFuncs::new(),
|
||||
code_gen_functions: IndexMap::new(),
|
||||
|
@ -107,21 +108,6 @@ impl<'a> CodeGenerator<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn insert_function(
|
||||
&mut self,
|
||||
module_name: String,
|
||||
function_name: String,
|
||||
value: &'a TypedFunction,
|
||||
) -> Option<&'a TypedFunction> {
|
||||
self.functions.insert(
|
||||
FunctionAccessKey {
|
||||
module_name,
|
||||
function_name,
|
||||
},
|
||||
value,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn generate(
|
||||
&mut self,
|
||||
TypedValidator {
|
||||
|
@ -130,16 +116,16 @@ impl<'a> CodeGenerator<'a> {
|
|||
params,
|
||||
..
|
||||
}: &TypedValidator,
|
||||
module_name: &String,
|
||||
module_name: &str,
|
||||
) -> Program<Name> {
|
||||
let mut air_tree_fun = self.build(&fun.body, module_name, &[]);
|
||||
|
||||
air_tree_fun = wrap_validator_condition(air_tree_fun, self.tracing);
|
||||
|
||||
let (src_code, lines) = self.module_src.get(module_name).unwrap().clone();
|
||||
let (src_code, lines) = self.module_src.get(module_name).unwrap();
|
||||
|
||||
let mut validator_args_tree =
|
||||
self.check_validator_args(&fun.arguments, true, air_tree_fun, &src_code, &lines);
|
||||
self.check_validator_args(&fun.arguments, true, air_tree_fun, src_code, lines);
|
||||
|
||||
validator_args_tree = AirTree::no_op(validator_args_tree);
|
||||
|
||||
|
@ -162,8 +148,8 @@ impl<'a> CodeGenerator<'a> {
|
|||
&other.arguments,
|
||||
true,
|
||||
air_tree_fun_other,
|
||||
&src_code,
|
||||
&lines,
|
||||
src_code,
|
||||
lines,
|
||||
);
|
||||
|
||||
validator_args_tree_other = AirTree::no_op(validator_args_tree_other);
|
||||
|
@ -198,8 +184,13 @@ impl<'a> CodeGenerator<'a> {
|
|||
self.finalize(term)
|
||||
}
|
||||
|
||||
pub fn generate_test(&mut self, test_body: &TypedExpr, module_name: &String) -> Program<Name> {
|
||||
let mut air_tree = self.build(test_body, module_name, &[]);
|
||||
pub fn generate_raw(
|
||||
&mut self,
|
||||
body: &TypedExpr,
|
||||
args: &[TypedArg],
|
||||
module_name: &str,
|
||||
) -> Program<Name> {
|
||||
let mut air_tree = self.build(body, module_name, &[]);
|
||||
|
||||
air_tree = AirTree::no_op(air_tree);
|
||||
|
||||
|
@ -208,7 +199,13 @@ impl<'a> CodeGenerator<'a> {
|
|||
// optimizations on air tree
|
||||
let full_vec = full_tree.to_vec();
|
||||
|
||||
let term = self.uplc_code_gen(full_vec);
|
||||
let mut term = self.uplc_code_gen(full_vec);
|
||||
|
||||
term = if args.is_empty() {
|
||||
term
|
||||
} else {
|
||||
cast_validator_args(term, args)
|
||||
};
|
||||
|
||||
self.finalize(term)
|
||||
}
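A hedged sketch of how the new `generate_raw` entry point might be driven, e.g. for a property test whose body and arguments have already been typed; the wrapper name and call-site are assumptions, only the `generate_raw` signature comes from this diff:

```rust
use aiken_lang::{ast::TypedArg, expr::TypedExpr, gen_uplc::CodeGenerator};
use uplc::ast::{Name, Program};

// Compile an arbitrary typed expression to a UPLC program. When `args` is
// non-empty (e.g. a test's fuzzer-supplied arguments) they are cast from Data
// like validator parameters; otherwise the body is compiled as-is.
fn compile_body(
    generator: &mut CodeGenerator<'_>,
    body: &TypedExpr,
    args: &[TypedArg],
    module_name: &str,
) -> Program<Name> {
    generator.generate_raw(body, args, module_name)
}
```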
|
||||
|
@ -239,7 +236,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
fn build(
|
||||
&mut self,
|
||||
body: &TypedExpr,
|
||||
module_build_name: &String,
|
||||
module_build_name: &str,
|
||||
context: &[TypedExpr],
|
||||
) -> AirTree {
|
||||
if !context.is_empty() {
|
||||
|
@ -254,7 +251,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
panic!("Dangling expressions without an assignment")
|
||||
};
|
||||
|
||||
let replaced_type = convert_opaque_type(tipo, &self.data_types);
|
||||
let replaced_type = convert_opaque_type(tipo, &self.data_types, true);
|
||||
|
||||
let air_value = self.build(value, module_build_name, &[]);
|
||||
|
||||
|
@ -449,7 +446,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
constructor: ModuleValueConstructor::Fn { name, .. },
|
||||
..
|
||||
} => {
|
||||
let type_info = self.module_types.get(module_name).unwrap();
|
||||
let type_info = self.module_types.get(module_name.as_str()).unwrap();
|
||||
let value = type_info.values.get(name).unwrap();
|
||||
|
||||
let ValueConstructorVariant::ModuleFn { builtin, .. } = &value.variant
|
||||
|
@ -722,7 +719,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
function_name: name.clone(),
|
||||
});
|
||||
|
||||
let type_info = self.module_types.get(module_name).unwrap();
|
||||
let type_info = self.module_types.get(module_name.as_str()).unwrap();
|
||||
|
||||
let value = type_info.values.get(name).unwrap();
|
||||
|
||||
|
@ -894,7 +891,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
if props.full_check {
|
||||
let mut index_map = IndexMap::new();
|
||||
|
||||
let non_opaque_tipo = convert_opaque_type(tipo, &self.data_types);
|
||||
let non_opaque_tipo = convert_opaque_type(tipo, &self.data_types, true);
|
||||
|
||||
let val = AirTree::local_var(name, tipo.clone());
|
||||
|
||||
|
@ -932,7 +929,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
let name = &format!("__discard_expect_{}", name);
|
||||
let mut index_map = IndexMap::new();
|
||||
|
||||
let non_opaque_tipo = convert_opaque_type(tipo, &self.data_types);
|
||||
let non_opaque_tipo = convert_opaque_type(tipo, &self.data_types, true);
|
||||
|
||||
let val = AirTree::local_var(name, tipo.clone());
|
||||
|
||||
|
@ -1325,7 +1322,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
msg_func: Option<AirMsg>,
|
||||
) -> AirTree {
|
||||
assert!(tipo.get_generic().is_none());
|
||||
let tipo = &convert_opaque_type(tipo, &self.data_types);
|
||||
let tipo = &convert_opaque_type(tipo, &self.data_types, true);
|
||||
|
||||
if tipo.is_primitive() {
|
||||
// Since we would return void anyway and ignore then we can just return value here and ignore
|
||||
|
@ -1761,7 +1758,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
final_clause: TypedClause,
|
||||
subject_tipo: &Rc<Type>,
|
||||
props: &mut ClauseProperties,
|
||||
module_name: &String,
|
||||
module_name: &str,
|
||||
) -> AirTree {
|
||||
assert!(
|
||||
!subject_tipo.is_void(),
|
||||
|
@ -2774,7 +2771,7 @@ impl<'a> CodeGenerator<'a> {
|
|||
|
||||
let param = AirTree::local_var(&arg_name, data());
|
||||
|
||||
let actual_type = convert_opaque_type(&arg.tipo, &self.data_types);
|
||||
let actual_type = convert_opaque_type(&arg.tipo, &self.data_types, true);
|
||||
|
||||
let msg_func = match self.tracing {
|
||||
TraceLevel::Silent => None,
|
||||
|
@ -3565,7 +3562,13 @@ impl<'a> CodeGenerator<'a> {
|
|||
let code_gen_func = self
|
||||
.code_gen_functions
|
||||
.get(&generic_function_key.function_name)
|
||||
.unwrap_or_else(|| panic!("Missing Code Gen Function Definition"));
|
||||
.unwrap_or_else(|| {
|
||||
panic!(
|
||||
"Missing function definition for {}. Known definitions: {:?}",
|
||||
generic_function_key.function_name,
|
||||
self.code_gen_functions.keys(),
|
||||
)
|
||||
});
|
||||
|
||||
if !dependency_functions
|
||||
.iter()
|
||||
|
@ -3625,12 +3628,13 @@ impl<'a> CodeGenerator<'a> {
|
|||
let mut function_def_types = function_def
|
||||
.arguments
|
||||
.iter()
|
||||
.map(|arg| convert_opaque_type(&arg.tipo, &self.data_types))
|
||||
.map(|arg| convert_opaque_type(&arg.tipo, &self.data_types, true))
|
||||
.collect_vec();
|
||||
|
||||
function_def_types.push(convert_opaque_type(
|
||||
&function_def.return_type,
|
||||
&self.data_types,
|
||||
true,
|
||||
));
|
||||
|
||||
let mono_types: IndexMap<u64, Rc<Type>> = if !function_def_types.is_empty() {
|
||||
|
@ -3832,7 +3836,9 @@ impl<'a> CodeGenerator<'a> {
|
|||
}
|
||||
|
||||
DefaultFunction::MkCons | DefaultFunction::MkPairData => {
|
||||
unimplemented!("MkCons and MkPairData should be handled by an anon function or using [] or ( a, b, .., z).\n")
|
||||
unimplemented!(
|
||||
"MkCons and MkPairData should be handled by an anon function or using [] or ( a, b, .., z).\n"
|
||||
)
|
||||
}
|
||||
_ => {
|
||||
let mut term: Term<Name> = (*builtin).into();
|
||||
|
@ -4225,7 +4231,9 @@ impl<'a> CodeGenerator<'a> {
|
|||
}
|
||||
|
||||
DefaultFunction::MkCons | DefaultFunction::MkPairData => {
|
||||
unimplemented!("MkCons and MkPairData should be handled by an anon function or using [] or ( a, b, .., z).\n")
|
||||
unimplemented!(
|
||||
"MkCons and MkPairData should be handled by an anon function or using [] or ( a, b, .., z).\n"
|
||||
)
|
||||
}
|
||||
_ => {
|
||||
let mut term: Term<Name> = func.into();
|
||||
|
|
|
@ -1,7 +1,21 @@
|
|||
use std::{collections::HashMap, ops::Deref, rc::Rc};
|
||||
|
||||
use super::{
|
||||
air::{Air, ExpectLevel},
|
||||
tree::{AirMsg, AirTree, TreePath},
|
||||
};
|
||||
use crate::{
|
||||
ast::{
|
||||
AssignmentKind, BinOp, ClauseGuard, Constant, DataType, DataTypeKey, FunctionAccessKey,
|
||||
Pattern, Span, TraceLevel, TypedArg, TypedClause, TypedClauseGuard, TypedDataType,
|
||||
TypedPattern, UnOp,
|
||||
},
|
||||
builtins::{bool, data, function, int, list, string, void},
|
||||
expr::TypedExpr,
|
||||
line_numbers::{LineColumn, LineNumbers},
|
||||
tipo::{PatternConstructor, Type, TypeVar, ValueConstructor, ValueConstructorVariant},
|
||||
};
|
||||
use indexmap::{IndexMap, IndexSet};
|
||||
use itertools::{Itertools, Position};
|
||||
use std::{collections::HashMap, ops::Deref, rc::Rc};
|
||||
use uplc::{
|
||||
ast::{Constant as UplcConstant, Name, Term, Type as UplcType},
|
||||
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER},
|
||||
|
@ -13,27 +27,6 @@ use uplc::{
|
|||
Constr, KeyValuePairs, PlutusData,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
ast::{
|
||||
AssignmentKind, DataType, Pattern, Span, TraceLevel, TypedArg, TypedClause,
|
||||
TypedClauseGuard, TypedDataType, TypedPattern,
|
||||
},
|
||||
builtins::{bool, data, function, int, list, string, void},
|
||||
expr::TypedExpr,
|
||||
line_numbers::{LineColumn, LineNumbers},
|
||||
tipo::{PatternConstructor, TypeVar, ValueConstructor, ValueConstructorVariant},
|
||||
};
|
||||
|
||||
use crate::{
|
||||
ast::{BinOp, ClauseGuard, Constant, UnOp},
|
||||
tipo::Type,
|
||||
};
|
||||
|
||||
use super::{
|
||||
air::{Air, ExpectLevel},
|
||||
tree::{AirMsg, AirTree, TreePath},
|
||||
};
|
||||
|
||||
pub type Variant = String;
|
||||
|
||||
pub type Params = Vec<String>;
|
||||
|
@ -68,18 +61,6 @@ pub enum HoistableFunction {
|
|||
CyclicLink(FunctionAccessKey),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
|
||||
pub struct DataTypeKey {
|
||||
pub module_name: String,
|
||||
pub defined_type: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Hash, Ord, PartialOrd)]
|
||||
pub struct FunctionAccessKey {
|
||||
pub module_name: String,
|
||||
pub function_name: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AssignmentProperties {
|
||||
pub value_type: Rc<Type>,
|
||||
|
@ -312,7 +293,7 @@ pub fn get_generic_id_and_type(tipo: &Type, param: &Type) -> Vec<(u64, Rc<Type>)
|
|||
}
|
||||
|
||||
pub fn lookup_data_type_by_tipo(
|
||||
data_types: &IndexMap<DataTypeKey, &TypedDataType>,
|
||||
data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
|
||||
tipo: &Type,
|
||||
) -> Option<DataType<Rc<Type>>> {
|
||||
match tipo {
|
||||
|
@ -365,7 +346,8 @@ pub fn get_arg_type_name(tipo: &Type) -> String {
|
|||
|
||||
pub fn convert_opaque_type(
|
||||
t: &Rc<Type>,
|
||||
data_types: &IndexMap<DataTypeKey, &TypedDataType>,
|
||||
data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
|
||||
deep: bool,
|
||||
) -> Rc<Type> {
|
||||
if check_replaceable_opaque_type(t, data_types) && matches!(t.as_ref(), Type::App { .. }) {
|
||||
let data_type = lookup_data_type_by_tipo(data_types, t).unwrap();
|
||||
|
@ -382,7 +364,11 @@ pub fn convert_opaque_type(
|
|||
|
||||
let mono_type = find_and_replace_generics(generic_type, &mono_types);
|
||||
|
||||
convert_opaque_type(&mono_type, data_types)
|
||||
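// With `deep == false` only this outer-most opaque layer is unwrapped and the
// result is returned as-is; with `deep == true` the replacement type is
// recursively unwrapped too.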
if deep {
|
||||
convert_opaque_type(&mono_type, data_types, deep)
|
||||
} else {
|
||||
mono_type
|
||||
}
|
||||
} else {
|
||||
match t.as_ref() {
|
||||
Type::App {
|
||||
|
@ -393,7 +379,7 @@ pub fn convert_opaque_type(
|
|||
} => {
|
||||
let mut new_args = vec![];
|
||||
for arg in args {
|
||||
let arg = convert_opaque_type(arg, data_types);
|
||||
let arg = convert_opaque_type(arg, data_types, deep);
|
||||
new_args.push(arg);
|
||||
}
|
||||
Type::App {
|
||||
|
@ -407,11 +393,11 @@ pub fn convert_opaque_type(
|
|||
Type::Fn { args, ret } => {
|
||||
let mut new_args = vec![];
|
||||
for arg in args {
|
||||
let arg = convert_opaque_type(arg, data_types);
|
||||
let arg = convert_opaque_type(arg, data_types, deep);
|
||||
new_args.push(arg);
|
||||
}
|
||||
|
||||
let ret = convert_opaque_type(ret, data_types);
|
||||
let ret = convert_opaque_type(ret, data_types, deep);
|
||||
|
||||
Type::Fn {
|
||||
args: new_args,
|
||||
|
@ -421,7 +407,7 @@ pub fn convert_opaque_type(
|
|||
}
|
||||
Type::Var { tipo: var_tipo } => {
|
||||
if let TypeVar::Link { tipo } = &var_tipo.borrow().clone() {
|
||||
convert_opaque_type(tipo, data_types)
|
||||
convert_opaque_type(tipo, data_types, deep)
|
||||
} else {
|
||||
t.clone()
|
||||
}
|
||||
|
@ -429,7 +415,7 @@ pub fn convert_opaque_type(
|
|||
Type::Tuple { elems } => {
|
||||
let mut new_elems = vec![];
|
||||
for arg in elems {
|
||||
let arg = convert_opaque_type(arg, data_types);
|
||||
let arg = convert_opaque_type(arg, data_types, deep);
|
||||
new_elems.push(arg);
|
||||
}
|
||||
Type::Tuple { elems: new_elems }.into()
|
||||
|
@ -439,8 +425,8 @@ pub fn convert_opaque_type(
|
|||
}
|
||||
|
||||
pub fn check_replaceable_opaque_type(
|
||||
t: &Rc<Type>,
|
||||
data_types: &IndexMap<DataTypeKey, &TypedDataType>,
|
||||
t: &Type,
|
||||
data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
|
||||
) -> bool {
|
||||
let data_type = lookup_data_type_by_tipo(data_types, t);
|
||||
|
||||
|
@ -636,7 +622,7 @@ pub fn monomorphize(air_tree: &mut AirTree, mono_types: &IndexMap<u64, Rc<Type>>
|
|||
|
||||
pub fn erase_opaque_type_operations(
|
||||
air_tree: &mut AirTree,
|
||||
data_types: &IndexMap<DataTypeKey, &TypedDataType>,
|
||||
data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
|
||||
) {
|
||||
if let AirTree::Constr { tipo, args, .. } = air_tree {
|
||||
if check_replaceable_opaque_type(tipo, data_types) {
|
||||
|
@ -652,7 +638,7 @@ pub fn erase_opaque_type_operations(
|
|||
let mut held_types = air_tree.mut_held_types();
|
||||
|
||||
while let Some(tipo) = held_types.pop() {
|
||||
*tipo = convert_opaque_type(tipo, data_types);
|
||||
*tipo = convert_opaque_type(tipo, data_types, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -917,7 +903,7 @@ pub fn modify_cyclic_calls(
|
|||
|
||||
pub fn pattern_has_conditions(
|
||||
pattern: &TypedPattern,
|
||||
data_types: &IndexMap<DataTypeKey, &TypedDataType>,
|
||||
data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
|
||||
) -> bool {
|
||||
match pattern {
|
||||
Pattern::List { .. } | Pattern::Int { .. } => true,
|
||||
|
@ -943,7 +929,7 @@ pub fn pattern_has_conditions(
|
|||
// TODO: write some tests
|
||||
pub fn rearrange_list_clauses(
|
||||
clauses: Vec<TypedClause>,
|
||||
data_types: &IndexMap<DataTypeKey, &TypedDataType>,
|
||||
data_types: &IndexMap<&DataTypeKey, &TypedDataType>,
|
||||
) -> Vec<TypedClause> {
|
||||
let mut sorted_clauses = clauses;
|
||||
|
||||
|
@ -1181,7 +1167,7 @@ pub fn find_list_clause_or_default_first(clauses: &[TypedClause]) -> &TypedClaus
|
|||
.unwrap_or(&clauses[0])
|
||||
}
|
||||
|
||||
pub fn convert_data_to_type(term: Term<Name>, field_type: &Rc<Type>) -> Term<Name> {
|
||||
pub fn convert_data_to_type(term: Term<Name>, field_type: &Type) -> Term<Name> {
|
||||
if field_type.is_int() {
|
||||
Term::un_i_data().apply(term)
|
||||
} else if field_type.is_bytearray() {
|
||||
|
@ -1222,7 +1208,7 @@ pub fn convert_data_to_type(term: Term<Name>, field_type: &Rc<Type>) -> Term<Nam
|
|||
|
||||
pub fn convert_data_to_type_debug(
|
||||
term: Term<Name>,
|
||||
field_type: &Rc<Type>,
|
||||
field_type: &Type,
|
||||
error_term: Term<Name>,
|
||||
) -> Term<Name> {
|
||||
if field_type.is_int() {
|
||||
|
@ -1494,9 +1480,9 @@ pub fn convert_type_to_data(term: Term<Name>, field_type: &Rc<Type>) -> Term<Nam
|
|||
),
|
||||
)
|
||||
} else if field_type.is_bls381_12_g1() {
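// Compress the G1 point to its byte representation first, then wrap the
// bytes as Data.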
|
||||
Term::bls12_381_g1_compress().apply(Term::b_data().apply(term))
|
||||
Term::b_data().apply(Term::bls12_381_g1_compress().apply(term))
|
||||
} else if field_type.is_bls381_12_g2() {
|
||||
Term::bls12_381_g2_compress().apply(Term::b_data().apply(term))
|
||||
Term::b_data().apply(Term::bls12_381_g2_compress().apply(term))
|
||||
} else if field_type.is_ml_result() {
|
||||
panic!("ML Result not supported")
|
||||
} else {
|
||||
|
@ -1955,9 +1941,9 @@ pub fn extract_constant(term: &Term<Name>) -> Option<Rc<UplcConstant>> {
|
|||
}
|
||||
|
||||
pub fn get_src_code_by_span(
|
||||
module_name: &String,
|
||||
module_name: &str,
|
||||
span: &Span,
|
||||
module_src: &IndexMap<String, (String, LineNumbers)>,
|
||||
module_src: &IndexMap<&str, &(String, LineNumbers)>,
|
||||
) -> String {
|
||||
let (src, _) = module_src
|
||||
.get(module_name)
|
||||
|
@ -1969,9 +1955,9 @@ pub fn get_src_code_by_span(
|
|||
}
|
||||
|
||||
pub fn get_line_columns_by_span(
|
||||
module_name: &String,
|
||||
module_name: &str,
|
||||
span: &Span,
|
||||
module_src: &IndexMap<String, (String, LineNumbers)>,
|
||||
module_src: &IndexMap<&str, &(String, LineNumbers)>,
|
||||
) -> LineColumn {
|
||||
let (_, lines) = module_src
|
||||
.get(module_name)
|
||||
|
|
|
@ -0,0 +1,59 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/definition/test.rs
|
||||
description: "Code:\n\ntest foo(x via f, y via g) {\n True\n}\n"
|
||||
---
|
||||
Test(
|
||||
Function {
|
||||
arguments: [
|
||||
ArgVia {
|
||||
arg_name: Named {
|
||||
name: "x",
|
||||
label: "x",
|
||||
location: 9..10,
|
||||
is_validator_param: false,
|
||||
},
|
||||
location: 9..10,
|
||||
via: Var {
|
||||
location: 15..16,
|
||||
name: "f",
|
||||
},
|
||||
tipo: (),
|
||||
annotation: None,
|
||||
},
|
||||
ArgVia {
|
||||
arg_name: Named {
|
||||
name: "y",
|
||||
label: "y",
|
||||
location: 18..19,
|
||||
is_validator_param: false,
|
||||
},
|
||||
location: 18..19,
|
||||
via: Var {
|
||||
location: 24..25,
|
||||
name: "g",
|
||||
},
|
||||
tipo: (),
|
||||
annotation: None,
|
||||
},
|
||||
],
|
||||
body: Var {
|
||||
location: 33..37,
|
||||
name: "True",
|
||||
},
|
||||
doc: None,
|
||||
location: 0..26,
|
||||
name: "foo",
|
||||
public: false,
|
||||
return_annotation: Some(
|
||||
Constructor {
|
||||
location: 0..39,
|
||||
module: None,
|
||||
name: "Bool",
|
||||
arguments: [],
|
||||
},
|
||||
),
|
||||
return_type: (),
|
||||
end_position: 38,
|
||||
can_error: false,
|
||||
},
|
||||
)
|
|
@ -0,0 +1,48 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/definition/test.rs
|
||||
description: "Code:\n\ntest foo(x via fuzz.any_int) {\n True\n}\n"
|
||||
---
|
||||
Test(
|
||||
Function {
|
||||
arguments: [
|
||||
ArgVia {
|
||||
arg_name: Named {
|
||||
name: "x",
|
||||
label: "x",
|
||||
location: 9..10,
|
||||
is_validator_param: false,
|
||||
},
|
||||
location: 9..10,
|
||||
via: FieldAccess {
|
||||
location: 15..27,
|
||||
label: "any_int",
|
||||
container: Var {
|
||||
location: 15..19,
|
||||
name: "fuzz",
|
||||
},
|
||||
},
|
||||
tipo: (),
|
||||
annotation: None,
|
||||
},
|
||||
],
|
||||
body: Var {
|
||||
location: 35..39,
|
||||
name: "True",
|
||||
},
|
||||
doc: None,
|
||||
location: 0..28,
|
||||
name: "foo",
|
||||
public: false,
|
||||
return_annotation: Some(
|
||||
Constructor {
|
||||
location: 0..41,
|
||||
module: None,
|
||||
name: "Bool",
|
||||
arguments: [],
|
||||
},
|
||||
),
|
||||
return_type: (),
|
||||
end_position: 40,
|
||||
can_error: false,
|
||||
},
|
||||
)
|
|
@ -0,0 +1,55 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/definition/test.rs
|
||||
description: "Code:\n\ntest foo(x: Int via foo()) {\n True\n}\n"
|
||||
---
|
||||
Test(
|
||||
Function {
|
||||
arguments: [
|
||||
ArgVia {
|
||||
arg_name: Named {
|
||||
name: "x",
|
||||
label: "x",
|
||||
location: 9..10,
|
||||
is_validator_param: false,
|
||||
},
|
||||
location: 9..15,
|
||||
via: Call {
|
||||
arguments: [],
|
||||
fun: Var {
|
||||
location: 20..23,
|
||||
name: "foo",
|
||||
},
|
||||
location: 20..25,
|
||||
},
|
||||
tipo: (),
|
||||
annotation: Some(
|
||||
Constructor {
|
||||
location: 12..15,
|
||||
module: None,
|
||||
name: "Int",
|
||||
arguments: [],
|
||||
},
|
||||
),
|
||||
},
|
||||
],
|
||||
body: Var {
|
||||
location: 33..37,
|
||||
name: "True",
|
||||
},
|
||||
doc: None,
|
||||
location: 0..26,
|
||||
name: "foo",
|
||||
public: false,
|
||||
return_annotation: Some(
|
||||
Constructor {
|
||||
location: 0..39,
|
||||
module: None,
|
||||
name: "Bool",
|
||||
arguments: [],
|
||||
},
|
||||
),
|
||||
return_type: (),
|
||||
end_position: 38,
|
||||
can_error: false,
|
||||
},
|
||||
)
|
|
@ -0,0 +1,28 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/definition/test.rs
|
||||
description: "Code:\n\ntest foo() {\n True\n}\n"
|
||||
---
|
||||
Test(
|
||||
Function {
|
||||
arguments: [],
|
||||
body: Var {
|
||||
location: 17..21,
|
||||
name: "True",
|
||||
},
|
||||
doc: None,
|
||||
location: 0..10,
|
||||
name: "foo",
|
||||
public: false,
|
||||
return_annotation: Some(
|
||||
Constructor {
|
||||
location: 0..23,
|
||||
module: None,
|
||||
name: "Bool",
|
||||
arguments: [],
|
||||
},
|
||||
),
|
||||
return_type: (),
|
||||
end_position: 22,
|
||||
can_error: false,
|
||||
},
|
||||
)
|
|
@ -37,7 +37,14 @@ Test(
|
|||
location: 0..26,
|
||||
name: "invalid_inputs",
|
||||
public: false,
|
||||
return_annotation: None,
|
||||
return_annotation: Some(
|
||||
Constructor {
|
||||
location: 0..61,
|
||||
module: None,
|
||||
name: "Bool",
|
||||
arguments: [],
|
||||
},
|
||||
),
|
||||
return_type: (),
|
||||
end_position: 60,
|
||||
can_error: true,
|
||||
|
|
|
@ -3,7 +3,13 @@ use chumsky::prelude::*;
|
|||
use crate::{
|
||||
ast,
|
||||
expr::UntypedExpr,
|
||||
parser::{error::ParseError, expr, token::Token},
|
||||
parser::{
|
||||
annotation,
|
||||
chain::{call::parser as call, field_access, tuple_index::parser as tuple_index, Chain},
|
||||
error::ParseError,
|
||||
expr::{self, var},
|
||||
token::Token,
|
||||
},
|
||||
};
|
||||
|
||||
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
|
||||
|
@ -13,8 +19,12 @@ pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError
|
|||
.or_not()
|
||||
.then_ignore(just(Token::Test))
|
||||
.then(select! {Token::Name {name} => name})
|
||||
.then_ignore(just(Token::LeftParen))
|
||||
.then_ignore(just(Token::RightParen))
|
||||
.then(
|
||||
via()
|
||||
.separated_by(just(Token::Comma))
|
||||
.allow_trailing()
|
||||
.delimited_by(just(Token::LeftParen), just(Token::RightParen)),
|
||||
)
|
||||
.then(just(Token::Fail).ignored().or_not())
|
||||
.map_with_span(|name, span| (name, span))
|
||||
.then(
|
||||
|
@ -22,26 +32,88 @@ pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError
|
|||
.or_not()
|
||||
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace)),
|
||||
)
|
||||
.map_with_span(|((((old_fail, name), fail), span_end), body), span| {
|
||||
ast::UntypedDefinition::Test(ast::Function {
|
||||
arguments: vec![],
|
||||
body: body.unwrap_or_else(|| UntypedExpr::todo(None, span)),
|
||||
doc: None,
|
||||
location: span_end,
|
||||
end_position: span.end - 1,
|
||||
.map_with_span(
|
||||
|(((((old_fail, name), arguments), fail), span_end), body), span| {
|
||||
ast::UntypedDefinition::Test(ast::Function {
|
||||
arguments,
|
||||
body: body.unwrap_or_else(|| UntypedExpr::todo(None, span)),
|
||||
doc: None,
|
||||
location: span_end,
|
||||
end_position: span.end - 1,
|
||||
name,
|
||||
public: false,
|
||||
return_annotation: Some(ast::Annotation::boolean(span)),
|
||||
return_type: (),
|
||||
can_error: fail.is_some() || old_fail.is_some(),
|
||||
})
|
||||
},
|
||||
)
|
||||
}
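Taken together, the reworked parser now reads an optional parenthesised, comma-separated list of `via` arguments between the test name and the optional `fail` keyword. A minimal sketch of a definition this grammar accepts, written in the style of the tests at the bottom of this file (the `fuzz.any_int` fuzzer is hypothetical and not part of this diff):

#[test]
fn def_property_test_with_fail() {
    // Sketch: an annotated `via` argument combined with the `fail` keyword.
    assert_definition!(
        r#"
        test prop(x: Int via fuzz.any_int) fail {
          todo
        }
        "#
    );
}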
|
||||
|
||||
pub fn via() -> impl Parser<Token, ast::UntypedArgVia, Error = ParseError> {
|
||||
choice((
|
||||
select! {Token::DiscardName {name} => name}.map_with_span(|name, span| {
|
||||
ast::ArgName::Discarded {
|
||||
label: name.clone(),
|
||||
name,
|
||||
location: span,
|
||||
}
|
||||
}),
|
||||
select! {Token::Name {name} => name}.map_with_span(move |name, location| {
|
||||
ast::ArgName::Named {
|
||||
label: name.clone(),
|
||||
name,
|
||||
location,
|
||||
is_validator_param: false,
|
||||
}
|
||||
}),
|
||||
))
|
||||
.then(just(Token::Colon).ignore_then(annotation()).or_not())
|
||||
.map_with_span(|(arg_name, annotation), location| (arg_name, annotation, location))
|
||||
.then_ignore(just(Token::Via))
|
||||
.then(fuzzer())
|
||||
.map(|((arg_name, annotation, location), via)| ast::ArgVia {
|
||||
arg_name,
|
||||
via,
|
||||
annotation,
|
||||
tipo: (),
|
||||
location,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn fuzzer<'a>() -> impl Parser<Token, UntypedExpr, Error = ParseError> + 'a {
|
||||
recursive(|expression| {
|
||||
let chain = choice((
|
||||
tuple_index(),
|
||||
field_access::parser(),
|
||||
call(expression.clone()),
|
||||
));
|
||||
|
||||
var()
|
||||
.then(chain.repeated())
|
||||
.foldl(|expr, chain| match chain {
|
||||
Chain::Call(args, span) => expr.call(args, span),
|
||||
Chain::FieldAccess(label, span) => expr.field_access(label, span),
|
||||
Chain::TupleIndex(index, span) => expr.tuple_index(index, span),
|
||||
})
|
||||
})
|
||||
}
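Note that `fuzzer()` deliberately restricts what may follow `via`: a variable, optionally extended by call, field-access and tuple-index chains; literals, operators and blocks are rejected. A hedged sketch of a chained fuzzer expression that this grammar admits (module and function names below are hypothetical, not from this diff):

#[test]
fn def_property_test_chained_fuzzer() {
    // Sketch: a field access followed by calls, exactly the chain shapes above.
    assert_definition!(
        r#"
        test prop(xs via fuzz.list(fuzz.int())) {
          True
        }
        "#
    );
}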
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::assert_definition;
|
||||
|
||||
#[test]
|
||||
fn def_test() {
|
||||
assert_definition!(
|
||||
r#"
|
||||
test foo() {
|
||||
True
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn def_test_fail() {
|
||||
assert_definition!(
|
||||
|
@ -54,4 +126,37 @@ mod tests {
|
|||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn def_property_test() {
|
||||
assert_definition!(
|
||||
r#"
|
||||
test foo(x via fuzz.any_int) {
|
||||
True
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn def_invalid_property_test() {
|
||||
assert_definition!(
|
||||
r#"
|
||||
test foo(x via f, y via g) {
|
||||
True
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn def_property_test_annotated_fuzzer() {
|
||||
assert_definition!(
|
||||
r#"
|
||||
test foo(x: Int via foo()) {
|
||||
True
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -187,6 +187,11 @@ pub fn parser(
|
|||
mod tests {
|
||||
use crate::assert_expr;
|
||||
|
||||
#[test]
|
||||
fn record_enum() {
|
||||
assert_expr!(r#"Winter"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn record_create_labeled() {
|
||||
assert_expr!(r#"User { name: "Aiken", age, thing: 2 }"#);
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/expr/record.rs
|
||||
description: "Code:\n\nWinter"
|
||||
---
|
||||
Var {
|
||||
location: 0..6,
|
||||
name: "Winter",
|
||||
}
|
|
@ -240,6 +240,7 @@ pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
|
|||
"type" => Token::Type,
|
||||
"when" => Token::When,
|
||||
"validator" => Token::Validator,
|
||||
"via" => Token::Via,
|
||||
_ => {
|
||||
if s.chars().next().map_or(false, |c| c.is_uppercase()) {
|
||||
Token::UpName {
|
||||
|
|
|
@ -89,6 +89,7 @@ pub enum Token {
|
|||
When,
|
||||
Trace,
|
||||
Validator,
|
||||
Via,
|
||||
}
|
||||
|
||||
impl fmt::Display for Token {
|
||||
|
@ -176,6 +177,7 @@ impl fmt::Display for Token {
|
|||
Token::Test => "test",
|
||||
Token::Fail => "fail",
|
||||
Token::Validator => "validator",
|
||||
Token::Via => "via",
|
||||
};
|
||||
write!(f, "\"{s}\"")
|
||||
}
|
||||
|
|
|
@ -50,6 +50,33 @@ fn check_validator(
|
|||
check_module(ast, ModuleKind::Validator)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bls12_381_elements_in_data_type() {
|
||||
let source_code = r#"
|
||||
type Datum {
|
||||
D0(G1Element)
|
||||
D1(G2Element)
|
||||
}
|
||||
"#;
|
||||
|
||||
assert!(check(parse(source_code)).is_ok())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bls12_381_ml_result_in_data_type() {
|
||||
let source_code = r#"
|
||||
type Datum {
|
||||
thing: MillerLoopResult
|
||||
}
|
||||
"#;
|
||||
|
||||
let res = check(parse(source_code));
|
||||
|
||||
dbg!(&res);
|
||||
|
||||
assert!(matches!(res, Err((_, Error::IllegalTypeInData { .. }))))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn validator_illegal_return_type() {
|
||||
let source_code = r#"
|
||||
|
@ -1175,6 +1202,113 @@ fn pipe_with_wrong_type_and_full_args() {
|
|||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fuzzer_ok_basic() {
|
||||
let source_code = r#"
|
||||
fn int() -> Fuzzer<Int> { todo }
|
||||
|
||||
test prop(n via int()) { todo }
|
||||
"#;
|
||||
|
||||
assert!(check(parse(source_code)).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fuzzer_ok_explicit() {
|
||||
let source_code = r#"
|
||||
fn int(prng: PRNG) -> Option<(PRNG, Int)> { todo }
|
||||
|
||||
test prop(n via int) { todo }
|
||||
"#;
|
||||
|
||||
assert!(check(parse(source_code)).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fuzzer_ok_list() {
|
||||
let source_code = r#"
|
||||
fn int() -> Fuzzer<Int> { todo }
|
||||
fn list(a: Fuzzer<a>) -> Fuzzer<List<a>> { todo }
|
||||
|
||||
test prop(xs via list(int())) { todo }
|
||||
"#;
|
||||
|
||||
assert!(check(parse(source_code)).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fuzzer_err_unbound() {
|
||||
let source_code = r#"
|
||||
fn any() -> Fuzzer<a> { todo }
|
||||
fn list(a: Fuzzer<a>) -> Fuzzer<List<a>> { todo }
|
||||
|
||||
test prop(xs via list(any())) { todo }
|
||||
"#;
|
||||
|
||||
assert!(matches!(
|
||||
check(parse(source_code)),
|
||||
Err((_, Error::GenericLeftAtBoundary { .. }))
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fuzzer_err_unify_1() {
|
||||
let source_code = r#"
|
||||
test prop(xs via Void) { todo }
|
||||
"#;
|
||||
|
||||
assert!(matches!(
|
||||
check(parse(source_code)),
|
||||
Err((
|
||||
_,
|
||||
Error::CouldNotUnify {
|
||||
situation: None,
|
||||
..
|
||||
}
|
||||
))
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fuzzer_err_unify_2() {
|
||||
let source_code = r#"
|
||||
fn any() -> Fuzzer<a> { todo }
|
||||
test prop(xs via any) { todo }
|
||||
"#;
|
||||
|
||||
assert!(matches!(
|
||||
check(parse(source_code)),
|
||||
Err((
|
||||
_,
|
||||
Error::CouldNotUnify {
|
||||
situation: None,
|
||||
..
|
||||
}
|
||||
))
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fuzzer_err_unify_3() {
|
||||
let source_code = r#"
|
||||
fn list(a: Fuzzer<a>) -> Fuzzer<List<a>> { todo }
|
||||
fn int() -> Fuzzer<Int> { todo }
|
||||
|
||||
test prop(xs: Int via list(int())) { todo }
|
||||
"#;
|
||||
|
||||
assert!(matches!(
|
||||
check(parse(source_code)),
|
||||
Err((
|
||||
_,
|
||||
Error::CouldNotUnify {
|
||||
situation: Some(UnifyErrorSituation::FuzzerAnnotationMismatch),
|
||||
..
|
||||
}
|
||||
))
|
||||
))
|
||||
}
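The `fuzzer_ok_*` cases above suggest that `Fuzzer<a>` in the prelude is interchangeable with `fn(PRNG) -> Option<(PRNG, a)>`. A hedged sketch of that assumption (not part of this diff): a fuzzer written against either spelling should feed the same combinator.

#[test]
fn fuzzer_ok_alias_shape() {
    // Assumes `Fuzzer<a>` expands to `fn(PRNG) -> Option<(PRNG, a)>`.
    let source_code = r#"
        fn int(prng: PRNG) -> Option<(PRNG, Int)> { todo }
        fn list(a: Fuzzer<a>) -> Fuzzer<List<a>> { todo }

        test prop(xs via list(int)) { todo }
    "#;

    assert!(check(parse(source_code)).is_ok());
}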
|
||||
|
||||
#[test]
|
||||
fn utf8_hex_literal_warning() {
|
||||
let source_code = r#"
|
||||
|
|
|
@ -57,6 +57,18 @@ pub enum Type {
|
|||
}
|
||||
|
||||
impl Type {
|
||||
pub fn qualifier(&self) -> Option<(String, String)> {
|
||||
match self {
|
||||
Type::App { module, name, .. } => Some((module.to_string(), name.to_string())),
|
||||
Type::Fn { .. } => None,
|
||||
Type::Var { ref tipo } => match &*tipo.borrow() {
|
||||
TypeVar::Link { ref tipo } => tipo.qualifier(),
|
||||
_ => None,
|
||||
},
|
||||
Type::Tuple { .. } => Some((String::new(), "Tuple".to_string())),
|
||||
}
|
||||
}
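As a rough illustration of `qualifier()` (a sketch, not part of this diff): prelude types report an empty module, tuples report a synthetic "Tuple" qualifier, and function types report none. The helper constructors below are assumed to come from this crate's `builtins` module.

#[test]
fn qualifier_sketch() {
    use crate::builtins::{bool, function, int, tuple};

    // `Int` lives in the (empty) prelude module.
    assert_eq!(int().qualifier(), Some((String::new(), "Int".to_string())));
    // Tuples get a synthetic qualifier.
    assert_eq!(
        tuple(vec![int(), int()]).qualifier(),
        Some((String::new(), "Tuple".to_string()))
    );
    // Function types have no qualifier at all.
    assert_eq!(function(vec![], bool()).qualifier(), None);
}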
|
||||
|
||||
pub fn is_result_constructor(&self) -> bool {
|
||||
match self {
|
||||
Type::Fn { ret, .. } => ret.is_result(),
|
||||
|
|
|
@ -10,7 +10,7 @@ use crate::{
|
|||
RecordConstructor, RecordConstructorArg, Span, TypeAlias, TypedDefinition, TypedPattern,
|
||||
UnqualifiedImport, UntypedArg, UntypedDefinition, Use, Validator, PIPE_VARIABLE,
|
||||
},
|
||||
builtins::{self, function, generic_var, tuple, unbound_var},
|
||||
builtins::{function, generic_var, tuple, unbound_var},
|
||||
tipo::fields::FieldMap,
|
||||
IdGenerator,
|
||||
};
|
||||
|
@ -1185,23 +1185,22 @@ impl<'a> Environment<'a> {
|
|||
})
|
||||
}
|
||||
|
||||
Definition::Test(Function { name, location, .. }) => {
|
||||
assert_unique_value_name(names, name, location)?;
|
||||
hydrators.insert(name.clone(), Hydrator::new());
|
||||
let arg_types = vec![];
|
||||
let return_type = builtins::bool();
|
||||
self.insert_variable(
|
||||
name.clone(),
|
||||
ValueConstructorVariant::ModuleFn {
|
||||
name: name.clone(),
|
||||
field_map: None,
|
||||
module: module_name.to_owned(),
|
||||
arity: 0,
|
||||
location: *location,
|
||||
builtin: None,
|
||||
},
|
||||
function(arg_types, return_type),
|
||||
);
|
||||
Definition::Test(test) => {
|
||||
let arguments = test
|
||||
.arguments
|
||||
.iter()
|
||||
.map(|arg| arg.clone().into())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
self.register_function(
|
||||
&test.name,
|
||||
&arguments,
|
||||
&test.return_annotation,
|
||||
module_name,
|
||||
hydrators,
|
||||
names,
|
||||
&test.location,
|
||||
)?;
|
||||
}
|
||||
|
||||
Definition::DataType(DataType {
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
use super::Type;
|
||||
use crate::error::ExtraData;
|
||||
use crate::{
|
||||
ast::{Annotation, BinOp, CallArg, LogicalOpChainKind, Span, UntypedPattern},
|
||||
error::ExtraData,
|
||||
expr::{self, UntypedExpr},
|
||||
format::Formatter,
|
||||
levenshtein,
|
||||
|
@ -259,14 +259,28 @@ You can use '{discard}' and numbers to distinguish between similar names.
|
|||
name: String,
|
||||
},
|
||||
|
||||
#[error("I found a data type that has a function type in it. This is not allowed.\n")]
|
||||
#[error("I found a type definition that has a function type in it. This is not allowed.\n")]
|
||||
#[diagnostic(code("illegal::function_in_type"))]
|
||||
#[diagnostic(help("Data-types can't hold functions. If you want to define method-like functions, group the type definition and the methods under a common namespace in a standalone module."))]
|
||||
#[diagnostic(help(
|
||||
"Data-types can't hold functions. If you want to define method-like functions, group the type definition and the methods under a common namespace in a standalone module."
|
||||
))]
|
||||
FunctionTypeInData {
|
||||
#[label]
|
||||
location: Span,
|
||||
},
|
||||
|
||||
#[error("I found a type definition that has an unsupported type in it.\n")]
|
||||
#[diagnostic(code("illegal::type_in_data"))]
|
||||
#[diagnostic(help(
|
||||
r#"Data-types can't contain type {type_info} because it isn't serializable into a Plutus Data. Yet, this is a strong requirement for types found in compound structures such as List or Tuples."#,
|
||||
type_info = tipo.to_pretty(0).if_supports_color(Stdout, |s| s.red())
|
||||
))]
|
||||
IllegalTypeInData {
|
||||
#[label]
|
||||
location: Span,
|
||||
tipo: Rc<Type>,
|
||||
},
|
||||
|
||||
#[error("I found a discarded expression not bound to a variable.\n")]
|
||||
#[diagnostic(code("implicit_discard"))]
|
||||
#[diagnostic(help(
|
||||
|
@ -465,9 +479,13 @@ If you really meant to return that last expression, try to replace it with the f
|
|||
"I stumbled upon an invalid (non-local) clause guard '{}'.\n",
|
||||
name.if_supports_color(Stdout, |s| s.purple())
|
||||
)]
|
||||
#[diagnostic(url("https://aiken-lang.org/language-tour/control-flow#checking-equality-and-ordering-in-patterns"))]
|
||||
#[diagnostic(url(
|
||||
"https://aiken-lang.org/language-tour/control-flow#checking-equality-and-ordering-in-patterns"
|
||||
))]
|
||||
#[diagnostic(code("illegal::clause_guard"))]
|
||||
#[diagnostic(help("There are some conditions regarding what can be used in a guard. Values must be either local to the function, or defined as module constants. You can't use functions or records in there."))]
|
||||
#[diagnostic(help(
|
||||
"There are some conditions regarding what can be used in a guard. Values must be either local to the function, or defined as module constants. You can't use functions or records in there."
|
||||
))]
|
||||
NonLocalClauseGuardVariable {
|
||||
#[label]
|
||||
location: Span,
|
||||
|
@ -480,7 +498,7 @@ If you really meant to return that last expression, try to replace it with the f
|
|||
#[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#tuples"))]
|
||||
#[diagnostic(code("illegal::tuple_index"))]
|
||||
#[diagnostic(help(
|
||||
r#"Because you used a tuple-index on an element, I assumed it had to be a tuple or some kind, but instead I found:
|
||||
r#"Because you used a tuple-index on an element, I assumed it had to be a tuple but instead I found something of type:
|
||||
|
||||
╰─▶ {type_info}"#,
|
||||
type_info = tipo.to_pretty(0).if_supports_color(Stdout, |s| s.red())
|
||||
|
@ -625,7 +643,9 @@ You can help me by providing a type-annotation for 'x', as such:
|
|||
#[error("I almost got caught in an endless loop while inferring a recursive type.\n")]
|
||||
#[diagnostic(url("https://aiken-lang.org/language-tour/custom-types#type-annotations"))]
|
||||
#[diagnostic(code("missing::type_annotation"))]
|
||||
#[diagnostic(help("I have several aptitudes, but inferring recursive types isn't one them. It is still possible to define recursive types just fine, but I will need a little help in the form of type annotation to infer their types should they show up."))]
|
||||
#[diagnostic(help(
|
||||
"I have several aptitudes, but inferring recursive types isn't one them. It is still possible to define recursive types just fine, but I will need a little help in the form of type annotation to infer their types should they show up."
|
||||
))]
|
||||
RecursiveType {
|
||||
#[label]
|
||||
location: Span,
|
||||
|
@ -934,6 +954,27 @@ The best thing to do from here is to remove it."#))]
|
|||
#[label("{} arguments", if *count < 2 { "not enough" } else { "too many" })]
|
||||
location: Span,
|
||||
},
|
||||
|
||||
#[error("I caught a test with too many arguments.\n")]
|
||||
#[diagnostic(code("illegal::test_arity"))]
|
||||
#[diagnostic(help(
|
||||
"Tests are allowed to have 0 or 1 argument, but no more. Here I've found a test definition with {count} arguments. If you need to provide multiple values to a test, use a Record or a Tuple.",
|
||||
))]
|
||||
IncorrectTestArity {
|
||||
count: usize,
|
||||
#[label("too many arguments")]
|
||||
location: Span,
|
||||
},
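A hedged sketch (not part of this diff) of a check-level test that should trip this variant, written in the style of the `fuzzer_*` check tests shown earlier in this diff; the error points at the second argument and carries the total count:

#[test]
fn test_arity_limit() {
    let source_code = r#"
        fn int() -> Fuzzer<Int> { todo }

        test prop(x via int(), y via int()) { todo }
    "#;

    assert!(matches!(
        check(parse(source_code)),
        Err((_, Error::IncorrectTestArity { count: 2, .. }))
    ));
}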
|
||||
|
||||
#[error("I choked on a generic type left in an outward-facing interface.\n")]
|
||||
#[diagnostic(code("illegal::generic_in_abi"))]
|
||||
#[diagnostic(help(
|
||||
"Functions of the outer-most parts of a project, such as a validator or a property-based test, must be fully instantiated. That means they can no longer carry unbound generic variables. The type must be fully-known at this point since many structural validation must occur to ensure a safe boundary between the on-chain and off-chain worlds."
|
||||
))]
|
||||
GenericLeftAtBoundary {
|
||||
#[label("unbound generic at boundary")]
|
||||
location: Span,
|
||||
},
|
||||
}
|
||||
|
||||
impl ExtraData for Error {
|
||||
|
@ -951,6 +992,7 @@ impl ExtraData for Error {
|
|||
| Error::DuplicateVarInPattern { .. }
|
||||
| Error::ExtraVarInAlternativePattern { .. }
|
||||
| Error::FunctionTypeInData { .. }
|
||||
| Error::IllegalTypeInData { .. }
|
||||
| Error::ImplicitlyDiscardedExpression { .. }
|
||||
| Error::IncorrectFieldsArity { .. }
|
||||
| Error::IncorrectFunctionCallArity { .. }
|
||||
|
@ -984,6 +1026,8 @@ impl ExtraData for Error {
|
|||
| Error::UnnecessarySpreadOperator { .. }
|
||||
| Error::UpdateMultiConstructorType { .. }
|
||||
| Error::ValidatorImported { .. }
|
||||
| Error::IncorrectTestArity { .. }
|
||||
| Error::GenericLeftAtBoundary { .. }
|
||||
| Error::ValidatorMustReturnBool { .. } => None,
|
||||
|
||||
Error::UnknownType { name, .. }
|
||||
|
@ -1181,14 +1225,14 @@ fn suggest_unify(
|
|||
|
||||
(
|
||||
format!(
|
||||
"{} - {}",
|
||||
"{}.{{{}}}",
|
||||
expected_module.if_supports_color(Stdout, |s| s.bright_blue()),
|
||||
expected_str.if_supports_color(Stdout, |s| s.green()),
|
||||
expected_module.if_supports_color(Stdout, |s| s.bright_blue())
|
||||
),
|
||||
format!(
|
||||
"{} - {}",
|
||||
"{}.{{{}}}",
|
||||
given_module.if_supports_color(Stdout, |s| s.bright_blue()),
|
||||
given_str.if_supports_color(Stdout, |s| s.red()),
|
||||
given_module.if_supports_color(Stdout, |s| s.bright_blue())
|
||||
),
|
||||
)
|
||||
}
|
||||
|
@ -1263,6 +1307,21 @@ fn suggest_unify(
|
|||
expected,
|
||||
given
|
||||
},
|
||||
Some(UnifyErrorSituation::FuzzerAnnotationMismatch) => formatdoc! {
|
||||
r#"While comparing the return annotation of a Fuzzer with its actual return type, I realized that both don't match.
|
||||
|
||||
I am inferring the Fuzzer should return:
|
||||
|
||||
{}
|
||||
|
||||
but I found a conflicting annotation saying it returns:
|
||||
|
||||
{}
|
||||
|
||||
Either fix (or remove) the annotation, or adjust the Fuzzer to return the expected type."#,
|
||||
expected,
|
||||
given
|
||||
},
|
||||
None => formatdoc! {
|
||||
r#"I am inferring the following type:
|
||||
|
||||
|
@ -1652,6 +1711,8 @@ pub enum UnifyErrorSituation {
|
|||
|
||||
/// The operands of a binary operator were incorrect.
|
||||
Operator(BinOp),
|
||||
|
||||
FuzzerAnnotationMismatch,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
|
|
|
@ -1860,7 +1860,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
}
|
||||
}
|
||||
|
||||
fn infer_value_constructor(
|
||||
pub fn infer_value_constructor(
|
||||
&mut self,
|
||||
module: &Option<String>,
|
||||
name: &str,
|
||||
|
|
|
@ -1,24 +1,25 @@
|
|||
use std::collections::HashMap;
|
||||
|
||||
use crate::{
|
||||
ast::{
|
||||
ArgName, DataType, Definition, Function, Layer, ModuleConstant, ModuleKind,
|
||||
RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedDefinition,
|
||||
TypedFunction, TypedModule, UntypedDefinition, UntypedModule, Use, Validator,
|
||||
},
|
||||
builtins,
|
||||
builtins::function,
|
||||
line_numbers::LineNumbers,
|
||||
IdGenerator,
|
||||
};
|
||||
|
||||
use super::{
|
||||
environment::{generalise, EntityKind, Environment},
|
||||
error::{Error, Warning},
|
||||
error::{Error, UnifyErrorSituation, Warning},
|
||||
expr::ExprTyper,
|
||||
hydrator::Hydrator,
|
||||
TypeInfo, ValueConstructor, ValueConstructorVariant,
|
||||
};
|
||||
use crate::{
|
||||
ast::{
|
||||
Annotation, Arg, ArgName, ArgVia, DataType, Definition, Function, Layer, ModuleConstant,
|
||||
ModuleKind, RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedArg,
|
||||
TypedDefinition, TypedFunction, TypedModule, UntypedArg, UntypedDefinition, UntypedModule,
|
||||
Use, Validator,
|
||||
},
|
||||
builtins,
|
||||
builtins::{function, fuzzer, generic_var},
|
||||
expr::{TypedExpr, UntypedExpr},
|
||||
line_numbers::LineNumbers,
|
||||
tipo::{Span, Type, TypeVar},
|
||||
IdGenerator,
|
||||
};
|
||||
use std::{borrow::Borrow, collections::HashMap, ops::Deref, rc::Rc};
|
||||
|
||||
impl UntypedModule {
|
||||
pub fn infer(
|
||||
|
@ -159,97 +160,14 @@ fn infer_definition(
|
|||
tracing: Tracing,
|
||||
) -> Result<TypedDefinition, Error> {
|
||||
match def {
|
||||
Definition::Fn(Function {
|
||||
doc,
|
||||
location,
|
||||
name,
|
||||
public,
|
||||
arguments: args,
|
||||
body,
|
||||
return_annotation,
|
||||
end_position,
|
||||
can_error,
|
||||
..
|
||||
}) => {
|
||||
let preregistered_fn = environment
|
||||
.get_variable(&name)
|
||||
.expect("Could not find preregistered type for function");
|
||||
|
||||
let field_map = preregistered_fn.field_map().cloned();
|
||||
|
||||
let preregistered_type = preregistered_fn.tipo.clone();
|
||||
|
||||
let (args_types, return_type) = preregistered_type
|
||||
.function_types()
|
||||
.expect("Preregistered type for fn was not a fn");
|
||||
|
||||
// Infer the type using the preregistered args + return types as a starting point
|
||||
let (tipo, args, body, safe_to_generalise) =
|
||||
environment.in_new_scope(|environment| {
|
||||
let args = args
|
||||
.into_iter()
|
||||
.zip(&args_types)
|
||||
.map(|(arg_name, tipo)| arg_name.set_type(tipo.clone()))
|
||||
.collect();
|
||||
|
||||
let mut expr_typer = ExprTyper::new(environment, lines, tracing);
|
||||
|
||||
expr_typer.hydrator = hydrators
|
||||
.remove(&name)
|
||||
.expect("Could not find hydrator for fn");
|
||||
|
||||
let (args, body) =
|
||||
expr_typer.infer_fn_with_known_types(args, body, Some(return_type))?;
|
||||
|
||||
let args_types = args.iter().map(|a| a.tipo.clone()).collect();
|
||||
|
||||
let tipo = function(args_types, body.tipo());
|
||||
|
||||
let safe_to_generalise = !expr_typer.ungeneralised_function_used;
|
||||
|
||||
Ok::<_, Error>((tipo, args, body, safe_to_generalise))
|
||||
})?;
|
||||
|
||||
// Assert that the inferred type matches the type of any recursive call
|
||||
environment.unify(preregistered_type, tipo.clone(), location, false)?;
|
||||
|
||||
// Generalise the function if safe to do so
|
||||
let tipo = if safe_to_generalise {
|
||||
environment.ungeneralised_functions.remove(&name);
|
||||
|
||||
let tipo = generalise(tipo, 0);
|
||||
|
||||
let module_fn = ValueConstructorVariant::ModuleFn {
|
||||
name: name.clone(),
|
||||
field_map,
|
||||
module: module_name.to_owned(),
|
||||
arity: args.len(),
|
||||
location,
|
||||
builtin: None,
|
||||
};
|
||||
|
||||
environment.insert_variable(name.clone(), module_fn, tipo.clone());
|
||||
|
||||
tipo
|
||||
} else {
|
||||
tipo
|
||||
};
|
||||
|
||||
Ok(Definition::Fn(Function {
|
||||
doc,
|
||||
location,
|
||||
name,
|
||||
public,
|
||||
arguments: args,
|
||||
return_annotation,
|
||||
return_type: tipo
|
||||
.return_type()
|
||||
.expect("Could not find return type for fn"),
|
||||
body,
|
||||
can_error,
|
||||
end_position,
|
||||
}))
|
||||
}
|
||||
Definition::Fn(f) => Ok(Definition::Fn(infer_function(
|
||||
f,
|
||||
module_name,
|
||||
hydrators,
|
||||
environment,
|
||||
lines,
|
||||
tracing,
|
||||
)?)),
|
||||
|
||||
Definition::Validator(Validator {
|
||||
doc,
|
||||
|
@ -412,20 +330,105 @@ fn infer_definition(
|
|||
}
|
||||
|
||||
Definition::Test(f) => {
|
||||
if let Definition::Fn(f) = infer_definition(
|
||||
Definition::Fn(f),
|
||||
let (typed_via, annotation) = match f.arguments.first() {
|
||||
Some(arg) => {
|
||||
if f.arguments.len() > 1 {
|
||||
return Err(Error::IncorrectTestArity {
|
||||
count: f.arguments.len(),
|
||||
location: f.arguments.get(1).expect("arguments.len() > 1").location,
|
||||
});
|
||||
}
|
||||
|
||||
let typed_via =
|
||||
ExprTyper::new(environment, lines, tracing).infer(arg.via.clone())?;
|
||||
|
||||
let (inferred_annotation, inner_type) =
|
||||
infer_fuzzer(environment, &typed_via.tipo(), &arg.via.location())?;
|
||||
|
||||
// Replace the pre-registered type for the test function, to allow inferring
|
||||
// the function body with the right type arguments.
|
||||
let scope = environment
|
||||
.scope
|
||||
.get_mut(&f.name)
|
||||
.expect("Could not find preregistered type for test");
|
||||
if let Type::Fn { ref ret, .. } = scope.tipo.as_ref() {
|
||||
scope.tipo = Rc::new(Type::Fn {
|
||||
ret: ret.clone(),
|
||||
args: vec![inner_type.clone()],
|
||||
})
|
||||
}
|
||||
|
||||
// Ensure that the annotation, if any, matches the type inferred from the
|
||||
// Fuzzer.
|
||||
if let Some(ref provided_annotation) = arg.annotation {
|
||||
let hydrator: &mut Hydrator = hydrators.get_mut(&f.name).unwrap();
|
||||
|
||||
let given =
|
||||
hydrator.type_from_annotation(provided_annotation, environment)?;
|
||||
|
||||
if !provided_annotation.is_logically_equal(&inferred_annotation) {
|
||||
return Err(Error::CouldNotUnify {
|
||||
location: arg.location,
|
||||
expected: inner_type.clone(),
|
||||
given,
|
||||
situation: Some(UnifyErrorSituation::FuzzerAnnotationMismatch),
|
||||
rigid_type_names: hydrator.rigid_names(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Ok((Some((typed_via, inner_type)), Some(inferred_annotation)))
|
||||
}
|
||||
None => Ok((None, None)),
|
||||
}?;
|
||||
|
||||
let typed_f = infer_function(
|
||||
f.into(),
|
||||
module_name,
|
||||
hydrators,
|
||||
environment,
|
||||
lines,
|
||||
tracing,
|
||||
)? {
|
||||
environment.unify(f.return_type.clone(), builtins::bool(), f.location, false)?;
|
||||
)?;
|
||||
|
||||
Ok(Definition::Test(f))
|
||||
} else {
|
||||
unreachable!("test definition inferred as something other than a function?")
|
||||
}
|
||||
environment.unify(
|
||||
typed_f.return_type.clone(),
|
||||
builtins::bool(),
|
||||
typed_f.location,
|
||||
false,
|
||||
)?;
|
||||
|
||||
Ok(Definition::Test(Function {
|
||||
doc: typed_f.doc,
|
||||
location: typed_f.location,
|
||||
name: typed_f.name,
|
||||
public: typed_f.public,
|
||||
arguments: match typed_via {
|
||||
Some((via, tipo)) => {
|
||||
let Arg {
|
||||
arg_name, location, ..
|
||||
} = typed_f
|
||||
.arguments
|
||||
.first()
|
||||
.expect("has exactly one argument")
|
||||
.to_owned();
|
||||
|
||||
vec![ArgVia {
|
||||
annotation,
|
||||
arg_name,
|
||||
location,
|
||||
tipo,
|
||||
via,
|
||||
}]
|
||||
}
|
||||
None => vec![],
|
||||
},
|
||||
return_annotation: typed_f.return_annotation,
|
||||
return_type: typed_f.return_type,
|
||||
body: typed_f.body,
|
||||
can_error: typed_f.can_error,
|
||||
end_position: typed_f.end_position,
|
||||
}))
|
||||
}
|
||||
|
||||
Definition::TypeAlias(TypeAlias {
|
||||
|
@ -545,6 +548,13 @@ fn infer_definition(
|
|||
location: *location,
|
||||
});
|
||||
}
|
||||
|
||||
if tipo.is_ml_result() {
|
||||
return Err(Error::IllegalTypeInData {
|
||||
location: *location,
|
||||
tipo: tipo.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -633,3 +643,204 @@ fn infer_definition(
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn infer_function(
|
||||
f: Function<(), UntypedExpr, UntypedArg>,
|
||||
module_name: &String,
|
||||
hydrators: &mut HashMap<String, Hydrator>,
|
||||
environment: &mut Environment<'_>,
|
||||
lines: &LineNumbers,
|
||||
tracing: Tracing,
|
||||
) -> Result<Function<Rc<Type>, TypedExpr, TypedArg>, Error> {
|
||||
let Function {
|
||||
doc,
|
||||
location,
|
||||
name,
|
||||
public,
|
||||
arguments,
|
||||
body,
|
||||
return_annotation,
|
||||
end_position,
|
||||
can_error,
|
||||
..
|
||||
} = f;
|
||||
|
||||
let preregistered_fn = environment
|
||||
.get_variable(&name)
|
||||
.expect("Could not find preregistered type for function");
|
||||
|
||||
let field_map = preregistered_fn.field_map().cloned();
|
||||
|
||||
let preregistered_type = preregistered_fn.tipo.clone();
|
||||
|
||||
let (args_types, return_type) = preregistered_type
|
||||
.function_types()
|
||||
.expect("Preregistered type for fn was not a fn");
|
||||
|
||||
// Infer the type using the preregistered args + return types as a starting point
|
||||
let (tipo, arguments, body, safe_to_generalise) = environment.in_new_scope(|environment| {
|
||||
let args = arguments
|
||||
.into_iter()
|
||||
.zip(&args_types)
|
||||
.map(|(arg_name, tipo)| arg_name.set_type(tipo.clone()))
|
||||
.collect();
|
||||
|
||||
let mut expr_typer = ExprTyper::new(environment, lines, tracing);
|
||||
|
||||
expr_typer.hydrator = hydrators
|
||||
.remove(&name)
|
||||
.expect("Could not find hydrator for fn");
|
||||
|
||||
let (args, body) = expr_typer.infer_fn_with_known_types(args, body, Some(return_type))?;
|
||||
|
||||
let args_types = args.iter().map(|a| a.tipo.clone()).collect();
|
||||
|
||||
let tipo = function(args_types, body.tipo());
|
||||
|
||||
let safe_to_generalise = !expr_typer.ungeneralised_function_used;
|
||||
|
||||
Ok::<_, Error>((tipo, args, body, safe_to_generalise))
|
||||
})?;
|
||||
|
||||
// Assert that the inferred type matches the type of any recursive call
|
||||
environment.unify(preregistered_type, tipo.clone(), location, false)?;
|
||||
|
||||
// Generalise the function if safe to do so
|
||||
let tipo = if safe_to_generalise {
|
||||
environment.ungeneralised_functions.remove(&name);
|
||||
|
||||
let tipo = generalise(tipo, 0);
|
||||
|
||||
let module_fn = ValueConstructorVariant::ModuleFn {
|
||||
name: name.clone(),
|
||||
field_map,
|
||||
module: module_name.to_owned(),
|
||||
arity: arguments.len(),
|
||||
location,
|
||||
builtin: None,
|
||||
};
|
||||
|
||||
environment.insert_variable(name.clone(), module_fn, tipo.clone());
|
||||
|
||||
tipo
|
||||
} else {
|
||||
tipo
|
||||
};
|
||||
|
||||
Ok(Function {
|
||||
doc,
|
||||
location,
|
||||
name,
|
||||
public,
|
||||
arguments,
|
||||
return_annotation,
|
||||
return_type: tipo
|
||||
.return_type()
|
||||
.expect("Could not find return type for fn"),
|
||||
body,
|
||||
can_error,
|
||||
end_position,
|
||||
})
|
||||
}
|
||||
|
||||
fn infer_fuzzer(
|
||||
environment: &mut Environment<'_>,
|
||||
tipo: &Rc<Type>,
|
||||
location: &Span,
|
||||
) -> Result<(Annotation, Rc<Type>), Error> {
|
||||
let could_not_unify = || Error::CouldNotUnify {
|
||||
location: *location,
|
||||
expected: fuzzer(generic_var(0)),
|
||||
given: tipo.clone(),
|
||||
situation: None,
|
||||
rigid_type_names: HashMap::new(),
|
||||
};
|
||||
|
||||
match tipo.borrow() {
|
||||
Type::Fn { ret, .. } => match ret.borrow() {
|
||||
Type::App {
|
||||
module, name, args, ..
|
||||
} if module.is_empty() && name == "Option" && args.len() == 1 => {
|
||||
match args.first().expect("args.len() == 1").borrow() {
|
||||
Type::Tuple { elems } if elems.len() == 2 => {
|
||||
let wrapped = elems.get(1).expect("Tuple has two elements");
|
||||
|
||||
// NOTE: Although we've drilled through the Fuzzer structure to get here,
|
||||
// we still need to enforce that:
|
||||
//
|
||||
// 1. The Fuzzer is a function with a single argument of type PRNG
|
||||
// 2. It returns not only a wrapped type, but also a new PRNG
|
||||
//
|
||||
// All-in-all, we could bundle those verifications into the
|
||||
// `infer_fuzzer` function, but instead, we can also just piggyback on
|
||||
// `unify` now that we have figured out the type carried by the fuzzer.
|
||||
environment.unify(
|
||||
tipo.clone(),
|
||||
fuzzer(wrapped.clone()),
|
||||
*location,
|
||||
false,
|
||||
)?;
|
||||
|
||||
Ok((annotate_fuzzer(wrapped, location)?, wrapped.clone()))
|
||||
}
|
||||
_ => Err(could_not_unify()),
|
||||
}
|
||||
}
|
||||
_ => Err(could_not_unify()),
|
||||
},
|
||||
|
||||
Type::Var { tipo } => match &*tipo.deref().borrow() {
|
||||
TypeVar::Link { tipo } => infer_fuzzer(environment, tipo, location),
|
||||
_ => Err(Error::GenericLeftAtBoundary {
|
||||
location: *location,
|
||||
}),
|
||||
},
|
||||
|
||||
Type::App { .. } | Type::Tuple { .. } => Err(could_not_unify()),
|
||||
}
|
||||
}
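A hedged sketch (not part of this diff): a via-expression whose type is not function-shaped at all, like the `Void` case in the `fuzzer_err_unify_1` check test earlier, falls through to `could_not_unify()`. The caller's `environment` and `location` are assumed to be in scope, and `builtins::bool()` stands in for any non-function type:

fn expect_not_a_fuzzer(environment: &mut Environment<'_>, location: Span) {
    // A plain `Bool` is not function-shaped, so inference reports the same
    // `CouldNotUnify` against `Fuzzer<a>` that the check test observes.
    let err = infer_fuzzer(environment, &builtins::bool(), &location).unwrap_err();
    assert!(matches!(err, Error::CouldNotUnify { situation: None, .. }));
}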
|
||||
|
||||
fn annotate_fuzzer(tipo: &Type, location: &Span) -> Result<Annotation, Error> {
|
||||
match tipo {
|
||||
Type::App {
|
||||
name, module, args, ..
|
||||
} => {
|
||||
let arguments = args
|
||||
.iter()
|
||||
.map(|arg| annotate_fuzzer(arg, location))
|
||||
.collect::<Result<Vec<Annotation>, _>>()?;
|
||||
Ok(Annotation::Constructor {
|
||||
name: name.to_owned(),
|
||||
module: if module.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(module.to_owned())
|
||||
},
|
||||
arguments,
|
||||
location: *location,
|
||||
})
|
||||
}
|
||||
|
||||
Type::Tuple { elems } => {
|
||||
let elems = elems
|
||||
.iter()
|
||||
.map(|arg| annotate_fuzzer(arg, location))
|
||||
.collect::<Result<Vec<Annotation>, _>>()?;
|
||||
Ok(Annotation::Tuple {
|
||||
elems,
|
||||
location: *location,
|
||||
})
|
||||
}
|
||||
|
||||
Type::Var { tipo } => match &*tipo.deref().borrow() {
|
||||
TypeVar::Link { tipo } => annotate_fuzzer(tipo, location),
|
||||
_ => Err(Error::GenericLeftAtBoundary {
|
||||
location: *location,
|
||||
}),
|
||||
},
|
||||
Type::Fn { .. } => Err(Error::IllegalTypeInData {
|
||||
location: *location,
|
||||
tipo: Rc::new(tipo.clone()),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
use std::{collections::HashMap, path::PathBuf};
|
||||
|
||||
use aiken_lang::{ast::Tracing, line_numbers::LineNumbers};
|
||||
use aiken_project::{config::Config, error::Error as ProjectError, module::CheckedModule, Project};
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct SourceInfo {
|
||||
|
@ -30,9 +29,9 @@ impl LspProject {
|
|||
pub fn compile(&mut self) -> Result<(), Vec<ProjectError>> {
|
||||
let checkpoint = self.project.checkpoint();
|
||||
|
||||
let result = self
|
||||
.project
|
||||
.check(true, None, false, false, Tracing::silent());
|
||||
let result =
|
||||
self.project
|
||||
.check(true, None, false, false, u32::default(), Tracing::silent());
|
||||
|
||||
self.project.restore(checkpoint);
|
||||
|
||||
|
|
|
@ -7,9 +7,9 @@ repository = "https://github.com/aiken-lang/aiken/crates/project"
|
|||
homepage = "https://github.com/aiken-lang/aiken"
|
||||
license = "Apache-2.0"
|
||||
authors = [
|
||||
"Lucas Rosa <x@rvcas.dev>",
|
||||
"Kasey White <kwhitemsg@gmail.com>",
|
||||
"KtorZ <matthias.benkort@gmail.com>",
|
||||
"Lucas Rosa <x@rvcas.dev>",
|
||||
"Kasey White <kwhitemsg@gmail.com>",
|
||||
"KtorZ <matthias.benkort@gmail.com>",
|
||||
]
|
||||
rust-version = "1.66.1"
|
||||
build = "build.rs"
|
||||
|
@ -43,8 +43,11 @@ zip = "0.6.4"
|
|||
|
||||
aiken-lang = { path = "../aiken-lang", version = "1.0.24-alpha" }
|
||||
uplc = { path = '../uplc', version = "1.0.24-alpha" }
|
||||
num-bigint = "0.4.4"
|
||||
cryptoxide = "0.4.4"
|
||||
|
||||
[dev-dependencies]
|
||||
blst = "0.3.11"
|
||||
indoc = "2.0.1"
|
||||
insta.workspace = true
|
||||
proptest = "1.2.0"
|
||||
|
|
|
@ -5,7 +5,7 @@ use super::{
|
|||
};
|
||||
use std::{iter, ops::Deref};
|
||||
use uplc::{
|
||||
ast::{Constant, Data as UplcData, DeBruijn, Term},
|
||||
ast::{Constant, Data as UplcData},
|
||||
PlutusData,
|
||||
};
|
||||
|
||||
|
@ -30,7 +30,7 @@ impl Parameter {
|
|||
pub fn validate(
|
||||
&self,
|
||||
definitions: &Definitions<Annotated<Schema>>,
|
||||
term: &Term<DeBruijn>,
|
||||
constant: &Constant,
|
||||
) -> Result<(), Error> {
|
||||
let schema = &definitions
|
||||
.lookup(&self.schema)
|
||||
|
@ -42,11 +42,7 @@ impl Parameter {
|
|||
})?
|
||||
.annotated;
|
||||
|
||||
if let Term::Constant(constant) = term {
|
||||
validate_schema(schema, definitions, constant)
|
||||
} else {
|
||||
Err(Error::NonConstantParameter)
|
||||
}
|
||||
validate_schema(schema, definitions, constant)
|
||||
}
|
||||
}
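A hedged usage sketch (not part of this diff): callers now hand `validate` a `Constant` directly rather than a `Term`, mirroring the updated unit tests later in this diff; `param` and `definitions` are assumed to come from a decoded blueprint.

fn validate_integer_param(
    param: &Parameter,
    definitions: &Definitions<Annotated<Schema>>,
) -> Result<(), Error> {
    // 42 as Plutus data, wrapped in a UPLC constant as `validate` now expects.
    let arg = Constant::Data(UplcData::integer(42.into()));
    param.validate(definitions, &arg)
}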
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
use crate::blueprint::definitions::{Definitions, Reference};
|
||||
use crate::CheckedModule;
|
||||
use crate::{
|
||||
blueprint::definitions::{Definitions, Reference},
|
||||
CheckedModule,
|
||||
};
|
||||
use aiken_lang::{
|
||||
ast::{Definition, TypedDataType, TypedDefinition},
|
||||
builtins::wrapped_redeemer,
|
||||
|
@ -12,8 +14,7 @@ use serde::{
|
|||
ser::{Serialize, SerializeStruct, Serializer},
|
||||
};
|
||||
use serde_json as json;
|
||||
use std::rc::Rc;
|
||||
use std::{collections::HashMap, fmt, ops::Deref};
|
||||
use std::{collections::HashMap, fmt, ops::Deref, rc::Rc};
|
||||
|
||||
// NOTE: Can be anything BUT 0
|
||||
pub const REDEEMER_DISCRIMINANT: usize = 1;
|
||||
|
@ -385,21 +386,6 @@ impl Annotated<Schema> {
|
|||
Type::Fn { .. } => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
fn into_data(self, type_info: &Type) -> Result<Annotated<Data>, Error> {
|
||||
match self {
|
||||
Annotated {
|
||||
title,
|
||||
description,
|
||||
annotated: Schema::Data(data),
|
||||
} => Ok(Annotated {
|
||||
title,
|
||||
description,
|
||||
annotated: data,
|
||||
}),
|
||||
_ => Err(Error::new(ErrorContext::ExpectedData, type_info)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Data {
|
||||
|
@ -409,28 +395,24 @@ impl Data {
|
|||
type_parameters: &mut HashMap<u64, Rc<Type>>,
|
||||
definitions: &mut Definitions<Annotated<Schema>>,
|
||||
) -> Result<Self, Error> {
|
||||
if data_type.opaque {
|
||||
// NOTE: No breadcrumbs here which is *okay*, as the caller will backtrack
|
||||
// and add the necessary type information.
|
||||
return Err(Error {
|
||||
context: ErrorContext::IllegalOpaqueType,
|
||||
breadcrumbs: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
let mut variants = vec![];
|
||||
|
||||
let len_constructors = data_type.constructors.len();
|
||||
for (index, constructor) in data_type.constructors.iter().enumerate() {
|
||||
let mut fields = vec![];
|
||||
|
||||
let len_arguments = data_type.constructors.len();
|
||||
for field in constructor.arguments.iter() {
|
||||
let reference =
|
||||
Annotated::do_from_type(&field.tipo, modules, type_parameters, definitions)?;
|
||||
|
||||
// NOTE: Opaque data-types with a single variant and a single field are transparent; they
|
||||
// are erased completely at compilation time.
|
||||
if data_type.opaque && len_constructors == 1 && len_arguments == 1 {
|
||||
let schema = definitions
|
||||
.lookup(&reference)
|
||||
.expect("Schema definition registered just above")
|
||||
.clone();
|
||||
definitions.remove(&reference);
|
||||
return Ok(schema.into_data(&field.tipo)?.annotated);
|
||||
}
|
||||
|
||||
fields.push(Annotated {
|
||||
title: field.label.clone(),
|
||||
description: field.doc.clone().map(|s| s.trim().to_string()),
|
||||
|
@ -479,7 +461,7 @@ fn find_data_type(name: &str, definitions: &[TypedDefinition]) -> Option<TypedDa
|
|||
for def in definitions {
|
||||
match def {
|
||||
Definition::DataType(data_type) if name == data_type.name => {
|
||||
return Some(data_type.clone())
|
||||
return Some(data_type.clone());
|
||||
}
|
||||
Definition::Fn { .. }
|
||||
| Definition::Validator { .. }
|
||||
|
@ -928,7 +910,9 @@ pub struct Error {
|
|||
|
||||
#[derive(Debug, PartialEq, Clone, thiserror::Error)]
|
||||
pub enum ErrorContext {
|
||||
#[error("I failed at my own job and couldn't figure out how to generate a specification for a type.")]
|
||||
#[error(
|
||||
"I failed at my own job and couldn't figure out how to generate a specification for a type."
|
||||
)]
|
||||
UnsupportedType,
|
||||
|
||||
#[error("I discovered a type hole where I would expect a concrete type.")]
|
||||
|
@ -942,6 +926,9 @@ pub enum ErrorContext {
|
|||
|
||||
#[error("I figured you tried to export a function in your contract's binary interface.")]
|
||||
UnexpectedFunction,
|
||||
|
||||
#[error("I caught an opaque type trying to escape")]
|
||||
IllegalOpaqueType,
|
||||
}
|
||||
|
||||
impl Error {
|
||||
|
@ -963,6 +950,26 @@ impl Error {
|
|||
|
||||
pub fn help(&self) -> String {
|
||||
match self.context {
|
||||
ErrorContext::IllegalOpaqueType => format!(
|
||||
r#"Opaque types cannot figure anywhere in an outward-facing type like a validator's redeemer or datum. This is because an {opaque} type hides its implementation details, and likely enforce invariants that cannot be expressed only structurally. In particular, the {opaque} type {signature} cannot be safely constructed from any Plutus Data.
|
||||
|
||||
Hence, {opaque} types are forbidden from interface points with the off-chain world. Instead, use an intermediate representation and construct the {opaque} type at runtime using constructors and methods provided for that type (e.g. {Dict}.{from_list}, {Rational}.{new}, ...)."#,
|
||||
opaque = "opaque".if_supports_color(Stdout, |s| s.purple()),
|
||||
signature = Error::fmt_breadcrumbs(&[self
|
||||
.breadcrumbs
|
||||
.last()
|
||||
.expect("always at least one breadcrumb")
|
||||
.to_owned()]),
|
||||
Dict = "Dict"
|
||||
.if_supports_color(Stdout, |s| s.bright_blue())
|
||||
.if_supports_color(Stdout, |s| s.bold()),
|
||||
from_list = "from_list".if_supports_color(Stdout, |s| s.blue()),
|
||||
Rational = "Rational"
|
||||
.if_supports_color(Stdout, |s| s.bright_blue())
|
||||
.if_supports_color(Stdout, |s| s.bold()),
|
||||
new = "new".if_supports_color(Stdout, |s| s.blue()),
|
||||
),
|
||||
|
||||
ErrorContext::UnsupportedType => format!(
|
||||
r#"I do not know how to generate a portable Plutus specification for the following type:
|
||||
|
||||
|
|
|
@ -0,0 +1,22 @@
|
|||
---
|
||||
source: crates/aiken-project/src/blueprint/validator.rs
|
||||
description: "Code:\n\npub opaque type Rational {\n numerator: Int,\n denominator: Int,\n}\n\nvalidator {\n fn opaque_singleton_multi_variants(redeemer: Rational, ctx: Void) {\n True\n }\n}\n"
|
||||
---
|
||||
Schema {
|
||||
error: Error {
|
||||
context: IllegalOpaqueType,
|
||||
breadcrumbs: [
|
||||
App {
|
||||
public: true,
|
||||
module: "test_module",
|
||||
name: "Rational",
|
||||
args: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
location: 117..135,
|
||||
source_code: NamedSource {
|
||||
name: "",
|
||||
source: "<redacted>",
|
||||
,
|
||||
}
|
|
@ -2,32 +2,46 @@
|
|||
source: crates/aiken-project/src/blueprint/validator.rs
|
||||
description: "Code:\n\npub opaque type Dict<key, value> {\n inner: List<(ByteArray, value)>\n}\n\ntype UUID { UUID }\n\nvalidator {\n fn opaque_singleton_variants(redeemer: Dict<UUID, Int>, ctx: Void) {\n True\n }\n}\n"
|
||||
---
|
||||
{
|
||||
"title": "test_module.opaque_singleton_variants",
|
||||
"redeemer": {
|
||||
"title": "redeemer",
|
||||
"schema": {
|
||||
"$ref": "#/definitions/test_module~1Dict$test_module~1UUID_Int"
|
||||
}
|
||||
},
|
||||
"compiledCode": "58f201000032323232323232323223232253330064a22930a99803a4811856616c696461746f722072657475726e65642066616c73650013656323300100100222533300a00114984c8cc00c00cc034008c8c8c94cccccc04000454cc0280205854cc0280205854cc028020584dd68008a998050040b18058011929999998078008a998048038b0a998048038b0a998048038b0a998048038b09bae0013009001300b001533333300a001153300400216137560022a660080042c2a660080042c2a660080042c9211972656465656d65723a20446963743c555549442c20496e743e005734ae7155ceaab9e5573eae855d12ba41",
|
||||
"hash": "c3f68ad7fb4d6c26e1f19799fe0ded6c9904bf04b924835ddad2abf0",
|
||||
"definitions": {
|
||||
"ByteArray": {
|
||||
"dataType": "bytes"
|
||||
Schema {
|
||||
error: Error {
|
||||
context: IllegalOpaqueType,
|
||||
breadcrumbs: [
|
||||
App {
|
||||
public: true,
|
||||
module: "test_module",
|
||||
name: "Dict",
|
||||
args: [
|
||||
Var {
|
||||
tipo: RefCell {
|
||||
value: Link {
|
||||
tipo: App {
|
||||
public: false,
|
||||
module: "test_module",
|
||||
name: "UUID",
|
||||
args: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Var {
|
||||
tipo: RefCell {
|
||||
value: Link {
|
||||
tipo: App {
|
||||
public: true,
|
||||
module: "",
|
||||
name: "Int",
|
||||
args: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
"Int": {
|
||||
"dataType": "integer"
|
||||
},
|
||||
"test_module/Dict$test_module/UUID_Int": {
|
||||
"title": "Dict",
|
||||
"dataType": "map",
|
||||
"keys": {
|
||||
"$ref": "#/definitions/ByteArray"
|
||||
},
|
||||
"values": {
|
||||
"$ref": "#/definitions/Int"
|
||||
}
|
||||
}
|
||||
}
|
||||
location: 137..162,
|
||||
source_code: NamedSource {
|
||||
name: "",
|
||||
source: "<redacted>",
|
||||
,
|
||||
}
|
||||
|
|
|
@ -13,9 +13,8 @@ use aiken_lang::{
|
|||
use miette::NamedSource;
|
||||
use serde;
|
||||
use std::borrow::Borrow;
|
||||
use std::rc::Rc;
|
||||
use uplc::{
|
||||
ast::{Constant, DeBruijn, Program, Term},
|
||||
ast::{Constant, DeBruijn, Program},
|
||||
PlutusData,
|
||||
};
|
||||
|
||||
|
@ -199,14 +198,14 @@ impl Validator {
|
|||
pub fn apply(
|
||||
self,
|
||||
definitions: &Definitions<Annotated<Schema>>,
|
||||
arg: &Term<DeBruijn>,
|
||||
arg: &PlutusData,
|
||||
) -> Result<Self, Error> {
|
||||
match self.parameters.split_first() {
|
||||
None => Err(Error::NoParametersToApply),
|
||||
Some((head, tail)) => {
|
||||
head.validate(definitions, arg)?;
|
||||
head.validate(definitions, &Constant::Data(arg.clone()))?;
|
||||
Ok(Self {
|
||||
program: self.program.apply_term(arg),
|
||||
program: self.program.apply_data(arg.clone()),
|
||||
parameters: tail.to_vec(),
|
||||
..self
|
||||
})
|
||||
|
@ -218,7 +217,7 @@ impl Validator {
|
|||
&self,
|
||||
definitions: &Definitions<Annotated<Schema>>,
|
||||
ask: F,
|
||||
) -> Result<Term<DeBruijn>, Error>
|
||||
) -> Result<PlutusData, Error>
|
||||
where
|
||||
F: Fn(&Annotated<Schema>, &Definitions<Annotated<Schema>>) -> Result<PlutusData, Error>,
|
||||
{
|
||||
|
@ -242,7 +241,7 @@ impl Validator {
|
|||
|
||||
let data = ask(&schema, definitions)?;
|
||||
|
||||
Ok(Term::Constant(Rc::new(Constant::Data(data.clone()))))
|
||||
Ok(data.clone())
|
||||
}
|
||||
}
|
||||
}
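A hedged usage sketch (not part of this diff): parameter application now flows through `PlutusData` end-to-end; `validator` and `definitions` are assumed to come from a decoded blueprint.

fn apply_integer_parameter(
    validator: Validator,
    definitions: &Definitions<Annotated<Schema>>,
) -> Result<Validator, Error> {
    // The argument is plain PlutusData now; no Term/Constant wrapping on the caller side.
    let arg: PlutusData = uplc::ast::Data::integer(42.into());
    validator.apply(definitions, &arg)
}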
|
||||
|
@ -250,17 +249,6 @@ impl Validator {
|
|||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::HashMap;
|
||||
|
||||
use aiken_lang::{
|
||||
self,
|
||||
ast::{TraceLevel, Tracing},
|
||||
builtins,
|
||||
};
|
||||
use uplc::ast as uplc_ast;
|
||||
|
||||
use crate::tests::TestProject;
|
||||
|
||||
use super::{
|
||||
super::{
|
||||
definitions::{Definitions, Reference},
|
||||
|
@ -269,16 +257,22 @@ mod tests {
|
|||
},
|
||||
*,
|
||||
};
|
||||
use crate::tests::TestProject;
|
||||
use aiken_lang::{
|
||||
self,
|
||||
ast::{TraceLevel, Tracing},
|
||||
builtins,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
use uplc::ast as uplc_ast;
|
||||
|
||||
macro_rules! assert_validator {
|
||||
($code:expr) => {
|
||||
let mut project = TestProject::new();
|
||||
|
||||
let modules = CheckedModules::singleton(project.check(project.parse(indoc::indoc! { $code })));
|
||||
let mut generator = modules.new_generator(
|
||||
&project.functions,
|
||||
&project.data_types,
|
||||
&project.module_types,
|
||||
|
||||
let mut generator = project.new_generator(
|
||||
Tracing::All(TraceLevel::Verbose),
|
||||
);
|
||||
|
||||
|
@ -296,15 +290,23 @@ mod tests {
|
|||
let validator = validators
|
||||
.get(0)
|
||||
.unwrap()
|
||||
.as_ref()
|
||||
.expect("Failed to create validator blueprint");
|
||||
.as_ref();
|
||||
|
||||
insta::with_settings!({
|
||||
description => concat!("Code:\n\n", indoc::indoc! { $code }),
|
||||
omit_expression => true
|
||||
}, {
|
||||
insta::assert_json_snapshot!(validator);
|
||||
});
|
||||
match validator {
|
||||
Err(e) => insta::with_settings!({
|
||||
description => concat!("Code:\n\n", indoc::indoc! { $code }),
|
||||
omit_expression => true
|
||||
}, {
|
||||
insta::assert_debug_snapshot!(e);
|
||||
}),
|
||||
|
||||
Ok(validator) => insta::with_settings!({
|
||||
description => concat!("Code:\n\n", indoc::indoc! { $code }),
|
||||
omit_expression => true
|
||||
}, {
|
||||
insta::assert_json_snapshot!(validator);
|
||||
}),
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -514,6 +516,24 @@ mod tests {
|
|||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn opaque_singleton_multi_variants() {
|
||||
assert_validator!(
|
||||
r#"
|
||||
pub opaque type Rational {
|
||||
numerator: Int,
|
||||
denominator: Int,
|
||||
}
|
||||
|
||||
validator {
|
||||
fn opaque_singleton_multi_variants(redeemer: Rational, ctx: Void) {
|
||||
True
|
||||
}
|
||||
}
|
||||
"#
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn nested_data() {
|
||||
assert_validator!(
|
||||
|
@ -599,7 +619,7 @@ mod tests {
|
|||
fn validate_arguments_integer() {
|
||||
let definitions = fixture_definitions();
|
||||
|
||||
let term = Term::data(uplc_ast::Data::integer(42.into()));
|
||||
let term = Constant::Data(uplc_ast::Data::integer(42.into()));
|
||||
|
||||
let param = Parameter {
|
||||
title: None,
|
||||
|
@ -613,7 +633,7 @@ mod tests {
|
|||
fn validate_arguments_bytestring() {
|
||||
let definitions = fixture_definitions();
|
||||
|
||||
let term = Term::data(uplc_ast::Data::bytestring(vec![102, 111, 111]));
|
||||
let term = Constant::Data(uplc_ast::Data::bytestring(vec![102, 111, 111]));
|
||||
|
||||
let param = Parameter {
|
||||
title: None,
|
||||
|
@ -642,7 +662,7 @@ mod tests {
|
|||
.into(),
|
||||
);
|
||||
|
||||
let term = Term::data(uplc_ast::Data::list(vec![
|
||||
let term = Constant::Data(uplc_ast::Data::list(vec![
|
||||
uplc_ast::Data::integer(42.into()),
|
||||
uplc_ast::Data::integer(14.into()),
|
||||
]));
|
||||
|
@ -671,7 +691,7 @@ mod tests {
|
|||
.into(),
|
||||
);
|
||||
|
||||
let term = Term::data(uplc_ast::Data::list(vec![uplc_ast::Data::bytestring(
|
||||
let term = Constant::Data(uplc_ast::Data::list(vec![uplc_ast::Data::bytestring(
|
||||
vec![102, 111, 111],
|
||||
)]));
|
||||
|
||||
|
@ -703,7 +723,7 @@ mod tests {
|
|||
.into(),
|
||||
);
|
||||
|
||||
let term = Term::data(uplc_ast::Data::list(vec![
|
||||
let term = Constant::Data(uplc_ast::Data::list(vec![
|
||||
uplc_ast::Data::integer(42.into()),
|
||||
uplc_ast::Data::bytestring(vec![102, 111, 111]),
|
||||
]));
|
||||
|
@ -734,7 +754,7 @@ mod tests {
|
|||
.into(),
|
||||
);
|
||||
|
||||
let term = Term::data(uplc_ast::Data::map(vec![(
|
||||
let term = Constant::Data(uplc_ast::Data::map(vec![(
|
||||
uplc_ast::Data::bytestring(vec![102, 111, 111]),
|
||||
uplc_ast::Data::integer(42.into()),
|
||||
)]));
|
||||
|
@ -750,7 +770,7 @@ mod tests {
|
|||
|
||||
let definitions = fixture_definitions();
|
||||
|
||||
let term = Term::data(uplc_ast::Data::constr(1, vec![]));
|
||||
let term = Constant::Data(uplc_ast::Data::constr(1, vec![]));
|
||||
|
||||
let param: Parameter = schema.into();
|
||||
|
||||
|
@ -785,7 +805,7 @@ mod tests {
|
|||
.into(),
|
||||
);
|
||||
|
||||
let term = Term::data(uplc_ast::Data::constr(
|
||||
let term = Constant::Data(uplc_ast::Data::constr(
|
||||
0,
|
||||
vec![uplc_ast::Data::constr(0, vec![])],
|
||||
));
|
||||
|
@ -841,7 +861,7 @@ mod tests {
|
|||
.into(),
|
||||
);
|
||||
|
||||
let term = Term::data(uplc_ast::Data::constr(
|
||||
let term = Constant::Data(uplc_ast::Data::constr(
|
||||
1,
|
||||
vec![
|
||||
uplc_ast::Data::integer(14.into()),
|
||||
|
|
|
@ -93,7 +93,7 @@ pub enum Error {
|
|||
path: PathBuf,
|
||||
verbose: bool,
|
||||
src: String,
|
||||
evaluation_hint: Option<String>,
|
||||
assertion: Option<String>,
|
||||
},
|
||||
|
||||
#[error(
|
||||
|
@ -125,6 +125,12 @@ pub enum Error {
|
|||
|
||||
impl Error {
|
||||
pub fn report(&self) {
|
||||
if let Error::TestFailure { verbose, .. } = self {
|
||||
if !verbose {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
println!("{self:?}")
|
||||
}
|
||||
|
||||
|
@ -320,49 +326,54 @@ impl Diagnostic for Error {
|
|||
Error::Parse { error, .. } => error.kind.help(),
|
||||
Error::Type { error, .. } => error.help(),
|
||||
Error::StandardIo(_) => None,
|
||||
Error::MissingManifest { .. } => Some(Box::new("Try running `aiken new <REPOSITORY/PROJECT>` to initialise a project with an example manifest.")),
|
||||
Error::MissingManifest { .. } => Some(Box::new(
|
||||
"Try running `aiken new <REPOSITORY/PROJECT>` to initialise a project with an example manifest.",
|
||||
)),
|
||||
Error::TomlLoading { .. } => None,
|
||||
Error::Format { .. } => None,
|
||||
Error::TestFailure { evaluation_hint, .. } => match evaluation_hint {
|
||||
Error::TestFailure { assertion, .. } => match assertion {
|
||||
None => None,
|
||||
Some(hint) => Some(Box::new(hint.to_string()))
|
||||
Some(hint) => Some(Box::new(hint.to_string())),
|
||||
},
|
||||
Error::Http(_) => None,
|
||||
Error::ZipExtract(_) => None,
|
||||
Error::JoinError(_) => None,
|
||||
Error::UnknownPackageVersion{..} => Some(Box::new("Perhaps, double-check the package repository and version?")),
|
||||
Error::UnableToResolvePackage{..} => Some(Box::new("The network is unavailable and the package isn't in the local cache either. Try connecting to the Internet so I can look it up?")),
|
||||
Error::UnknownPackageVersion { .. } => Some(Box::new(
|
||||
"Perhaps, double-check the package repository and version?",
|
||||
)),
|
||||
Error::UnableToResolvePackage { .. } => Some(Box::new(
|
||||
"The network is unavailable and the package isn't in the local cache either. Try connecting to the Internet so I can look it up?",
|
||||
)),
|
||||
Error::Json(error) => Some(Box::new(format!("{error}"))),
|
||||
Error::MalformedStakeAddress { error } => Some(Box::new(format!("A stake address must be provided either as a base16-encoded string, or as a bech32-encoded string with the 'stake' or 'stake_test' prefix.{hint}", hint = match error {
|
||||
Some(error) => format!("\n\nHere's the error I encountered: {error}"),
|
||||
None => String::new(),
|
||||
}))),
|
||||
Error::NoValidatorNotFound { known_validators } => {
|
||||
Some(Box::new(format!(
|
||||
"Here's a list of all validators I've found in your project. Please double-check this list against the options that you've provided:\n\n{}",
|
||||
known_validators
|
||||
.iter()
|
||||
.map(|title| format!(
|
||||
"→ {title}",
|
||||
title = title.if_supports_color(Stdout, |s| s.purple())
|
||||
))
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n")
|
||||
)))
|
||||
},
|
||||
Error::MoreThanOneValidatorFound { known_validators } => {
|
||||
Some(Box::new(format!(
|
||||
"Here's a list of all validators I've found in your project. Select one of them using the appropriate options:\n\n{}",
|
||||
known_validators
|
||||
.iter()
|
||||
.map(|title| format!(
|
||||
"→ {title}",
|
||||
title = title.if_supports_color(Stdout, |s| s.purple())
|
||||
))
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n")
|
||||
)))
|
||||
},
|
||||
Error::MalformedStakeAddress { error } => Some(Box::new(format!(
|
||||
"A stake address must be provided either as a base16-encoded string, or as a bech32-encoded string with the 'stake' or 'stake_test' prefix.{hint}",
|
||||
hint = match error {
|
||||
Some(error) => format!("\n\nHere's the error I encountered: {error}"),
|
||||
None => String::new(),
|
||||
}
|
||||
))),
|
||||
Error::NoValidatorNotFound { known_validators } => Some(Box::new(format!(
|
||||
"Here's a list of all validators I've found in your project. Please double-check this list against the options that you've provided:\n\n{}",
|
||||
known_validators
|
||||
.iter()
|
||||
.map(|title| format!(
|
||||
"→ {title}",
|
||||
title = title.if_supports_color(Stdout, |s| s.purple())
|
||||
))
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n")
|
||||
))),
|
||||
Error::MoreThanOneValidatorFound { known_validators } => Some(Box::new(format!(
|
||||
"Here's a list of all validators I've found in your project. Select one of them using the appropriate options:\n\n{}",
|
||||
known_validators
|
||||
.iter()
|
||||
.map(|title| format!(
|
||||
"→ {title}",
|
||||
title = title.if_supports_color(Stdout, |s| s.purple())
|
||||
))
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n")
|
||||
))),
|
||||
Error::Module(e) => e.help(),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,21 +10,34 @@ pub mod options;
|
|||
pub mod package_name;
|
||||
pub mod paths;
|
||||
pub mod pretty;
|
||||
pub mod script;
|
||||
pub mod telemetry;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
pub mod test_framework;
|
||||
pub mod utils;
|
||||
pub mod watch;
|
||||
|
||||
use crate::blueprint::{
|
||||
definitions::Definitions,
|
||||
schema::{Annotated, Schema},
|
||||
Blueprint,
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use crate::{
|
||||
blueprint::{
|
||||
definitions::Definitions,
|
||||
schema::{Annotated, Schema},
|
||||
Blueprint,
|
||||
},
|
||||
config::Config,
|
||||
error::{Error, Warning},
|
||||
module::{CheckedModule, CheckedModules, ParsedModule, ParsedModules},
|
||||
telemetry::Event,
|
||||
};
|
||||
use aiken_lang::{
|
||||
ast::{Definition, Function, ModuleKind, Tracing, TypedDataType, TypedFunction, Validator},
|
||||
ast::{
|
||||
DataTypeKey, Definition, FunctionAccessKey, ModuleKind, Tracing, TypedDataType,
|
||||
TypedFunction,
|
||||
},
|
||||
builtins,
|
||||
gen_uplc::builder::{DataTypeKey, FunctionAccessKey},
|
||||
expr::UntypedExpr,
|
||||
gen_uplc::CodeGenerator,
|
||||
line_numbers::LineNumbers,
|
||||
tipo::TypeInfo,
|
||||
IdGenerator,
|
||||
};
|
||||
|
@ -37,8 +50,6 @@ use pallas::ledger::{
    primitives::babbage::{self as cardano, PolicyId},
    traverse::ComputeHash,
};

use script::{EvalHint, EvalInfo, Script};
use std::{
    collections::HashMap,
    fs::{self, File},
@ -46,19 +57,12 @@ use std::{
    path::{Path, PathBuf},
};
use telemetry::EventListener;
use test_framework::{Test, TestResult};
use uplc::{
    ast::{DeBruijn, Name, Program, Term},
    machine::cost_model::ExBudget,
    ast::{Name, Program},
    PlutusData,
};

use crate::{
    config::Config,
    error::{Error, Warning},
    module::{CheckedModule, CheckedModules, ParsedModule, ParsedModules},
    telemetry::Event,
};

#[derive(Debug)]
pub struct Source {
    pub path: PathBuf,
@ -87,6 +91,7 @@ where
    event_listener: T,
    functions: IndexMap<FunctionAccessKey, TypedFunction>,
    data_types: IndexMap<DataTypeKey, TypedDataType>,
    module_sources: HashMap<String, (String, LineNumbers)>,
}

impl<T> Project<T>
@ -125,9 +130,20 @@ where
            event_listener,
            functions,
            data_types,
            module_sources: HashMap::new(),
        }
    }

    pub fn new_generator(&'_ self, tracing: Tracing) -> CodeGenerator<'_> {
        CodeGenerator::new(
            utils::indexmap::as_ref_values(&self.functions),
            utils::indexmap::as_ref_values(&self.data_types),
            utils::indexmap::as_str_ref_values(&self.module_types),
            utils::indexmap::as_str_ref_values(&self.module_sources),
            tracing,
        )
    }

    pub fn warnings(&mut self) -> Vec<Warning> {
        std::mem::take(&mut self.warnings)
    }
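For context on the hunk above: `utils::indexmap::as_ref_values` and `as_str_ref_values` (introduced later in this commit under `crates/aiken-project/src/utils/indexmap.rs`) merely borrow the owned maps so the `CodeGenerator` can hold references without taking ownership. A minimal, self-contained sketch of what that conversion amounts to, using plain `String`/`i64` stand-ins instead of the real aiken types (this is an illustration, not the project's actual implementation):

    use indexmap::IndexMap;
    use std::collections::HashMap;

    // Borrow every (key, value) pair of an owned IndexMap.
    fn as_ref_values<K: std::hash::Hash + Eq, V>(map: &IndexMap<K, V>) -> IndexMap<&K, &V> {
        map.iter().collect()
    }

    // Same idea for a HashMap keyed by String, borrowing keys as &str.
    fn as_str_ref_values<V>(map: &HashMap<String, V>) -> IndexMap<&str, &V> {
        map.iter().map(|(k, v)| (k.as_str(), v)).collect()
    }

    fn main() {
        let mut functions: IndexMap<String, i64> = IndexMap::new();
        functions.insert("my_module.my_fn".to_string(), 42);

        let mut sources: HashMap<String, String> = HashMap::new();
        sources.insert("my_module".to_string(), "fn my_fn() { .. }".to_string());

        // The owned maps stay put; only references are handed out.
        let refs = as_ref_values(&functions);
        let srcs = as_str_ref_values(&sources);
        assert_eq!(refs.len(), 1);
        assert_eq!(srcs.len(), 1);
    }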
@ -210,6 +226,7 @@ where
        match_tests: Option<Vec<String>>,
        verbose: bool,
        exact_match: bool,
        seed: u32,
        tracing: Tracing,
    ) -> Result<(), Vec<Error>> {
        let options = Options {
@ -221,6 +238,7 @@
                match_tests,
                verbose,
                exact_match,
                seed,
            }
            },
        };
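The `seed` recorded in these options ultimately comes from the CLI: as the `aiken check` changes further down show, a missing `--seed` is replaced by a randomly drawn one so a failing property test can still be replayed. A tiny sketch of that defaulting, using the `rand` crate that appears in the CLI's Cargo.toml later in this commit (illustrative only):

    use rand::Rng;

    fn resolve_seed(seed: Option<u32>) -> u32 {
        let mut rng = rand::thread_rng();
        // Keep an explicit seed when given so the run is reproducible.
        seed.unwrap_or_else(|| rng.gen())
    }

    fn main() {
        println!("seed = {}", resolve_seed(None));
        assert_eq!(resolve_seed(Some(42)), 42);
    }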
@ -279,12 +297,7 @@ where
            m.attach_doc_and_module_comments();
        });

        let mut generator = self.checked_modules.new_generator(
            &self.functions,
            &self.data_types,
            &self.module_types,
            options.tracing,
        );
        let mut generator = self.new_generator(options.tracing);

        let blueprint = Blueprint::new(&self.config, &self.checked_modules, &mut generator)
            .map_err(Error::Blueprint)?;
@ -311,6 +324,7 @@ where
                match_tests,
                verbose,
                exact_match,
                seed,
            } => {
                let tests =
                    self.collect_tests(verbose, match_tests, exact_match, options.tracing)?;
@ -319,31 +333,21 @@ where
                    self.event_listener.handle_event(Event::RunningTests);
                }

                let results = self.eval_scripts(tests);
                let tests = self.run_tests(tests, seed);

                let errors: Vec<Error> = results
                let errors: Vec<Error> = tests
                    .iter()
                    .filter_map(|e| {
                        if e.success {
                        if e.is_success() {
                            None
                        } else {
                            Some(Error::TestFailure {
                                name: e.script.name.clone(),
                                path: e.script.input_path.clone(),
                                evaluation_hint: e
                                    .script
                                    .evaluation_hint
                                    .as_ref()
                                    .map(|hint| hint.to_string()),
                                src: e.script.program.to_pretty(),
                                verbose,
                            })
                            Some(e.into_error(verbose))
                        }
                    })
                    .collect();

                self.event_listener
                    .handle_event(Event::FinishedTests { tests: results });
                    .handle_event(Event::FinishedTests { tests });

                if !errors.is_empty() {
                    Err(errors)
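The `filter_map` in the hunk above is the whole error-reporting story after this change: anything that is not a success is turned into an `Error` by `into_error`, instead of the old hand-built `Error::TestFailure`. A stand-alone sketch of that shape, with a hypothetical `Outcome` type standing in for `TestResult` (names here are illustrative, not the real API):

    // Hypothetical stand-in for TestResult, just to show the filter_map shape.
    struct Outcome {
        name: String,
        success: bool,
    }

    impl Outcome {
        fn is_success(&self) -> bool {
            self.success
        }

        fn into_error(&self, verbose: bool) -> String {
            if verbose {
                format!("test '{}' failed (verbose)", self.name)
            } else {
                format!("test '{}' failed", self.name)
            }
        }
    }

    fn collect_errors(outcomes: &[Outcome], verbose: bool) -> Vec<String> {
        outcomes
            .iter()
            .filter_map(|o| {
                if o.is_success() {
                    None
                } else {
                    Some(o.into_error(verbose))
                }
            })
            .collect()
    }

    fn main() {
        let outcomes = vec![
            Outcome { name: "ok".into(), success: true },
            Outcome { name: "ko".into(), success: false },
        ];
        assert_eq!(collect_errors(&outcomes, false), vec!["test 'ko' failed".to_string()]);
    }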
@ -359,6 +363,7 @@ where
        &self,
        title: Option<&String>,
        stake_address: Option<&String>,
        mainnet: bool,
    ) -> Result<ShelleyAddress, Error> {
        // Parse stake address
        let stake_address = stake_address
@ -398,9 +403,15 @@ where
                if n > 0 {
                    Err(blueprint::error::Error::ParameterizedValidator { n }.into())
                } else {
                    let network = if mainnet {
                        Network::Mainnet
                    } else {
                        Network::Testnet
                    };

                    Ok(validator
                        .program
                        .address(Network::Testnet, delegation_part.to_owned()))
                        .address(network, delegation_part.to_owned()))
                }
            })
    }
@ -437,7 +448,7 @@
        &self,
        title: Option<&String>,
        ask: F,
    ) -> Result<Term<DeBruijn>, Error>
    ) -> Result<PlutusData, Error>
    where
        F: Fn(
            &Annotated<Schema>,
@ -454,19 +465,19 @@
            |known_validators| Error::MoreThanOneValidatorFound { known_validators };
        let when_missing = |known_validators| Error::NoValidatorNotFound { known_validators };

        let term = blueprint.with_validator(title, when_too_many, when_missing, |validator| {
        let data = blueprint.with_validator(title, when_too_many, when_missing, |validator| {
            validator
                .ask_next_parameter(&blueprint.definitions, &ask)
                .map_err(|e| e.into())
        })?;

        Ok(term)
        Ok(data)
    }

    pub fn apply_parameter(
        &self,
        title: Option<&String>,
        param: &Term<DeBruijn>,
        param: &PlutusData,
    ) -> Result<Blueprint, Error> {
        // Read blueprint
        let blueprint = File::open(self.blueprint_path())
@ -649,11 +660,18 @@ where

        self.warnings.extend(type_warnings);

        // Register the types from this module so they can be imported into
        // other modules.
        // Register module sources for an easier access later.
        self.module_sources
            .insert(name.clone(), (code.clone(), LineNumbers::new(&code)));

        // Register the types from this module so they can be
        // imported into other modules.
        self.module_types
            .insert(name.clone(), ast.type_info.clone());

        // Register function definitions & data-types for easier access later.
        ast.register_definitions(&mut self.functions, &mut self.data_types);

        let checked_module = CheckedModule {
            kind,
            extra,
@ -677,9 +695,8 @@ where
        match_tests: Option<Vec<String>>,
        exact_match: bool,
        tracing: Tracing,
    ) -> Result<Vec<Script>, Error> {
    ) -> Result<Vec<Test>, Error> {
        let mut scripts = Vec::new();
        let mut testable_validators = Vec::new();

        let match_tests = match_tests.map(|mt| {
            mt.into_iter()
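The body of `collect_tests` that follows filters test definitions against the `-m` patterns: an empty module pattern matches every module, and test names match either exactly or by substring depending on `--exact-match`. A small sketch of that predicate (the types and names here are illustrative stand-ins, not the real ast types):

    /// One parsed `-m` pattern: a module part and optional test names.
    struct MatchPattern {
        module: String,
        names: Option<Vec<String>>,
    }

    fn is_match(patterns: &[MatchPattern], module_name: &str, test_name: &str, exact: bool) -> bool {
        patterns.iter().any(|p| {
            let matched_module = p.module.is_empty() || module_name.contains(&p.module);

            let matched_name = match &p.names {
                None => true,
                Some(names) => names.iter().any(|name| {
                    if exact {
                        name == test_name
                    } else {
                        test_name.contains(name.as_str())
                    }
                }),
            };

            matched_module && matched_name
        })
    }

    fn main() {
        let patterns = vec![MatchPattern { module: "list".into(), names: Some(vec!["map".into()]) }];
        assert!(is_match(&patterns, "aiken/list", "map_2", false));
        assert!(!is_match(&patterns, "aiken/list", "map_2", true));
    }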
@ -711,161 +728,83 @@ where
|
|||
}
|
||||
|
||||
for def in checked_module.ast.definitions() {
|
||||
match def {
|
||||
Definition::Validator(Validator {
|
||||
params,
|
||||
fun,
|
||||
other_fun,
|
||||
..
|
||||
}) => {
|
||||
let mut fun = fun.clone();
|
||||
if let Definition::Test(func) = def {
|
||||
if let Some(match_tests) = &match_tests {
|
||||
let is_match = match_tests.iter().any(|(module, names)| {
|
||||
let matched_module =
|
||||
module.is_empty() || checked_module.name.contains(module);
|
||||
|
||||
fun.arguments = params.clone().into_iter().chain(fun.arguments).collect();
|
||||
let matched_name = match names {
|
||||
None => true,
|
||||
Some(names) => names.iter().any(|name| {
|
||||
if exact_match {
|
||||
name == &func.name
|
||||
} else {
|
||||
func.name.contains(name)
|
||||
}
|
||||
}),
|
||||
};
|
||||
|
||||
testable_validators.push((&checked_module.name, fun));
|
||||
matched_module && matched_name
|
||||
});
|
||||
|
||||
if let Some(other) = other_fun {
|
||||
let mut other = other.clone();
|
||||
|
||||
other.arguments =
|
||||
params.clone().into_iter().chain(other.arguments).collect();
|
||||
|
||||
testable_validators.push((&checked_module.name, other));
|
||||
}
|
||||
}
|
||||
Definition::Test(func) => {
|
||||
if let Some(match_tests) = &match_tests {
|
||||
let is_match = match_tests.iter().any(|(module, names)| {
|
||||
let matched_module =
|
||||
module.is_empty() || checked_module.name.contains(module);
|
||||
|
||||
let matched_name = match names {
|
||||
None => true,
|
||||
Some(names) => names.iter().any(|name| {
|
||||
if exact_match {
|
||||
name == &func.name
|
||||
} else {
|
||||
func.name.contains(name)
|
||||
}
|
||||
}),
|
||||
};
|
||||
|
||||
matched_module && matched_name
|
||||
});
|
||||
|
||||
if is_match {
|
||||
scripts.push((
|
||||
checked_module.input_path.clone(),
|
||||
checked_module.name.clone(),
|
||||
func,
|
||||
))
|
||||
}
|
||||
} else {
|
||||
if is_match {
|
||||
scripts.push((
|
||||
checked_module.input_path.clone(),
|
||||
checked_module.name.clone(),
|
||||
func,
|
||||
))
|
||||
}
|
||||
} else {
|
||||
scripts.push((
|
||||
checked_module.input_path.clone(),
|
||||
checked_module.name.clone(),
|
||||
func,
|
||||
))
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut programs = Vec::new();
|
||||
let mut generator = self.new_generator(tracing);
|
||||
|
||||
let mut generator = self.checked_modules.new_generator(
|
||||
&self.functions,
|
||||
&self.data_types,
|
||||
&self.module_types,
|
||||
tracing,
|
||||
);
|
||||
|
||||
for (module_name, testable_validator) in &testable_validators {
|
||||
generator.insert_function(
|
||||
module_name.to_string(),
|
||||
testable_validator.name.clone(),
|
||||
testable_validator,
|
||||
);
|
||||
}
|
||||
|
||||
for (input_path, module_name, func_def) in scripts {
|
||||
let Function {
|
||||
name,
|
||||
body,
|
||||
can_error,
|
||||
..
|
||||
} = func_def;
|
||||
let mut tests = Vec::new();
|
||||
|
||||
for (input_path, module_name, test) in scripts.into_iter() {
|
||||
if verbose {
|
||||
self.event_listener.handle_event(Event::GeneratingUPLCFor {
|
||||
name: name.clone(),
|
||||
name: test.name.clone(),
|
||||
path: input_path.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
let evaluation_hint = func_def.test_hint().map(|(bin_op, left_src, right_src)| {
|
||||
let left = generator
|
||||
.clone()
|
||||
.generate_test(&left_src, &module_name)
|
||||
.try_into()
|
||||
.unwrap();
|
||||
|
||||
let right = generator
|
||||
.clone()
|
||||
.generate_test(&right_src, &module_name)
|
||||
.try_into()
|
||||
.unwrap();
|
||||
|
||||
EvalHint {
|
||||
bin_op,
|
||||
left,
|
||||
right,
|
||||
can_error: *can_error,
|
||||
}
|
||||
});
|
||||
|
||||
let program = generator.generate_test(body, &module_name);
|
||||
|
||||
let script = Script::new(
|
||||
input_path,
|
||||
tests.push(Test::from_function_definition(
|
||||
&mut generator,
|
||||
test.to_owned(),
|
||||
module_name,
|
||||
name.to_string(),
|
||||
*can_error,
|
||||
program.try_into().unwrap(),
|
||||
evaluation_hint,
|
||||
);
|
||||
|
||||
programs.push(script);
|
||||
input_path,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(programs)
|
||||
Ok(tests)
|
||||
}
|
||||
|
||||
fn eval_scripts(&self, scripts: Vec<Script>) -> Vec<EvalInfo> {
|
||||
fn run_tests(&self, tests: Vec<Test>, seed: u32) -> Vec<TestResult<UntypedExpr>> {
|
||||
use rayon::prelude::*;
|
||||
|
||||
// TODO: in the future we probably just want to be able to
|
||||
// tell the machine to not explode on budget consumption.
|
||||
let initial_budget = ExBudget {
|
||||
mem: i64::MAX,
|
||||
cpu: i64::MAX,
|
||||
};
|
||||
let data_types = utils::indexmap::as_ref_values(&self.data_types);
|
||||
|
||||
scripts
|
||||
tests
|
||||
.into_par_iter()
|
||||
.map(|script| {
|
||||
let mut eval_result = script.program.clone().eval(initial_budget);
|
||||
|
||||
EvalInfo {
|
||||
success: !eval_result.failed(script.can_error),
|
||||
script,
|
||||
spent_budget: eval_result.cost(),
|
||||
logs: eval_result.logs(),
|
||||
output: eval_result.result().ok(),
|
||||
}
|
||||
.map(|test| match test {
|
||||
Test::UnitTest(unit_test) => unit_test.run(),
|
||||
// TODO: Get the seed from the command-line, defaulting to a random one when not
|
||||
// provided.
|
||||
Test::PropertyTest(property_test) => property_test.run(seed),
|
||||
})
|
||||
.collect::<Vec<TestResult<PlutusData>>>()
|
||||
.into_iter()
|
||||
.map(|test| test.reify(&data_types))
|
||||
.collect()
|
||||
}
|
||||
|
||||
|
@ -929,7 +868,7 @@ fn is_aiken_path(path: &Path, dir: impl AsRef<Path>) -> bool {
|
|||
use regex::Regex;
|
||||
|
||||
let re = Regex::new(&format!(
|
||||
"^({module}{slash})*{module}\\.ak$",
|
||||
"^({module}{slash})*{module}(\\.[-_a-z0-9]*)*\\.ak$",
|
||||
module = "[a-z][-_a-z0-9]*",
|
||||
slash = "(/|\\\\)",
|
||||
))
|
||||
|
|
|
@ -1,18 +1,11 @@
|
|||
use crate::error::Error;
|
||||
use aiken_lang::{
|
||||
ast::{
|
||||
DataType, Definition, Function, Located, ModuleKind, Tracing, TypedDataType, TypedFunction,
|
||||
TypedModule, TypedValidator, UntypedModule, Validator,
|
||||
DataType, Definition, Function, Located, ModuleKind, TypedModule, TypedValidator,
|
||||
UntypedModule, Validator,
|
||||
},
|
||||
gen_uplc::{
|
||||
builder::{DataTypeKey, FunctionAccessKey},
|
||||
CodeGenerator,
|
||||
},
|
||||
line_numbers::LineNumbers,
|
||||
parser::extra::{comments_before, Comment, ModuleExtra},
|
||||
tipo::TypeInfo,
|
||||
};
|
||||
use indexmap::IndexMap;
|
||||
use petgraph::{algo, graph::NodeIndex, Direction, Graph};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
|
@ -353,72 +346,6 @@ impl CheckedModules {
|
|||
.into_values()
|
||||
.filter(|module| module.kind.is_validator())
|
||||
}
|
||||
|
||||
pub fn new_generator<'a>(
|
||||
&'a self,
|
||||
builtin_functions: &'a IndexMap<FunctionAccessKey, TypedFunction>,
|
||||
builtin_data_types: &'a IndexMap<DataTypeKey, TypedDataType>,
|
||||
module_types: &'a HashMap<String, TypeInfo>,
|
||||
tracing: Tracing,
|
||||
) -> CodeGenerator<'a> {
|
||||
let mut functions = IndexMap::new();
|
||||
for (k, v) in builtin_functions {
|
||||
functions.insert(k.clone(), v);
|
||||
}
|
||||
|
||||
let mut data_types = IndexMap::new();
|
||||
for (k, v) in builtin_data_types {
|
||||
data_types.insert(k.clone(), v);
|
||||
}
|
||||
|
||||
let mut module_src = IndexMap::new();
|
||||
|
||||
for module in self.values() {
|
||||
for def in module.ast.definitions() {
|
||||
match def {
|
||||
Definition::Fn(func) => {
|
||||
functions.insert(
|
||||
FunctionAccessKey {
|
||||
module_name: module.name.clone(),
|
||||
function_name: func.name.clone(),
|
||||
},
|
||||
func,
|
||||
);
|
||||
}
|
||||
Definition::DataType(dt) => {
|
||||
data_types.insert(
|
||||
DataTypeKey {
|
||||
module_name: module.name.clone(),
|
||||
defined_type: dt.name.clone(),
|
||||
},
|
||||
dt,
|
||||
);
|
||||
}
|
||||
|
||||
Definition::TypeAlias(_)
|
||||
| Definition::ModuleConstant(_)
|
||||
| Definition::Test(_)
|
||||
| Definition::Validator(_)
|
||||
| Definition::Use(_) => {}
|
||||
}
|
||||
}
|
||||
module_src.insert(
|
||||
module.name.clone(),
|
||||
(module.code.clone(), LineNumbers::new(&module.code)),
|
||||
);
|
||||
}
|
||||
|
||||
let mut module_types_index = IndexMap::new();
|
||||
module_types_index.extend(module_types);
|
||||
|
||||
CodeGenerator::new(
|
||||
functions,
|
||||
data_types,
|
||||
module_types_index,
|
||||
module_src,
|
||||
tracing.trace_level(true),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for CheckedModules {
|
||||
|
|
|
@ -10,6 +10,7 @@ pub enum CodeGenMode {
|
|||
match_tests: Option<Vec<String>>,
|
||||
verbose: bool,
|
||||
exact_match: bool,
|
||||
seed: u32,
|
||||
},
|
||||
Build(bool),
|
||||
NoOp,
|
||||
|
|
|
@ -7,8 +7,8 @@ pub fn ansi_len(s: &str) -> usize {
|
|||
.count()
|
||||
}
|
||||
|
||||
pub fn len_longest_line(s: &str) -> usize {
|
||||
s.lines().fold(0, |max, l| {
|
||||
pub fn len_longest_line(zero: usize, s: &str) -> usize {
|
||||
s.lines().fold(zero, |max, l| {
|
||||
let n = ansi_len(l);
|
||||
if n > max {
|
||||
n
|
||||
|
@ -23,7 +23,7 @@ pub fn boxed(title: &str, content: &str) -> String {
|
|||
}
|
||||
|
||||
pub fn boxed_with(title: &str, content: &str, border_style: fn(&str) -> String) -> String {
|
||||
let n = len_longest_line(content);
|
||||
let n = len_longest_line(ansi_len(title) + 1, content);
|
||||
|
||||
let content = content
|
||||
.lines()
|
||||
|
@ -62,7 +62,7 @@ pub fn open_box(
|
|||
border_style: fn(&str) -> String,
|
||||
) -> String {
|
||||
let i = ansi_len(content.lines().collect::<Vec<_>>().first().unwrap());
|
||||
let j = len_longest_line(content);
|
||||
let j = len_longest_line(ansi_len(title) + 1, content);
|
||||
let k = ansi_len(footer);
|
||||
|
||||
let content = content
|
||||
|
@ -73,15 +73,27 @@ pub fn open_box(
|
|||
|
||||
let top = format!(
|
||||
"{} {}",
|
||||
border_style("┍━"),
|
||||
border_style(if footer.is_empty() {
|
||||
"┝━"
|
||||
} else {
|
||||
"┍━"
|
||||
}),
|
||||
pad_right(format!("{title} "), i - 1, &border_style("━")),
|
||||
);
|
||||
|
||||
let bottom = format!(
|
||||
"{} {}",
|
||||
pad_right(border_style("┕"), j - k + 1, &border_style("━")),
|
||||
footer
|
||||
);
|
||||
let bottom = if footer.is_empty() {
|
||||
border_style("╽")
|
||||
} else {
|
||||
format!(
|
||||
"{} {}",
|
||||
pad_right(
|
||||
border_style("┕"),
|
||||
if j < k { 0 } else { j + 1 - k },
|
||||
&border_style("━")
|
||||
),
|
||||
footer
|
||||
)
|
||||
};
|
||||
|
||||
format!("{top}\n{content}\n{bottom}")
|
||||
}
|
||||
|
|
|
@ -1,122 +0,0 @@
|
|||
use crate::{pretty, ExBudget, Term};
|
||||
use aiken_lang::ast::BinOp;
|
||||
use std::{
|
||||
fmt::{self, Display},
|
||||
path::PathBuf,
|
||||
};
|
||||
use uplc::ast::{NamedDeBruijn, Program};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Script {
|
||||
pub input_path: PathBuf,
|
||||
pub module: String,
|
||||
pub name: String,
|
||||
pub can_error: bool,
|
||||
pub program: Program<NamedDeBruijn>,
|
||||
pub evaluation_hint: Option<EvalHint>,
|
||||
}
|
||||
|
||||
unsafe impl Send for Script {}
|
||||
|
||||
impl Script {
|
||||
pub fn new(
|
||||
input_path: PathBuf,
|
||||
module: String,
|
||||
name: String,
|
||||
can_error: bool,
|
||||
program: Program<NamedDeBruijn>,
|
||||
evaluation_hint: Option<EvalHint>,
|
||||
) -> Script {
|
||||
Script {
|
||||
input_path,
|
||||
module,
|
||||
name,
|
||||
program,
|
||||
can_error,
|
||||
evaluation_hint,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EvalHint {
|
||||
pub bin_op: BinOp,
|
||||
pub left: Program<NamedDeBruijn>,
|
||||
pub right: Program<NamedDeBruijn>,
|
||||
pub can_error: bool,
|
||||
}
|
||||
|
||||
impl Display for EvalHint {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let unlimited_budget = ExBudget {
|
||||
mem: i64::MAX,
|
||||
cpu: i64::MAX,
|
||||
};
|
||||
|
||||
let left = pretty::boxed(
|
||||
"left",
|
||||
&match self.left.clone().eval(unlimited_budget).result() {
|
||||
Ok(term) => format!("{term}"),
|
||||
Err(err) => format!("{err}"),
|
||||
},
|
||||
);
|
||||
let right = pretty::boxed(
|
||||
"right",
|
||||
&match self.right.clone().eval(unlimited_budget).result() {
|
||||
Ok(term) => format!("{term}"),
|
||||
Err(err) => format!("{err}"),
|
||||
},
|
||||
);
|
||||
let msg = if self.can_error {
|
||||
match self.bin_op {
|
||||
BinOp::And => Some(format!(
|
||||
"{left}\n\nand\n\n{right}\n\nare both true but shouldn't."
|
||||
)),
|
||||
BinOp::Or => Some(format!(
|
||||
"neither\n\n{left}\n\nnor\n\n{right}\n\nshould be true."
|
||||
)),
|
||||
BinOp::Eq => Some(format!("{left}\n\nshould not be equal to\n\n{right}")),
|
||||
BinOp::NotEq => Some(format!("{left}\n\nshould be equal to\n\n{right}")),
|
||||
BinOp::LtInt => Some(format!(
|
||||
"{left}\n\nshould be greater than or equal to\n\n{right}"
|
||||
)),
|
||||
BinOp::LtEqInt => Some(format!("{left}\n\nshould be greater than\n\n{right}")),
|
||||
BinOp::GtEqInt => Some(format!(
|
||||
"{left}\n\nshould be lower than or equal\n\n{right}"
|
||||
)),
|
||||
BinOp::GtInt => Some(format!("{left}\n\nshould be lower than\n\n{right}")),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
match self.bin_op {
|
||||
BinOp::And => Some(format!("{left}\n\nand\n\n{right}\n\nshould both be true.")),
|
||||
BinOp::Or => Some(format!("{left}\n\nor\n\n{right}\n\nshould be true.")),
|
||||
BinOp::Eq => Some(format!("{left}\n\nshould be equal to\n\n{right}")),
|
||||
BinOp::NotEq => Some(format!("{left}\n\nshould not be equal to\n\n{right}")),
|
||||
BinOp::LtInt => Some(format!("{left}\n\nshould be lower than\n\n{right}")),
|
||||
BinOp::LtEqInt => Some(format!(
|
||||
"{left}\n\nshould be lower than or equal to\n\n{right}"
|
||||
)),
|
||||
BinOp::GtEqInt => Some(format!("{left}\n\nshould be greater than\n\n{right}")),
|
||||
BinOp::GtInt => Some(format!(
|
||||
"{left}\n\nshould be greater than or equal to\n\n{right}"
|
||||
)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
.ok_or(fmt::Error)?;
|
||||
|
||||
f.write_str(&msg)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct EvalInfo {
|
||||
pub success: bool,
|
||||
pub script: Script,
|
||||
pub spent_budget: ExBudget,
|
||||
pub output: Option<Term<NamedDeBruijn>>,
|
||||
pub logs: Vec<String>,
|
||||
}
|
||||
|
||||
unsafe impl Send for EvalInfo {}
|
|
@ -1,9 +1,9 @@
|
|||
use crate::pretty;
|
||||
use crate::script::EvalInfo;
|
||||
use owo_colors::{
|
||||
OwoColorize,
|
||||
Stream::{self, Stderr},
|
||||
use crate::{
|
||||
pretty,
|
||||
test_framework::{PropertyTestResult, TestResult, UnitTestResult},
|
||||
};
|
||||
use aiken_lang::{expr::UntypedExpr, format::Formatter};
|
||||
use owo_colors::{OwoColorize, Stream::Stderr};
|
||||
use std::{collections::BTreeMap, fmt::Display, path::PathBuf};
|
||||
use uplc::machine::cost_model::ExBudget;
|
||||
|
||||
|
@ -35,12 +35,9 @@ pub enum Event {
|
|||
name: String,
|
||||
path: PathBuf,
|
||||
},
|
||||
EvaluatingFunction {
|
||||
results: Vec<EvalInfo>,
|
||||
},
|
||||
RunningTests,
|
||||
FinishedTests {
|
||||
tests: Vec<EvalInfo>,
|
||||
tests: Vec<TestResult<UntypedExpr>>,
|
||||
},
|
||||
WaitingForBuildDirLock,
|
||||
ResolvingPackages {
|
||||
|
@ -164,20 +161,6 @@ impl EventListener for Terminal {
|
|||
name.if_supports_color(Stderr, |s| s.bright_blue()),
|
||||
);
|
||||
}
|
||||
Event::EvaluatingFunction { results } => {
|
||||
eprintln!(
|
||||
"{}\n",
|
||||
" Evaluating function ..."
|
||||
.if_supports_color(Stderr, |s| s.bold())
|
||||
.if_supports_color(Stderr, |s| s.purple())
|
||||
);
|
||||
|
||||
let (max_mem, max_cpu) = find_max_execution_units(&results);
|
||||
|
||||
for eval_info in &results {
|
||||
println!(" {}", fmt_eval(eval_info, max_mem, max_cpu, Stderr))
|
||||
}
|
||||
}
|
||||
Event::RunningTests => {
|
||||
eprintln!(
|
||||
"{} {}\n",
|
||||
|
@ -188,21 +171,21 @@ impl EventListener for Terminal {
|
|||
);
|
||||
}
|
||||
Event::FinishedTests { tests } => {
|
||||
let (max_mem, max_cpu) = find_max_execution_units(&tests);
|
||||
let (max_mem, max_cpu, max_iter) = find_max_execution_units(&tests);
|
||||
|
||||
for (module, infos) in &group_by_module(&tests) {
|
||||
for (module, results) in &group_by_module(&tests) {
|
||||
let title = module
|
||||
.if_supports_color(Stderr, |s| s.bold())
|
||||
.if_supports_color(Stderr, |s| s.blue())
|
||||
.to_string();
|
||||
|
||||
let tests = infos
|
||||
let tests = results
|
||||
.iter()
|
||||
.map(|eval_info| fmt_test(eval_info, max_mem, max_cpu, true))
|
||||
.map(|r| fmt_test(r, max_mem, max_cpu, max_iter, true))
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n");
|
||||
|
||||
let summary = fmt_test_summary(infos, true);
|
||||
let summary = fmt_test_summary(results, true);
|
||||
|
||||
eprintln!(
|
||||
"{}\n",
|
||||
|
@ -269,81 +252,125 @@ impl EventListener for Terminal {
|
|||
}
|
||||
}
|
||||
|
||||
fn fmt_test(eval_info: &EvalInfo, max_mem: usize, max_cpu: usize, styled: bool) -> String {
|
||||
let EvalInfo {
|
||||
success,
|
||||
script,
|
||||
spent_budget,
|
||||
logs,
|
||||
..
|
||||
} = eval_info;
|
||||
|
||||
let ExBudget { mem, cpu } = spent_budget;
|
||||
let mem_pad = pretty::pad_left(mem.to_string(), max_mem, " ");
|
||||
let cpu_pad = pretty::pad_left(cpu.to_string(), max_cpu, " ");
|
||||
|
||||
let test = format!(
|
||||
"{status} [mem: {mem_unit}, cpu: {cpu_unit}] {module}",
|
||||
status = if *success {
|
||||
pretty::style_if(styled, "PASS".to_string(), |s| {
|
||||
s.if_supports_color(Stderr, |s| s.bold())
|
||||
.if_supports_color(Stderr, |s| s.green())
|
||||
.to_string()
|
||||
})
|
||||
} else {
|
||||
pretty::style_if(styled, "FAIL".to_string(), |s| {
|
||||
s.if_supports_color(Stderr, |s| s.bold())
|
||||
.if_supports_color(Stderr, |s| s.red())
|
||||
.to_string()
|
||||
})
|
||||
},
|
||||
mem_unit = pretty::style_if(styled, mem_pad, |s| s
|
||||
.if_supports_color(Stderr, |s| s.cyan())
|
||||
.to_string()),
|
||||
cpu_unit = pretty::style_if(styled, cpu_pad, |s| s
|
||||
.if_supports_color(Stderr, |s| s.cyan())
|
||||
.to_string()),
|
||||
module = pretty::style_if(styled, script.name.clone(), |s| s
|
||||
.if_supports_color(Stderr, |s| s.bright_blue())
|
||||
.to_string()),
|
||||
);
|
||||
|
||||
let logs = if logs.is_empty() {
|
||||
String::new()
|
||||
fn fmt_test(
|
||||
result: &TestResult<UntypedExpr>,
|
||||
max_mem: usize,
|
||||
max_cpu: usize,
|
||||
max_iter: usize,
|
||||
styled: bool,
|
||||
) -> String {
|
||||
// Status
|
||||
let mut test = if result.is_success() {
|
||||
pretty::style_if(styled, "PASS".to_string(), |s| {
|
||||
s.if_supports_color(Stderr, |s| s.bold())
|
||||
.if_supports_color(Stderr, |s| s.green())
|
||||
.to_string()
|
||||
})
|
||||
} else {
|
||||
logs.iter()
|
||||
.map(|line| {
|
||||
format!(
|
||||
"{arrow} {styled_line}",
|
||||
arrow = "↳".if_supports_color(Stderr, |s| s.bright_yellow()),
|
||||
styled_line = line
|
||||
.split('\n')
|
||||
.map(|l| format!("{}", l.if_supports_color(Stderr, |s| s.bright_black())))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
pretty::style_if(styled, "FAIL".to_string(), |s| {
|
||||
s.if_supports_color(Stderr, |s| s.bold())
|
||||
.if_supports_color(Stderr, |s| s.red())
|
||||
.to_string()
|
||||
})
|
||||
};
|
||||
|
||||
if logs.is_empty() {
|
||||
test
|
||||
} else {
|
||||
[test, logs].join("\n")
|
||||
// Execution units / iteration steps
|
||||
match result {
|
||||
TestResult::UnitTestResult(UnitTestResult { spent_budget, .. }) => {
|
||||
let ExBudget { mem, cpu } = spent_budget;
|
||||
let mem_pad = pretty::pad_left(mem.to_string(), max_mem, " ");
|
||||
let cpu_pad = pretty::pad_left(cpu.to_string(), max_cpu, " ");
|
||||
|
||||
test = format!(
|
||||
"{test} [mem: {mem_unit}, cpu: {cpu_unit}]",
|
||||
mem_unit = pretty::style_if(styled, mem_pad, |s| s
|
||||
.if_supports_color(Stderr, |s| s.cyan())
|
||||
.to_string()),
|
||||
cpu_unit = pretty::style_if(styled, cpu_pad, |s| s
|
||||
.if_supports_color(Stderr, |s| s.cyan())
|
||||
.to_string()),
|
||||
);
|
||||
}
|
||||
TestResult::PropertyTestResult(PropertyTestResult { iterations, .. }) => {
|
||||
test = pretty::pad_right(
|
||||
format!(
|
||||
"{test} [after {} test{}]",
|
||||
pretty::pad_left(iterations.to_string(), max_iter, " "),
|
||||
if *iterations > 1 { "s" } else { "" }
|
||||
),
|
||||
18 + max_mem + max_cpu + max_iter,
|
||||
" ",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Title
|
||||
test = format!(
|
||||
"{test} {title}",
|
||||
title = pretty::style_if(styled, result.title().to_string(), |s| s
|
||||
.if_supports_color(Stderr, |s| s.bright_blue())
|
||||
.to_string())
|
||||
);
|
||||
|
||||
// CounterExample
|
||||
if let TestResult::PropertyTestResult(PropertyTestResult {
|
||||
counterexample: Some(counterexample),
|
||||
..
|
||||
}) = result
|
||||
{
|
||||
test = format!(
|
||||
"{test}\n{}",
|
||||
pretty::open_box(
|
||||
&pretty::style_if(styled, "counterexample".to_string(), |s| s
|
||||
.if_supports_color(Stderr, |s| s.red())
|
||||
.if_supports_color(Stderr, |s| s.bold())
|
||||
.to_string()),
|
||||
&Formatter::new()
|
||||
.expr(counterexample, false)
|
||||
.to_pretty_string(70),
|
||||
"",
|
||||
|s| s.red().to_string()
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Traces
|
||||
if !result.logs().is_empty() {
|
||||
test = format!(
|
||||
"{test}\n{logs}",
|
||||
logs = result
|
||||
.logs()
|
||||
.iter()
|
||||
.map(|line| {
|
||||
format!(
|
||||
"{arrow} {styled_line}",
|
||||
arrow = "↳".if_supports_color(Stderr, |s| s.bright_yellow()),
|
||||
styled_line = line
|
||||
.split('\n')
|
||||
.map(|l| format!(
|
||||
"{}",
|
||||
l.if_supports_color(Stderr, |s| s.bright_black())
|
||||
))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n")
|
||||
);
|
||||
};
|
||||
|
||||
test
|
||||
}
|
||||
|
||||
fn fmt_test_summary(tests: &[&EvalInfo], styled: bool) -> String {
|
||||
let (n_passed, n_failed) = tests
|
||||
.iter()
|
||||
.fold((0, 0), |(n_passed, n_failed), test_info| {
|
||||
if test_info.success {
|
||||
(n_passed + 1, n_failed)
|
||||
} else {
|
||||
(n_passed, n_failed + 1)
|
||||
}
|
||||
});
|
||||
fn fmt_test_summary<T>(tests: &[&TestResult<T>], styled: bool) -> String {
|
||||
let (n_passed, n_failed) = tests.iter().fold((0, 0), |(n_passed, n_failed), result| {
|
||||
if result.is_success() {
|
||||
(n_passed + 1, n_failed)
|
||||
} else {
|
||||
(n_passed, n_failed + 1)
|
||||
}
|
||||
});
|
||||
format!(
|
||||
"{} | {} | {}",
|
||||
pretty::style_if(styled, format!("{} tests", tests.len()), |s| s
|
||||
|
@ -360,53 +387,38 @@ fn fmt_test_summary(tests: &[&EvalInfo], styled: bool) -> String {
|
|||
)
|
||||
}
|
||||
|
||||
fn fmt_eval(eval_info: &EvalInfo, max_mem: usize, max_cpu: usize, stream: Stream) -> String {
|
||||
let EvalInfo {
|
||||
output,
|
||||
script,
|
||||
spent_budget,
|
||||
..
|
||||
} = eval_info;
|
||||
|
||||
let ExBudget { mem, cpu } = spent_budget;
|
||||
|
||||
format!(
|
||||
" {}::{} [mem: {}, cpu: {}]\n │\n ╰─▶ {}",
|
||||
script.module.if_supports_color(stream, |s| s.blue()),
|
||||
script.name.if_supports_color(stream, |s| s.bright_blue()),
|
||||
pretty::pad_left(mem.to_string(), max_mem, " "),
|
||||
pretty::pad_left(cpu.to_string(), max_cpu, " "),
|
||||
output
|
||||
.as_ref()
|
||||
.map(|x| format!("{x}"))
|
||||
.unwrap_or_else(|| "Error.".to_string()),
|
||||
)
|
||||
}
|
||||
|
||||
fn group_by_module(infos: &Vec<EvalInfo>) -> BTreeMap<String, Vec<&EvalInfo>> {
|
||||
fn group_by_module<T>(results: &Vec<TestResult<T>>) -> BTreeMap<String, Vec<&TestResult<T>>> {
|
||||
let mut modules = BTreeMap::new();
|
||||
for eval_info in infos {
|
||||
let xs: &mut Vec<&EvalInfo> = modules.entry(eval_info.script.module.clone()).or_default();
|
||||
xs.push(eval_info);
|
||||
for r in results {
|
||||
let xs: &mut Vec<&TestResult<_>> = modules.entry(r.module().to_string()).or_default();
|
||||
xs.push(r);
|
||||
}
|
||||
modules
|
||||
}
|
||||
|
||||
fn find_max_execution_units(xs: &[EvalInfo]) -> (usize, usize) {
|
||||
let (max_mem, max_cpu) = xs.iter().fold(
|
||||
(0, 0),
|
||||
|(max_mem, max_cpu), EvalInfo { spent_budget, .. }| {
|
||||
if spent_budget.mem >= max_mem && spent_budget.cpu >= max_cpu {
|
||||
(spent_budget.mem, spent_budget.cpu)
|
||||
} else if spent_budget.mem > max_mem {
|
||||
(spent_budget.mem, max_cpu)
|
||||
} else if spent_budget.cpu > max_cpu {
|
||||
(max_mem, spent_budget.cpu)
|
||||
} else {
|
||||
(max_mem, max_cpu)
|
||||
}
|
||||
},
|
||||
);
|
||||
fn find_max_execution_units<T>(xs: &[TestResult<T>]) -> (usize, usize, usize) {
|
||||
let (max_mem, max_cpu, max_iter) =
|
||||
xs.iter()
|
||||
.fold((0, 0, 0), |(max_mem, max_cpu, max_iter), test| match test {
|
||||
TestResult::PropertyTestResult(PropertyTestResult { iterations, .. }) => {
|
||||
(max_mem, max_cpu, std::cmp::max(max_iter, *iterations))
|
||||
}
|
||||
TestResult::UnitTestResult(UnitTestResult { spent_budget, .. }) => {
|
||||
if spent_budget.mem >= max_mem && spent_budget.cpu >= max_cpu {
|
||||
(spent_budget.mem, spent_budget.cpu, max_iter)
|
||||
} else if spent_budget.mem > max_mem {
|
||||
(spent_budget.mem, max_cpu, max_iter)
|
||||
} else if spent_budget.cpu > max_cpu {
|
||||
(max_mem, spent_budget.cpu, max_iter)
|
||||
} else {
|
||||
(max_mem, max_cpu, max_iter)
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
(max_mem.to_string().len(), max_cpu.to_string().len())
|
||||
(
|
||||
max_mem.to_string().len(),
|
||||
max_cpu.to_string().len(),
|
||||
max_iter.to_string().len(),
|
||||
)
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,19 +1,16 @@
|
|||
use super::TestProject;
|
||||
use crate::module::CheckedModules;
|
||||
use aiken_lang::ast::{Definition, Function, TraceLevel, Tracing, TypedTest, TypedValidator};
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use aiken_lang::ast::{Definition, Function, TraceLevel, Tracing, TypedFunction, TypedValidator};
|
||||
use uplc::{
|
||||
ast::{Constant, Data, DeBruijn, Name, Program, Term, Type},
|
||||
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER},
|
||||
machine::cost_model::ExBudget,
|
||||
machine::{cost_model::ExBudget, runtime::Compressable},
|
||||
optimize,
|
||||
};
|
||||
|
||||
use crate::module::CheckedModules;
|
||||
|
||||
use super::TestProject;
|
||||
|
||||
enum TestType {
|
||||
Func(TypedFunction),
|
||||
Func(TypedTest),
|
||||
Validator(TypedValidator),
|
||||
}
|
||||
|
||||
|
@ -22,12 +19,7 @@ fn assert_uplc(source_code: &str, expected: Term<Name>, should_fail: bool) {
|
|||
|
||||
let modules = CheckedModules::singleton(project.check(project.parse(source_code)));
|
||||
|
||||
let mut generator = modules.new_generator(
|
||||
&project.functions,
|
||||
&project.data_types,
|
||||
&project.module_types,
|
||||
Tracing::All(TraceLevel::Verbose),
|
||||
);
|
||||
let mut generator = project.new_generator(Tracing::All(TraceLevel::Verbose));
|
||||
|
||||
let Some(checked_module) = modules.values().next() else {
|
||||
unreachable!("There's got to be one right?")
|
||||
|
@ -57,7 +49,7 @@ fn assert_uplc(source_code: &str, expected: Term<Name>, should_fail: bool) {
|
|||
|
||||
match &script.2 {
|
||||
TestType::Func(Function { body: func, .. }) => {
|
||||
let program = generator.generate_test(func, &script.1);
|
||||
let program = generator.generate_raw(func, &[], &script.1);
|
||||
|
||||
let debruijn_program: Program<DeBruijn> = program.try_into().unwrap();
|
||||
|
||||
|
@ -6206,3 +6198,119 @@ fn tuple_2_match() {
|
|||
false,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bls12_381_elements_to_data_conversion() {
|
||||
let src = r#"
|
||||
pub type Proof {
|
||||
piA: G1Element,
|
||||
piB: G2Element,
|
||||
}
|
||||
|
||||
test thing() {
|
||||
let pk =
|
||||
Proof {
|
||||
piA: #<Bls12_381, G1>"b28cb29bc282be68df977b35eb9d8e98b3a0a3fc7c372990bddc50419ca86693e491755338fed4fb42231a7c081252ce",
|
||||
piB: #<Bls12_381, G2>"b9215e5bc481ba6552384c89c23d45bd650b69462868248bfbb83aee7060579404dba41c781dec7c2bec5fccec06842e0e66ad6d86c7c76c468a32c9c0080eea0219d0953b44b1c4f5605afb1e5a3193264ff730222e94f55207628235f3b423",
|
||||
}
|
||||
|
||||
pk == pk
|
||||
}
|
||||
"#;
|
||||
|
||||
let constant = Term::Constant(
|
||||
Constant::Data(Data::constr(
|
||||
0,
|
||||
vec![
|
||||
Data::bytestring(vec![
|
||||
0xb2, 0x8c, 0xb2, 0x9b, 0xc2, 0x82, 0xbe, 0x68, 0xdf, 0x97, 0x7b, 0x35, 0xeb,
|
||||
0x9d, 0x8e, 0x98, 0xb3, 0xa0, 0xa3, 0xfc, 0x7c, 0x37, 0x29, 0x90, 0xbd, 0xdc,
|
||||
0x50, 0x41, 0x9c, 0xa8, 0x66, 0x93, 0xe4, 0x91, 0x75, 0x53, 0x38, 0xfe, 0xd4,
|
||||
0xfb, 0x42, 0x23, 0x1a, 0x7c, 0x08, 0x12, 0x52, 0xce,
|
||||
]),
|
||||
Data::bytestring(vec![
|
||||
0xb9, 0x21, 0x5e, 0x5b, 0xc4, 0x81, 0xba, 0x65, 0x52, 0x38, 0x4c, 0x89, 0xc2,
|
||||
0x3d, 0x45, 0xbd, 0x65, 0x0b, 0x69, 0x46, 0x28, 0x68, 0x24, 0x8b, 0xfb, 0xb8,
|
||||
0x3a, 0xee, 0x70, 0x60, 0x57, 0x94, 0x04, 0xdb, 0xa4, 0x1c, 0x78, 0x1d, 0xec,
|
||||
0x7c, 0x2b, 0xec, 0x5f, 0xcc, 0xec, 0x06, 0x84, 0x2e, 0x0e, 0x66, 0xad, 0x6d,
|
||||
0x86, 0xc7, 0xc7, 0x6c, 0x46, 0x8a, 0x32, 0xc9, 0xc0, 0x08, 0x0e, 0xea, 0x02,
|
||||
0x19, 0xd0, 0x95, 0x3b, 0x44, 0xb1, 0xc4, 0xf5, 0x60, 0x5a, 0xfb, 0x1e, 0x5a,
|
||||
0x31, 0x93, 0x26, 0x4f, 0xf7, 0x30, 0x22, 0x2e, 0x94, 0xf5, 0x52, 0x07, 0x62,
|
||||
0x82, 0x35, 0xf3, 0xb4, 0x23,
|
||||
]),
|
||||
],
|
||||
))
|
||||
.into(),
|
||||
);
|
||||
|
||||
assert_uplc(
|
||||
src,
|
||||
Term::equals_data().apply(constant.clone()).apply(constant),
|
||||
false,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn bls12_381_elements_from_data_conversion() {
|
||||
let src = r#"
|
||||
pub type Proof {
|
||||
piA: G1Element,
|
||||
piB: G2Element,
|
||||
}
|
||||
|
||||
test thing() {
|
||||
let pk =
|
||||
Proof {
|
||||
piA: #<Bls12_381, G1>"b28cb29bc282be68df977b35eb9d8e98b3a0a3fc7c372990bddc50419ca86693e491755338fed4fb42231a7c081252ce",
|
||||
piB: #<Bls12_381, G2>"b9215e5bc481ba6552384c89c23d45bd650b69462868248bfbb83aee7060579404dba41c781dec7c2bec5fccec06842e0e66ad6d86c7c76c468a32c9c0080eea0219d0953b44b1c4f5605afb1e5a3193264ff730222e94f55207628235f3b423",
|
||||
}
|
||||
|
||||
pk.piA == #<Bls12_381, G1>"b28cb29bc282be68df977b35eb9d8e98b3a0a3fc7c372990bddc50419ca86693e491755338fed4fb42231a7c081252ce"
|
||||
}
|
||||
"#;
|
||||
|
||||
let bytes = vec![
|
||||
0xb2, 0x8c, 0xb2, 0x9b, 0xc2, 0x82, 0xbe, 0x68, 0xdf, 0x97, 0x7b, 0x35, 0xeb, 0x9d, 0x8e,
|
||||
0x98, 0xb3, 0xa0, 0xa3, 0xfc, 0x7c, 0x37, 0x29, 0x90, 0xbd, 0xdc, 0x50, 0x41, 0x9c, 0xa8,
|
||||
0x66, 0x93, 0xe4, 0x91, 0x75, 0x53, 0x38, 0xfe, 0xd4, 0xfb, 0x42, 0x23, 0x1a, 0x7c, 0x08,
|
||||
0x12, 0x52, 0xce,
|
||||
];
|
||||
|
||||
let g1 = Term::Constant(
|
||||
Constant::Bls12_381G1Element(blst::blst_p1::uncompress(&bytes).unwrap().into()).into(),
|
||||
);
|
||||
|
||||
let constant = Term::Constant(
|
||||
Constant::Data(Data::constr(
|
||||
0,
|
||||
vec![
|
||||
Data::bytestring(bytes),
|
||||
Data::bytestring(vec![
|
||||
0xb9, 0x21, 0x5e, 0x5b, 0xc4, 0x81, 0xba, 0x65, 0x52, 0x38, 0x4c, 0x89, 0xc2,
|
||||
0x3d, 0x45, 0xbd, 0x65, 0x0b, 0x69, 0x46, 0x28, 0x68, 0x24, 0x8b, 0xfb, 0xb8,
|
||||
0x3a, 0xee, 0x70, 0x60, 0x57, 0x94, 0x04, 0xdb, 0xa4, 0x1c, 0x78, 0x1d, 0xec,
|
||||
0x7c, 0x2b, 0xec, 0x5f, 0xcc, 0xec, 0x06, 0x84, 0x2e, 0x0e, 0x66, 0xad, 0x6d,
|
||||
0x86, 0xc7, 0xc7, 0x6c, 0x46, 0x8a, 0x32, 0xc9, 0xc0, 0x08, 0x0e, 0xea, 0x02,
|
||||
0x19, 0xd0, 0x95, 0x3b, 0x44, 0xb1, 0xc4, 0xf5, 0x60, 0x5a, 0xfb, 0x1e, 0x5a,
|
||||
0x31, 0x93, 0x26, 0x4f, 0xf7, 0x30, 0x22, 0x2e, 0x94, 0xf5, 0x52, 0x07, 0x62,
|
||||
0x82, 0x35, 0xf3, 0xb4, 0x23,
|
||||
]),
|
||||
],
|
||||
))
|
||||
.into(),
|
||||
);
|
||||
|
||||
assert_uplc(
|
||||
src,
|
||||
Term::bls12_381_g1_equal()
|
||||
.apply(Term::bls12_381_g1_uncompress().apply(
|
||||
Term::un_b_data().apply(
|
||||
Term::head_list().apply(
|
||||
Term::snd_pair().apply(Term::unconstr_data().apply(constant.clone())),
|
||||
),
|
||||
),
|
||||
))
|
||||
.apply(g1),
|
||||
false,
|
||||
)
|
||||
}
|
||||
|
|
|
@ -1,20 +1,22 @@
|
|||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::{
|
||||
builtins,
|
||||
module::{CheckedModule, ParsedModule},
|
||||
package_name::PackageName,
|
||||
utils,
|
||||
};
|
||||
use aiken_lang::{
|
||||
ast::{ModuleKind, TraceLevel, Tracing, TypedDataType, TypedFunction},
|
||||
gen_uplc::builder::{DataTypeKey, FunctionAccessKey},
|
||||
ast::{
|
||||
DataTypeKey, FunctionAccessKey, ModuleKind, TraceLevel, Tracing, TypedDataType,
|
||||
TypedFunction,
|
||||
},
|
||||
gen_uplc::CodeGenerator,
|
||||
line_numbers::LineNumbers,
|
||||
parser,
|
||||
tipo::TypeInfo,
|
||||
IdGenerator,
|
||||
};
|
||||
use indexmap::IndexMap;
|
||||
|
||||
use crate::{
|
||||
builtins,
|
||||
module::{CheckedModule, ParsedModule},
|
||||
package_name::PackageName,
|
||||
};
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
|
||||
mod gen_uplc;
|
||||
|
||||
|
@ -24,9 +26,10 @@ mod gen_uplc;
|
|||
pub struct TestProject {
|
||||
pub package: PackageName,
|
||||
pub id_gen: IdGenerator,
|
||||
pub module_types: HashMap<String, TypeInfo>,
|
||||
pub functions: IndexMap<FunctionAccessKey, TypedFunction>,
|
||||
pub data_types: IndexMap<DataTypeKey, TypedDataType>,
|
||||
pub module_types: HashMap<String, TypeInfo>,
|
||||
pub module_sources: HashMap<String, (String, LineNumbers)>,
|
||||
}
|
||||
|
||||
impl TestProject {
|
||||
|
@ -51,9 +54,20 @@ impl TestProject {
|
|||
module_types,
|
||||
functions,
|
||||
data_types,
|
||||
module_sources: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_generator(&'_ self, tracing: Tracing) -> CodeGenerator<'_> {
|
||||
CodeGenerator::new(
|
||||
utils::indexmap::as_ref_values(&self.functions),
|
||||
utils::indexmap::as_ref_values(&self.data_types),
|
||||
utils::indexmap::as_str_ref_values(&self.module_types),
|
||||
utils::indexmap::as_str_ref_values(&self.module_sources),
|
||||
tracing,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn parse(&self, source_code: &str) -> ParsedModule {
|
||||
let kind = ModuleKind::Validator;
|
||||
let name = "test_module".to_owned();
|
||||
|
@ -86,6 +100,17 @@ impl TestProject {
|
|||
)
|
||||
.expect("Failed to type-check module");
|
||||
|
||||
// Register function definitions & data-types for easier access later.
|
||||
ast.register_definitions(&mut self.functions, &mut self.data_types);
|
||||
|
||||
// Register module sources for an easier access later.
|
||||
self.module_sources.insert(
|
||||
module.name.clone(),
|
||||
(module.code.clone(), LineNumbers::new(&module.code)),
|
||||
);
|
||||
|
||||
// Register the types from this module so they can be
|
||||
// imported into other modules.
|
||||
self.module_types
|
||||
.insert(module.name.clone(), ast.type_info.clone());
|
||||
|
||||
|
|
|
@ -0,0 +1,21 @@
|
|||
use indexmap::IndexMap;
|
||||
use std::{collections::HashMap, hash::Hash};
|
||||
|
||||
pub fn as_ref_values<'a, K, V>(iter: &'a IndexMap<K, V>) -> IndexMap<&'a K, &'a V>
|
||||
where
|
||||
K: Eq + Hash + Clone + 'a,
|
||||
{
|
||||
let mut refs = IndexMap::new();
|
||||
for (k, v) in iter {
|
||||
refs.insert(k, v);
|
||||
}
|
||||
refs
|
||||
}
|
||||
|
||||
pub fn as_str_ref_values<V>(iter: &'_ HashMap<String, V>) -> IndexMap<&'_ str, &'_ V> {
|
||||
let mut refs = IndexMap::new();
|
||||
for (k, v) in iter {
|
||||
refs.insert(k.as_str(), v);
|
||||
}
|
||||
refs
|
||||
}
|
|
@ -0,0 +1 @@
|
|||
pub mod indexmap;
|
|
@ -1,4 +1,4 @@
|
|||
use crate::{telemetry::Terminal, Project};
|
||||
use crate::{telemetry::Terminal, Error, Project};
|
||||
use miette::{Diagnostic, IntoDiagnostic};
|
||||
use notify::{Event, RecursiveMode, Watcher};
|
||||
use owo_colors::{OwoColorize, Stream::Stderr};
|
||||
|
@ -75,7 +75,12 @@ pub fn default_filter(evt: &Event) -> bool {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn with_project<A>(directory: Option<&Path>, deny: bool, mut action: A) -> miette::Result<()>
|
||||
pub fn with_project<A>(
|
||||
directory: Option<&Path>,
|
||||
seed: u32,
|
||||
deny: bool,
|
||||
mut action: A,
|
||||
) -> miette::Result<()>
|
||||
where
|
||||
A: FnMut(&mut Project<Terminal>) -> Result<(), Vec<crate::error::Error>>,
|
||||
{
|
||||
|
@ -116,17 +121,26 @@ where
|
|||
}
|
||||
);
|
||||
|
||||
if errs.iter().any(|e| matches!(e, Error::TestFailure { .. })) {
|
||||
eprintln!(
|
||||
" {}══╤══\n{} ╰─▶ use {} {} to replay",
|
||||
if errs.len() > 1 { "═" } else { "" },
|
||||
if errs.len() > 1 { " " } else { "" },
|
||||
"--seed".if_supports_color(Stderr, |s| s.bold()),
|
||||
format!("{seed}").if_supports_color(Stderr, |s| s.bold())
|
||||
);
|
||||
}
|
||||
return Err(ExitFailure::into_report());
|
||||
} else {
|
||||
eprintln!(
|
||||
"{}",
|
||||
Summary {
|
||||
error_count: 0,
|
||||
warning_count
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
eprintln!(
|
||||
"{}",
|
||||
Summary {
|
||||
error_count: 0,
|
||||
warning_count
|
||||
}
|
||||
);
|
||||
|
||||
if warning_count > 0 && deny {
|
||||
Err(ExitFailure::into_report())
|
||||
} else {
|
||||
|
@ -148,6 +162,7 @@ where
|
|||
pub fn watch_project<F, A>(
|
||||
directory: Option<&Path>,
|
||||
filter: F,
|
||||
seed: u32,
|
||||
debounce: u32,
|
||||
mut action: A,
|
||||
) -> miette::Result<()>
|
||||
|
@ -219,7 +234,7 @@ where
|
|||
.if_supports_color(Stderr, |s| s.bold())
|
||||
.if_supports_color(Stderr, |s| s.purple()),
|
||||
);
|
||||
with_project(directory, false, &mut action).unwrap_or(())
|
||||
with_project(directory, seed, false, &mut action).unwrap_or(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -38,3 +38,4 @@ clap_complete = "4.3.2"
|
|||
inquire = "0.6.2"
|
||||
num-bigint = "0.4.3"
|
||||
ordinal = "0.3.2"
|
||||
rand = "0.8.5"
|
||||
|
|
|
@ -23,6 +23,10 @@ pub struct Args {
|
|||
/// Force the project to be rebuilt, otherwise relies on existing artifacts (i.e. plutus.json)
|
||||
#[clap(long)]
|
||||
rebuild: bool,
|
||||
|
||||
/// Output the address for mainnet (this command defaults to testnet)
|
||||
#[clap(long)]
|
||||
mainnet: bool,
|
||||
}
|
||||
|
||||
pub fn exec(
|
||||
|
@ -32,9 +36,10 @@ pub fn exec(
|
|||
validator,
|
||||
delegated_to,
|
||||
rebuild,
|
||||
mainnet,
|
||||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
with_project(directory.as_deref(), false, |p| {
|
||||
with_project(directory.as_deref(), u32::default(), false, |p| {
|
||||
if rebuild {
|
||||
p.build(false, Tracing::silent())?;
|
||||
}
|
||||
|
@ -51,7 +56,7 @@ pub fn exec(
|
|||
|
||||
let title = title.as_ref().or(validator.as_ref());
|
||||
|
||||
let address = p.address(title, delegated_to.as_ref())?;
|
||||
let address = p.address(title, delegated_to.as_ref(), mainnet)?;
|
||||
|
||||
println!("{}", address.to_bech32().unwrap());
|
||||
|
||||
|
|
|
@ -13,9 +13,8 @@ use num_bigint::BigInt;
|
|||
use ordinal::Ordinal;
|
||||
use owo_colors::{OwoColorize, Stream::Stderr};
|
||||
use pallas::ledger::primitives::alonzo::PlutusData;
|
||||
use std::str::FromStr;
|
||||
use std::{fs, path::PathBuf, process, rc::Rc};
|
||||
use uplc::ast::{Constant, Data as UplcData, DeBruijn, Term};
|
||||
use std::{fs, path::PathBuf, process, str::FromStr};
|
||||
use uplc::ast::Data as UplcData;
|
||||
|
||||
/// Apply a parameter to a parameterized validator.
|
||||
#[derive(clap::Args)]
|
||||
|
@ -48,7 +47,7 @@ pub fn exec(
|
|||
validator,
|
||||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
with_project(None, false, |p| {
|
||||
with_project(None, u32::default(), false, |p| {
|
||||
let title = module.as_ref().map(|m| {
|
||||
format!(
|
||||
"{m}{}",
|
||||
|
@ -68,7 +67,7 @@ pub fn exec(
|
|||
.if_supports_color(Stderr, |s| s.bold()),
|
||||
);
|
||||
|
||||
let term: Term<DeBruijn> = match ¶meter {
|
||||
let data: PlutusData = match ¶meter {
|
||||
Some(param) => {
|
||||
eprintln!(
|
||||
"{} inputs",
|
||||
|
@ -90,7 +89,7 @@ pub fn exec(
|
|||
process::exit(1)
|
||||
});
|
||||
|
||||
let data = uplc::plutus_data(&bytes)
|
||||
uplc::plutus_data(&bytes)
|
||||
.map_err::<Error, _>(|e| {
|
||||
blueprint::error::Error::MalformedParameter {
|
||||
hint: format!("Invalid Plutus data; malformed CBOR encoding: {e}"),
|
||||
|
@ -101,9 +100,7 @@ pub fn exec(
|
|||
println!();
|
||||
e.report();
|
||||
process::exit(1)
|
||||
});
|
||||
|
||||
Term::Constant(Rc::new(Constant::Data(data)))
|
||||
})
|
||||
}
|
||||
|
||||
None => p.construct_parameter_incrementally(title, ask_schema)?,
|
||||
|
@ -114,16 +111,13 @@ pub fn exec(
|
|||
" Applying"
|
||||
.if_supports_color(Stderr, |s| s.purple())
|
||||
.if_supports_color(Stderr, |s| s.bold()),
|
||||
match TryInto::<PlutusData>::try_into(term.clone()) {
|
||||
Ok(data) => {
|
||||
let padding = "\n ";
|
||||
multiline(48, UplcData::to_hex(data)).join(padding)
|
||||
}
|
||||
Err(_) => term.to_pretty(),
|
||||
{
|
||||
let padding = "\n ";
|
||||
multiline(48, UplcData::to_hex(data.clone())).join(padding)
|
||||
}
|
||||
);
|
||||
|
||||
let blueprint = p.apply_parameter(title, &term)?;
|
||||
let blueprint = p.apply_parameter(title, &data)?;
|
||||
|
||||
let json = serde_json::to_string_pretty(&blueprint).unwrap();
|
||||
|
||||
|
@ -179,16 +173,21 @@ fn ask_schema(
|
|||
}
|
||||
|
||||
Schema::Data(Data::List(Items::Many(ref decls))) => {
|
||||
eprintln!(" {}", asking(schema, "Found", &format!("a {}-tuple", decls.len())));
|
||||
eprintln!(
|
||||
" {}",
|
||||
asking(schema, "Found", &format!("a {}-tuple", decls.len()))
|
||||
);
|
||||
|
||||
let mut elems = vec![];
|
||||
|
||||
for (ix, decl) in decls.iter().enumerate() {
|
||||
eprintln!(
|
||||
" {} Tuple's {}{} element",
|
||||
"Asking".if_supports_color(Stderr, |s| s.purple()).if_supports_color(Stderr, |s| s.bold()),
|
||||
ix+1,
|
||||
Ordinal::<usize>(ix+1).suffix()
|
||||
"Asking"
|
||||
.if_supports_color(Stderr, |s| s.purple())
|
||||
.if_supports_color(Stderr, |s| s.bold()),
|
||||
ix + 1,
|
||||
Ordinal::<usize>(ix + 1).suffix()
|
||||
);
|
||||
let inner_schema = lookup_declaration(&decl.clone().into(), definitions);
|
||||
elems.push(ask_schema(&inner_schema, definitions)?);
|
||||
|
@ -252,7 +251,9 @@ fn ask_schema(
|
|||
Ok(UplcData::constr(ix.try_into().unwrap(), fields))
|
||||
}
|
||||
|
||||
_ => unimplemented!("Hey! You've found a case that we haven't implemented yet. Yes, we've been a bit lazy on that one... If that use-case is important to you, please let us know on Discord or on Github."),
|
||||
_ => unimplemented!(
|
||||
"Hey! You've found a case that we haven't implemented yet. Yes, we've been a bit lazy on that one... If that use-case is important to you, please let us know on Discord or on Github."
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ pub fn exec(
|
|||
rebuild,
|
||||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
with_project(directory.as_deref(), false, |p| {
|
||||
with_project(directory.as_deref(), u32::default(), false, |p| {
|
||||
if rebuild {
|
||||
p.build(false, Tracing::silent())?;
|
||||
}
|
||||
|
@ -46,7 +46,7 @@ pub fn exec(
|
|||
|
||||
let title = title.as_ref().or(validator.as_ref());
|
||||
|
||||
let address = p.address(title, None)?;
|
||||
let address = p.address(title, None, false)?;
|
||||
|
||||
println!("{}", address.payment().to_hex());
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ pub fn exec(
|
|||
rebuild,
|
||||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
with_project(directory.as_deref(), false, |p| {
|
||||
with_project(directory.as_deref(), u32::default(), false, |p| {
|
||||
if rebuild {
|
||||
p.build(false, Tracing::silent())?;
|
||||
}
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
use aiken_lang::ast::{TraceLevel, Tracing};
|
||||
use aiken_project::watch::{self, watch_project, with_project};
|
||||
use clap::builder::MapValueParser;
|
||||
use clap::builder::{PossibleValuesParser, TypedValueParser};
|
||||
use clap::builder::{MapValueParser, PossibleValuesParser, TypedValueParser};
|
||||
use std::{path::PathBuf, process};
|
||||
|
||||
#[derive(clap::Args)]
|
||||
|
@ -52,17 +51,23 @@ pub fn exec(
|
|||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
let result = if watch {
|
||||
watch_project(directory.as_deref(), watch::default_filter, 500, |p| {
|
||||
p.build(
|
||||
uplc,
|
||||
match filter_traces {
|
||||
Some(filter_traces) => filter_traces(trace_level),
|
||||
None => Tracing::All(trace_level),
|
||||
},
|
||||
)
|
||||
})
|
||||
watch_project(
|
||||
directory.as_deref(),
|
||||
watch::default_filter,
|
||||
u32::default(),
|
||||
500,
|
||||
|p| {
|
||||
p.build(
|
||||
uplc,
|
||||
match filter_traces {
|
||||
Some(filter_traces) => filter_traces(trace_level),
|
||||
None => Tracing::All(trace_level),
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
} else {
|
||||
with_project(directory.as_deref(), deny, |p| {
|
||||
with_project(directory.as_deref(), u32::default(), deny, |p| {
|
||||
p.build(
|
||||
uplc,
|
||||
match filter_traces {
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
use super::build::{filter_traces_parser, trace_level_parser};
|
||||
use aiken_lang::ast::{TraceLevel, Tracing};
|
||||
use aiken_project::watch::{self, watch_project, with_project};
|
||||
use rand::prelude::*;
|
||||
use std::{path::PathBuf, process};
|
||||
|
||||
#[derive(clap::Args)]
|
||||
|
@ -25,6 +26,10 @@ pub struct Args {
|
|||
#[clap(long)]
|
||||
watch: bool,
|
||||
|
||||
/// An initial seed to initialize the pseudo-random generator for property-tests.
|
||||
#[clap(long)]
|
||||
seed: Option<u32>,
|
||||
|
||||
/// Only run tests if they match any of these strings.
|
||||
/// You can match a module with `-m aiken/list` or `-m list`.
|
||||
/// You can match a test with `-m "aiken/list.{map}"` or `-m "aiken/option.{flatten_1}"`
|
||||
|
@ -66,28 +71,41 @@ pub fn exec(
|
|||
watch,
|
||||
filter_traces,
|
||||
trace_level,
|
||||
seed,
|
||||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
let mut rng = rand::thread_rng();
|
||||
|
||||
let seed = seed.unwrap_or_else(|| rng.gen());
|
||||
|
||||
let result = if watch {
|
||||
watch_project(directory.as_deref(), watch::default_filter, 500, |p| {
|
||||
p.check(
|
||||
skip_tests,
|
||||
match_tests.clone(),
|
||||
debug,
|
||||
exact_match,
|
||||
match filter_traces {
|
||||
Some(filter_traces) => filter_traces(trace_level),
|
||||
None => Tracing::All(trace_level),
|
||||
},
|
||||
)
|
||||
})
|
||||
watch_project(
|
||||
directory.as_deref(),
|
||||
watch::default_filter,
|
||||
seed,
|
||||
500,
|
||||
|p| {
|
||||
p.check(
|
||||
skip_tests,
|
||||
match_tests.clone(),
|
||||
debug,
|
||||
exact_match,
|
||||
seed,
|
||||
match filter_traces {
|
||||
Some(filter_traces) => filter_traces(trace_level),
|
||||
None => Tracing::All(trace_level),
|
||||
},
|
||||
)
|
||||
},
|
||||
)
|
||||
} else {
|
||||
with_project(directory.as_deref(), deny, |p| {
|
||||
with_project(directory.as_deref(), seed, deny, |p| {
|
||||
p.check(
|
||||
skip_tests,
|
||||
match_tests.clone(),
|
||||
debug,
|
||||
exact_match,
|
||||
seed,
|
||||
match filter_traces {
|
||||
Some(filter_traces) => filter_traces(trace_level),
|
||||
None => Tracing::All(trace_level),
|
||||
|
|
|
@ -29,11 +29,17 @@ pub fn exec(
|
|||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
let result = if watch {
|
||||
watch_project(directory.as_deref(), watch::default_filter, 500, |p| {
|
||||
watch_project(
|
||||
directory.as_deref(),
|
||||
watch::default_filter,
|
||||
u32::default(),
|
||||
500,
|
||||
|p| p.docs(destination.clone()),
|
||||
)
|
||||
} else {
|
||||
with_project(directory.as_deref(), u32::default(), deny, |p| {
|
||||
p.docs(destination.clone())
|
||||
})
|
||||
} else {
|
||||
with_project(directory.as_deref(), deny, |p| p.docs(destination.clone()))
|
||||
};
|
||||
|
||||
result.map_err(|_| process::exit(1))
|
||||
|
|
|
@ -30,39 +30,41 @@ pub fn exec(
|
|||
cbor,
|
||||
}: Args,
|
||||
) -> miette::Result<()> {
|
||||
let mut program = if cbor {
|
||||
let mut program: Program<Name> = if cbor {
|
||||
let cbor_hex = std::fs::read_to_string(&script).into_diagnostic()?;
|
||||
|
||||
let raw_cbor = hex::decode(cbor_hex.trim()).into_diagnostic()?;
|
||||
|
||||
let prog = Program::<FakeNamedDeBruijn>::from_cbor(&raw_cbor, &mut Vec::new())
|
||||
let program = Program::<FakeNamedDeBruijn>::from_cbor(&raw_cbor, &mut Vec::new())
|
||||
.into_diagnostic()?;
|
||||
|
||||
prog.into()
|
||||
let program: Program<NamedDeBruijn> = program.into();
|
||||
|
||||
Program::<Name>::try_from(program).into_diagnostic()?
|
||||
} else if flat {
|
||||
let bytes = std::fs::read(&script).into_diagnostic()?;
|
||||
|
||||
let prog = Program::<FakeNamedDeBruijn>::from_flat(&bytes).into_diagnostic()?;
|
||||
let program = Program::<FakeNamedDeBruijn>::from_flat(&bytes).into_diagnostic()?;
|
||||
|
||||
prog.into()
|
||||
let program: Program<NamedDeBruijn> = program.into();
|
||||
|
||||
Program::<Name>::try_from(program).into_diagnostic()?
|
||||
} else {
|
||||
let code = std::fs::read_to_string(&script).into_diagnostic()?;
|
||||
|
||||
let prog = parser::program(&code).into_diagnostic()?;
|
||||
|
||||
Program::<NamedDeBruijn>::try_from(prog).into_diagnostic()?
|
||||
parser::program(&code).into_diagnostic()?
|
||||
};
|
||||
|
||||
for arg in args {
|
||||
let term = parser::term(&arg).into_diagnostic()?;
|
||||
|
||||
let term = Term::<NamedDeBruijn>::try_from(term).into_diagnostic()?;
|
||||
|
||||
program = program.apply_term(&term);
|
||||
program = program.apply_term(&term)
|
||||
}
|
||||
|
||||
let budget = ExBudget::default();
|
||||
|
||||
let program = Program::<NamedDeBruijn>::try_from(program).into_diagnostic()?;
|
||||
|
||||
let mut eval_result = program.eval(budget);
|
||||
|
||||
let cost = eval_result.cost();
|
||||
|
|
|
@ -7,6 +7,7 @@ use crate::{
|
|||
eval_result::EvalResult,
|
||||
Machine,
|
||||
},
|
||||
parser::interner::Interner,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use num_traits::ToPrimitive;
|
||||
|
@ -18,7 +19,6 @@ use pallas::ledger::{
|
|||
},
|
||||
traverse::ComputeHash,
|
||||
};
|
||||
|
||||
use serde::{
|
||||
self,
|
||||
de::{self, Deserialize, Deserializer, MapAccess, Visitor},
|
||||
|
@ -58,21 +58,8 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
/// We use this to apply the validator to Datum,
|
||||
/// then redeemer, then ScriptContext. If datum is
|
||||
/// even necessary (i.e. minting policy).
|
||||
pub fn apply_term(&self, term: &Term<T>) -> Self {
|
||||
let applied_term = Term::Apply {
|
||||
function: Rc::new(self.term.clone()),
|
||||
argument: Rc::new(term.clone()),
|
||||
};
|
||||
|
||||
Program {
|
||||
version: self.version,
|
||||
term: applied_term,
|
||||
}
|
||||
}
|
||||
|
||||
/// A convenient and faster version that `apply_term` since the program doesn't need to be
|
||||
/// re-interned (constant Data do not introduce new bindings).
|
pub fn apply_data(&self, plutus_data: PlutusData) -> Self {
let applied_term = Term::Apply {
function: Rc::new(self.term.clone()),

@ -86,6 +73,27 @@ where
}
}

impl Program<Name> {
/// We use this to apply the validator to Datum,
/// then redeemer, then ScriptContext. If datum is
/// even necessary (i.e. minting policy).
pub fn apply_term(&self, term: &Term<Name>) -> Self {
let applied_term = Term::Apply {
function: Rc::new(self.term.clone()),
argument: Rc::new(term.clone()),
};

let mut program = Program {
version: self.version,
term: applied_term,
};

Interner::new().program(&mut program);

program
}
}

impl<'a, T> Display for Program<T>
where
T: Binder<'a>,

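Connecting the two `apply_*` hunks: `Program::<Name>::apply_term` re-interns because the applied term may carry names whose uniques clash with the program's, while the generic `apply_data` skips that step since a constant `Data` argument introduces no new bindings. A sketch of the Datum → Redeemer → ScriptContext application order described by the doc comment; the helper name, the choice of `Program<NamedDeBruijn>`, and the direct `pallas` dependency are illustrative assumptions, only `apply_data` and the argument order come from the diff:

```rust
use pallas::ledger::primitives::babbage::PlutusData;
use uplc::ast::{NamedDeBruijn, Program};

// Hypothetical helper for a spend validator.
fn apply_spend_arguments(
    validator: Program<NamedDeBruijn>,
    datum: PlutusData,
    redeemer: PlutusData,
    script_context: PlutusData,
) -> Program<NamedDeBruijn> {
    // Datum first (spend purpose only), then redeemer, then ScriptContext.
    // `apply_data` takes &self and returns a new Program, so the calls chain.
    validator
        .apply_data(datum)
        .apply_data(redeemer)
        .apply_data(script_context)
}
```
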
@ -161,7 +169,9 @@ impl<'a> Deserialize<'a> for Program<DeBruijn> {
impl Program<DeBruijn> {
pub fn address(&self, network: Network, delegation: ShelleyDelegationPart) -> ShelleyAddress {
let cbor = self.to_cbor().unwrap();

let validator_hash = babbage::PlutusV2Script(cbor.into()).compute_hash();

ShelleyAddress::new(
network,
ShelleyPaymentPart::Script(validator_hash),

@ -269,7 +279,7 @@ pub enum Constant {
Bls12_381MlResult(Box<blst::blst_fp12>),
}

pub struct Data {}
pub struct Data;

// TODO: See about moving these builders upstream to Pallas?
impl Data {

@ -6,7 +6,7 @@ use crate::ast::{NamedDeBruijn, Term, Type};

use super::{ExBudget, Value};

#[derive(thiserror::Error, Debug, miette::Diagnostic)]
#[derive(Debug, Clone, thiserror::Error, miette::Diagnostic)]
pub enum Error {
#[error("Over budget mem: {} & cpu: {}", .0.mem, .0.cpu)]
OutOfExError(ExBudget),

@ -2,6 +2,7 @@ use crate::ast::{Constant, NamedDeBruijn, Term};

use super::{cost_model::ExBudget, Error};

#[derive(Debug)]
pub struct EvalResult {
result: Result<Term<NamedDeBruijn>, Error>,
remaining_budget: ExBudget,

@ -39,11 +40,11 @@ impl EvalResult {
} else {
self.result.is_err()
|| matches!(self.result, Ok(Term::Error))
|| matches!(self.result, Ok(Term::Constant(ref con)) if matches!(con.as_ref(), Constant::Bool(false)))
|| !matches!(self.result, Ok(Term::Constant(ref con)) if matches!(con.as_ref(), Constant::Bool(true)))
}
}

pub fn result(self) -> Result<Term<NamedDeBruijn>, Error> {
self.result
pub fn result(&self) -> Result<Term<NamedDeBruijn>, Error> {
self.result.clone()
}
}

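Two related changes meet here: the machine `Error` now derives `Clone` (previous hunk), which is what lets `EvalResult::result` switch from consuming `self` to borrowing and cloning, and `failed` now treats anything other than a `Constant::Bool(true)` result as a failure. A small sketch of what the new `result(&self)` signature allows; the types come from this diff, the surrounding function is illustrative:

```rust
use uplc::{
    ast::{NamedDeBruijn, Program},
    machine::cost_model::ExBudget,
};

fn report(program: Program<NamedDeBruijn>) {
    let eval_result = program.eval(ExBudget::default());

    // `result()` now takes &self and clones, so it can be called more than once
    // without moving the EvalResult out from under later inspections.
    if eval_result.result().is_err() {
        eprintln!("evaluation failed: {:?}", eval_result.result());
    }
}
```
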
@ -1,13 +1,12 @@
use std::{collections::VecDeque, mem::size_of, ops::Deref, rc::Rc};

use num_bigint::BigInt;
use num_traits::{Signed, ToPrimitive, Zero};
use pallas::ledger::primitives::babbage::{self, PlutusData};

use crate::{
ast::{Constant, NamedDeBruijn, Term, Type},
builtins::DefaultFunction,
};
use num_bigint::BigInt;
use num_traits::{Signed, ToPrimitive, Zero};
use pallas::ledger::primitives::babbage::{self, PlutusData};

use super::{runtime::BuiltinRuntime, Error};

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877506, nanos_since_epoch = 997686000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380938, nanos_since_epoch = 98285000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877451, nanos_since_epoch = 945453000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380936, nanos_since_epoch = 549765000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877454, nanos_since_epoch = 923046000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380938, nanos_since_epoch = 133943000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877469, nanos_since_epoch = 962343000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380954, nanos_since_epoch = 378021000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877454, nanos_since_epoch = 909901000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380939, nanos_since_epoch = 227480000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877474, nanos_since_epoch = 639765000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380958, nanos_since_epoch = 713857000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877472, nanos_since_epoch = 989842000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380955, nanos_since_epoch = 138699000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877474, nanos_since_epoch = 306725000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380957, nanos_since_epoch = 530063000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877444, nanos_since_epoch = 552129000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380929, nanos_since_epoch = 819852000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877445, nanos_since_epoch = 711317000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380931, nanos_since_epoch = 69042000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877462, nanos_since_epoch = 746349000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380947, nanos_since_epoch = 463143000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877459, nanos_since_epoch = 940642000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380945, nanos_since_epoch = 171552000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877443, nanos_since_epoch = 793660000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380929, nanos_since_epoch = 326252000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877446, nanos_since_epoch = 760870000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380932, nanos_since_epoch = 592853000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877444, nanos_since_epoch = 539452000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380929, nanos_since_epoch = 321331000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877443, nanos_since_epoch = 796476000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380929, nanos_since_epoch = 897643000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877463, nanos_since_epoch = 421256000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380947, nanos_since_epoch = 468037000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877459, nanos_since_epoch = 979094000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380945, nanos_since_epoch = 203745000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -1,7 +1,7 @@
use aiken/cbor.{diagnostic, serialise}

test cbor_serialise_large_num() {
serialise(18446744073709551615) == #"c248ffffffffffffffff"
serialise(18446744073709551615) == #"1bffffffffffffffff"
}

test cbor_diagnostic_large_num() {

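The new expectation in `cbor_serialise_large_num` reflects that 18446744073709551615 (the largest 64-bit unsigned integer) is now serialised as a plain CBOR unsigned integer rather than as a tag-2 bignum wrapping a byte string. Spelled out byte by byte in Rust, purely as an illustration of the two encodings seen in the test:

```rust
fn main() {
    // Major type 0 with additional information 27: an 8-byte unsigned integer,
    // i.e. 1b ff ff ff ff ff ff ff ff -- the encoding the updated test expects.
    let plain_uint: [u8; 9] = [0x1b, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff];

    // Tag 2 (positive bignum, 0xc2) wrapping an 8-byte byte string (0x48),
    // i.e. c2 48 ff ff ff ff ff ff ff ff -- the old, longer encoding.
    let bignum: [u8; 10] = [0xc2, 0x48, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff];

    // Same number, two encodings; the canonical form saves one byte here.
    assert_eq!(plain_uint.len() + 1, bignum.len());
}
```
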
@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877462, nanos_since_epoch = 777187000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380948, nanos_since_epoch = 315193000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1707877458, nanos_since_epoch = 575935000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709380943, nanos_since_epoch = 662175000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.24-alpha+ac0c73a"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -1,7 +0,0 @@
# This file was generated by Aiken
# You typically do not need to edit this file

requirements = []
packages = []

[etags]

@ -1,4 +0,0 @@
name = "thing/thing"
version = "0.0.0"
license = "Apache-2.0"
description = "Aiken contracts for project 'thing/thing'"

@ -1,10 +0,0 @@
pub fn wow(a: Void) -> Int {
when Some(a) is {
Some(Void) -> 42
None -> 0
}
}

test wow_1() {
wow(Void) == 42
}

@ -2,11 +2,18 @@

exit_codes=()

for scenario in $(find . -maxdepth 1 -mindepth 1 -regex ".*[0-9]\{3\}" -type d); do
./run $scenario
exit_codes+=("$?")
TESTS=()
for scenario in $(find . -maxdepth 1 -mindepth 1 -type d ! -name script_context); do
./run $scenario &
TESTS+=("$!")
done

for p in ${TESTS[@]}; do
wait $p
exit_codes+=("$?")
done


for interaction in $(find script_context/validators -type f); do
title=$(basename $interaction)
title="${title%.*}"

@ -17,6 +24,7 @@ done

for code in ${exit_codes[@]}; do
if [ $code -ne 0 ]; then
echo "Some test returned non-zero code: $code"
exit $code
fi
done

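The script change above launches every acceptance-test scenario in the background, then `wait`s on each PID and collects the exit codes before deciding whether the suite failed. The same fan-out/fan-in pattern, shown in Rust (the language of the other examples here) with `std::process`; the `./run <scenario>` invocation mirrors the script, the surrounding function is illustrative:

```rust
use std::process::Command;

fn run_scenarios(scenarios: &[&str]) -> std::io::Result<()> {
    // Fan out: spawn one `./run <scenario>` child per scenario, like `./run $scenario &`.
    let mut children = Vec::new();
    for scenario in scenarios {
        children.push(Command::new("./run").arg(scenario).spawn()?);
    }

    // Fan in: wait on every child and fail loudly on the first non-zero status,
    // mirroring the `wait $p` / `exit_codes` loop in the script.
    for mut child in children {
        let status = child.wait()?;
        if !status.success() {
            eprintln!("Some test returned non-zero code: {:?}", status.code());
            std::process::exit(status.code().unwrap_or(1));
        }
    }

    Ok(())
}
```
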
@ -13,4 +13,4 @@ requirements = []
source = "github"

[etags]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1706677006, nanos_since_epoch = 304401000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]
"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1709381590, nanos_since_epoch = 532309000 }, "cf946239d3dd481ed41f20e56bf24910b5229ea35aa171a708edc2a47fc20a7b"]

@ -5,7 +5,7 @@
"plutusVersion": "v2",
"compiler": {
"name": "Aiken",
"version": "v1.0.23-alpha+3a53427"
"version": "v1.0.24-alpha+8532aff"
}
},
"validators": [

@ -23,8 +23,8 @@
"$ref": "#/definitions/Void"
}
},
"compiledCode": "59032b01000032323232323232323222253330063253330073330073375e6018601a601a601a601a601a601a601a601a601a601a601a600a6002600a00498126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff004a09444c94ccc020c8c94ccc028cdc3a400400226464a66601866ebcc018c02800530126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff0013370e6eb4c014c028005200014a0602000260100042c60100026002600c0062646464a666016664464a6660220022c264a666024002264a66602066ebcc024c038004dd318032410151ad720e2a66602066ebcc028c038c028c038005300122d8799f581c11111111111111111111111111111111111111111111111111111111ff0015333010323253330123370e90000008a5014a260200026012601c6014601c0022a66602066ebcc014c03800530103d879800013375e6010601c00298103d87a800014a029405280a5030140021630140013758600460160024602260246024002600a601200c266ebcc00cc024c014c024018dd31800a40a8294094ccc02ccdc3800a4000297adef6c6013232330010014bd6f7b63011299980880089980919bb04c1014000374c00697adef6c6013232323253330123375e66012911000024c103d879800013301633760981014000374c00e00a2a66602466e3d22100002133016337609801014000374c00e00626602c66ec0dd48011ba600133006006003375660260066eb8c044008c054008c04c004c8cc0040052f5bded8c044a66602000226602266ec13001014000375000697adef6c6013232323253330113375e66010911000024c103d879800013301533760981014000375000e00a2a66602266e3d22100002133015337609801014000375000e00626602a66ec0dd48011ba800133006006003375a60240066eb8c040008c050008c048004894ccc02ccdc80010008a6103d87980001533300b3371e0040022980103d87a800014c103d87b80002300e300f300f300f00114a04601a601c00229408c03000452613656230053754002460066ea80055cd2ab9d5573caae7d5d02ba157441",
"hash": "899f2484d18c438e0e100fe6b08e8646d48322f3b3634f86cb51889d"
"compiledCode": "59034d010000323232323232323232232232253330083253330093330093375e601c601e601e601e601e601e601e601e601e601e601e601e600e6002600e00498126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff004a09444c94ccc028c8c94ccc030cdc3a400400226464a66601c66ebcc018c03000530126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff0013370e6eb4c014c030005200014a0602400260140042c6014002600260100062646464a66601a664464a6660260022c264a666028002264a66602466ebcc024c040004dd318032410151ad720e2a66602466ebcc028c040c028c040005300122d8799f581c11111111111111111111111111111111111111111111111111111111ff0015333012323253330143370e90000008a5014a2602400260126020601460200022a66602466ebcc014c04000530103d879800013375e6010602000298103d87a800014a029405280a50301600216301600137586004601a0024602660286028002600a601600c266ebcc00cc02cc014c02c018dd31800a40a8294094ccc034cdc3800a4000297adef6c6013232330010014bd6f7b63011299980980089980a19bb04c1014000374c00697adef6c6013232323253330143375e66012911000024c103d879800013301833760981014000374c00e00a2a66602866e3d22100002133018337609801014000374c00e00626603066ec0dd48011ba6001330060060033756602a0066eb8c04c008c05c008c054004c8cc0040052f5bded8c044a66602400226602666ec13001014000375000697adef6c6013232323253330133375e66010911000024c103d879800013301733760981014000375000e00a2a66602666e3d22100002133017337609801014000375000e00626602e66ec0dd48011ba800133006006003375a60280066eb8c048008c058008c050004894ccc034cdc80010008a6103d87980001533300d3371e0040022980103d87a800014c103d87b80002301030113011301100114a04601e602000229408c0380045261365653330063370e900018039baa0011498594ccc010cdc3a4000600a6ea800452616230053754002460066ea80055cd2ab9d5573caae7d5d02ba15745",
"hash": "381d52fa18759846189ed3aefd79cc3348e9a21bf37d02efd4d82803"
},
{
"title": "deploy.spend",

@ -68,8 +68,8 @@
"$ref": "#/definitions/Void"
}
},
"compiledCode": "5901cb010000323232323232323232222533300632323253330093232533300b3370e90010008b099b87375a60206012004902a1804800998009bab30023007300330070044c0126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff00153330093232533300b3370e90010008b099b87375a60206012004900e1804800998009bab30023007300330070044c126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff0013375e6e9cc8cc004004dd598019804180218040029129998070008a5eb804cc03cc030c040004cc008008c044004dd399806a6126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff003300d4c126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff004bd700a5014a044646600200200644a66601e002298103d87a800013232323253330103375e00e004266e95200033014375000297ae0133006006003375a6022006601e004602600460220024601a601c601c601c601c601c601c0024601800229309b2b118029baa001230033754002ae6955ceaab9e5573eae815d0aba21",
"hash": "ab5c648e1399ad1c5ed964ca5eb98e90a842f3674dd26ebf2ec8e107"
"compiledCode": "5901ee01000032323232323232323223223225333008323232533300b3232533300d3370e90010008b099b87375a60246016004902a1805800998009bab30023009300330090044c0126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff001533300b3232533300d3370e90010008b099b87375a60246016004900e1805800998009bab30023009300330090044c126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff0013375e6e9cc8cc004004dd598019805180218050029129998080008a5eb804cc044c038c048004cc008008c04c004dd399807a6126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff003300f4c126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff004bd700a5014a044646600200200644a666022002298103d87a800013232323253330123375e00e004266e95200033016375000297ae0133006006003375a60260066022004602a00460260024601e6020602060206020602060200024601c00229309b2b299980319b8748000c01cdd50008a4c2ca66600866e1d2000300537540022930b118029baa001230033754002ae6955ceaab9e5573eae815d0aba201",
"hash": "1595536930902a5b877f84aa220ecf2f140f3f1c8a3f1c6face4b959"
}
],
"definitions": {