Merge pull request #316 from aiken-lang/cip-0057-blueprints

Blueprints (CIP-0057) as build artifacts
Matthias Benkort 2023-01-31 17:45:56 +01:00 committed by GitHub
commit ecc5e13ccd
46 changed files with 2255 additions and 451 deletions
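In short, aiken build now emits a CIP-0057 blueprint as its main build artifact: a single plutus.json file at the project root describing every validator (its purpose, datum/redeemer schemas and compiled UPLC), replacing the previous write_build_outputs flow, with raw UPLC additionally dumped under artifacts/ when requested. A minimal standalone sketch of the document shape being serialized, using simplified stand-ins for the Preamble and Blueprint structs introduced in this PR (the struct definitions and main below are illustrative only, not the crate's actual API):

use serde::Serialize;

#[derive(Serialize)]
struct Preamble {
    title: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
    version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    license: Option<String>,
}

#[derive(Serialize)]
struct Blueprint {
    preamble: Preamble,
    // Stand-in for Vec<Validator<Schema>>; each entry carries a title, purpose,
    // optional datum, redeemer and the flattened compiled program.
    validators: Vec<serde_json::Value>,
}

fn main() {
    let blueprint = Blueprint {
        preamble: Preamble {
            title: "aiken-lang/example".to_string(),
            description: None, // omitted from the output, as in the serialize_no_description test
            version: "1.0.0".to_string(),
            license: Some("Apache-2.0".to_string()),
        },
        validators: vec![],
    };
    // compile() writes this kind of pretty-printed JSON to plutus.json
    println!("{}", serde_json::to_string_pretty(&blueprint).unwrap());
}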

Cargo.lock generated
View File

@@ -66,6 +66,7 @@ dependencies = [
"pallas-primitives",
"pallas-traverse",
"regex",
+"serde_json",
"thiserror",
"uplc",
]
@@ -112,6 +113,7 @@ version = "0.0.28"
dependencies = [
"aiken-lang",
"askama",
+"assert-json-diff",
"dirs",
"fslock",
"futures",
@@ -197,6 +199,16 @@ dependencies = [
"toml",
]
+[[package]]
+name = "assert-json-diff"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12"
+dependencies = [
+"serde",
+"serde_json",
+]
[[package]]
name = "atty"
version = "0.2.14"

View File

@@ -988,7 +988,7 @@ pub fn convert_constants_to_data(constants: Vec<UplcConstant>) -> Vec<UplcConstant>
new_constants
}
-pub fn wrap_validator_args(term: Term<Name>, arguments: Vec<TypedArg>) -> Term<Name> {
+pub fn wrap_validator_args(term: Term<Name>, arguments: &[TypedArg]) -> Term<Name> {
let mut term = term;
for arg in arguments.iter().rev() {
if !matches!(arg.tipo.get_uplc_type(), UplcType::Data) {

View File

@@ -1,12 +1,6 @@
-use std::{cell::RefCell, collections::HashMap, sync::Arc};
-use strum::IntoEnumIterator;
-use uplc::builtins::DefaultFunction;
use crate::{
-ast::{Arg, ArgName, CallArg, Function, ModuleKind, Span, TypedFunction, UnOp},
+ast::{Arg, ArgName, CallArg, Function, ModuleKind, Span, TypedDataType, TypedFunction, UnOp},
-builder::FunctionAccessKey,
+builder::{DataTypeKey, FunctionAccessKey},
expr::TypedExpr,
tipo::{
fields::FieldMap, Type, TypeConstructor, TypeInfo, TypeVar, ValueConstructor,
@@ -14,6 +8,10 @@ use crate::{
},
IdGenerator,
};
+use indexmap::IndexMap;
+use std::{cell::RefCell, collections::HashMap, sync::Arc};
+use strum::IntoEnumIterator;
+use uplc::builtins::DefaultFunction;
pub const BYTE_ARRAY: &str = "ByteArray";
pub const BOOL: &str = "Bool";
@@ -533,8 +531,8 @@ pub fn from_default_function(
})
}
-pub fn prelude_functions(id_gen: &IdGenerator) -> HashMap<FunctionAccessKey, TypedFunction> {
+pub fn prelude_functions(id_gen: &IdGenerator) -> IndexMap<FunctionAccessKey, TypedFunction> {
-let mut functions = HashMap::new();
+let mut functions = IndexMap::new();
// /// Negate the argument. Useful for map/fold and pipelines.
// pub fn not(self: Bool) -> Bool {
@@ -800,6 +798,22 @@ pub fn prelude_functions(id_gen: &IdGenerator) -> HashMap<FunctionAccessKey, TypedFunction>
functions
}
+pub fn prelude_data_types(id_gen: &IdGenerator) -> IndexMap<DataTypeKey, TypedDataType> {
+let mut data_types = IndexMap::new();
+// Option
+let option_data_type = TypedDataType::option(generic_var(id_gen.next()));
+data_types.insert(
+DataTypeKey {
+module_name: "".to_string(),
+defined_type: "Option".to_string(),
+},
+option_data_type,
+);
+data_types
+}
pub fn int() -> Arc<Type> {
Arc::new(Type::App {
public: true,

View File

@@ -466,7 +466,7 @@ Perhaps, try the following:
#[diagnostic(code("unknown::module"))]
#[diagnostic(help(
"{}",
-suggest_neighbor(name, imported_modules.iter(), "Did you forget to import it?")
+suggest_neighbor(name, imported_modules.iter(), "Did you forget to add a package as dependency?")
))]
UnknownModule {
#[label]

View File

@@ -80,7 +80,6 @@ impl UntypedModule {
for def in consts.into_iter().chain(not_consts) {
let definition = infer_definition(def, &name, &mut hydrators, &mut environment, kind)?;
definitions.push(definition);
}
@@ -339,6 +338,7 @@ fn infer_definition(
label,
annotation,
location,
+doc,
..
},
t,
@@ -348,7 +348,7 @@ fn infer_definition(
annotation,
location,
tipo: t.clone(),
-doc: None,
+doc,
}
},
)

View File

@@ -39,12 +39,12 @@ use crate::{
IdGenerator,
};
+#[derive(Clone)]
pub struct CodeGenerator<'a> {
defined_functions: IndexMap<FunctionAccessKey, ()>,
-functions: &'a IndexMap<FunctionAccessKey, &'a TypedFunction>,
+functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
-// type_aliases: &'a IndexMap<(String, String), &'a TypeAlias<Arc<tipo::Type>>>,
+data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
-data_types: &'a IndexMap<DataTypeKey, &'a TypedDataType>,
+module_types: IndexMap<&'a String, &'a TypeInfo>,
-module_types: &'a IndexMap<String, TypeInfo>,
id_gen: IdGenerator,
needs_field_access: bool,
used_data_assert_on_list: bool,
@@ -53,15 +53,13 @@ pub struct CodeGenerator<'a> {
impl<'a> CodeGenerator<'a> {
pub fn new(
-functions: &'a IndexMap<FunctionAccessKey, &'a TypedFunction>,
+functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
-// type_aliases: &'a IndexMap<(String, String), &'a TypeAlias<Arc<tipo::Type>>>,
+data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
-data_types: &'a IndexMap<DataTypeKey, &'a TypedDataType>,
+module_types: IndexMap<&'a String, &'a TypeInfo>,
-module_types: &'a IndexMap<String, TypeInfo>,
) -> Self {
CodeGenerator {
defined_functions: IndexMap::new(),
functions,
-// type_aliases,
data_types,
module_types,
id_gen: IdGenerator::new(),
@@ -73,14 +71,14 @@ impl<'a> CodeGenerator<'a> {
pub fn generate(
&mut self,
-body: TypedExpr,
+body: &TypedExpr,
-arguments: Vec<TypedArg>,
+arguments: &[TypedArg],
wrap_as_validator: bool,
) -> Program<Name> {
let mut ir_stack = vec![];
let scope = vec![self.id_gen.next()];
-self.build_ir(&body, &mut ir_stack, scope);
+self.build_ir(body, &mut ir_stack, scope);
self.define_ir(&mut ir_stack);
@@ -2865,7 +2863,7 @@ impl<'a> CodeGenerator<'a> {
variant_name: String::new(),
};
-let function = self.functions.get(&non_variant_function_key).unwrap();
+let function = *self.functions.get(&non_variant_function_key).unwrap();
let mut func_ir = vec![];
@@ -3355,7 +3353,7 @@ impl<'a> CodeGenerator<'a> {
count,
scope,
} => {
-if check_replaceable_opaque_type(&tipo, self.data_types) {
+if check_replaceable_opaque_type(&tipo, &self.data_types) {
indices_to_remove.push(index);
} else {
let mut replaced_type = tipo.clone();
@@ -3377,7 +3375,7 @@ impl<'a> CodeGenerator<'a> {
let record = ir_stack[index + 1].clone();
let record_type = record.tipo();
if let Some(record_type) = record_type {
-if check_replaceable_opaque_type(&record_type, self.data_types) {
+if check_replaceable_opaque_type(&record_type, &self.data_types) {
indices_to_remove.push(index);
} else {
let mut replaced_type = tipo.clone();
@@ -3408,7 +3406,7 @@ impl<'a> CodeGenerator<'a> {
let record = ir_stack[index + 1].clone();
let record_type = record.tipo();
if let Some(record_type) = record_type {
-if check_replaceable_opaque_type(&record_type, self.data_types) {
+if check_replaceable_opaque_type(&record_type, &self.data_types) {
ir_stack[index] = Air::Let {
scope,
name: indices[0].1.clone(),

View File

@@ -11,6 +11,7 @@ authors = ["Lucas Rosa <x@rvcas.dev>", "Kasey White <kwhitemsg@gmail.com>"]
[dependencies]
aiken-lang = { path = "../aiken-lang", version = "0.0.28" }
askama = "0.10.5"
+assert-json-diff = "2.0.2"
dirs = "4.0.0"
fslock = "0.2.1"
futures = "0.3.25"

View File

@@ -0,0 +1,90 @@
use super::schema;
use crate::module::CheckedModule;
use aiken_lang::{
ast::{Span, TypedFunction},
tipo::Type,
};
use miette::{Diagnostic, NamedSource};
use owo_colors::OwoColorize;
use std::{fmt::Debug, sync::Arc};
#[derive(Debug, thiserror::Error, Diagnostic)]
pub enum Error {
#[error("A validator must return {}", "Bool".bright_blue().bold())]
#[diagnostic(code("aiken::blueprint::invalid::return_type"))]
#[diagnostic(help(r#"While analyzing the return type of your validator, I found it to be:
{signature}
...but I expected this to be a {type_Bool}. If I am inferring the wrong type, you may want to add a type annotation to the function."#
, type_Bool = "Bool".bright_blue().bold()
, signature = return_type.to_pretty(0).red()
))]
ValidatorMustReturnBool {
#[label("invalid return type")]
location: Span,
#[source_code]
source_code: NamedSource,
return_type: Arc<Type>,
},
#[error("A {} validator requires at least {} arguments.", name.purple().bold(), at_least.to_string().purple().bold())]
#[diagnostic(code("aiken::blueprint::invalid::arity"))]
WrongValidatorArity {
name: String,
at_least: u8,
#[label("not enough arguments")]
location: Span,
#[source_code]
source_code: NamedSource,
},
#[error("{}", error)]
#[diagnostic(help("{}", error.help()))]
#[diagnostic(code("aiken::blueprint::interface"))]
Schema {
error: schema::Error,
#[label("invalid contract's boundary")]
location: Span,
#[source_code]
source_code: NamedSource,
},
#[error("Invalid or missing project's blueprint file.")]
#[diagnostic(code("aiken::blueprint::missing"))]
#[diagnostic(help("Did you forget to {build} the project?", build = "build".purple().bold()))]
InvalidOrMissingFile,
}
pub fn assert_return_bool(module: &CheckedModule, def: &TypedFunction) -> Result<(), Error> {
if !def.return_type.is_bool() {
Err(Error::ValidatorMustReturnBool {
return_type: def.return_type.clone(),
location: def.location,
source_code: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
})
} else {
Ok(())
}
}
pub fn assert_min_arity(
module: &CheckedModule,
def: &TypedFunction,
at_least: u8,
) -> Result<(), Error> {
if def.arguments.len() < at_least as usize {
Err(Error::WrongValidatorArity {
name: def.name.clone(),
at_least,
location: def.location,
source_code: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
})
} else {
Ok(())
}
}

View File

@@ -0,0 +1,124 @@
pub mod error;
pub mod schema;
pub mod validator;
use crate::{config::Config, module::CheckedModules};
use aiken_lang::uplc::CodeGenerator;
use error::Error;
use schema::Schema;
use std::fmt::{self, Debug, Display};
use validator::Validator;
#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)]
pub struct Blueprint<T> {
pub preamble: Preamble,
pub validators: Vec<validator::Validator<T>>,
}
#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)]
pub struct Preamble {
pub title: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
pub version: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub license: Option<String>,
}
impl Blueprint<Schema> {
pub fn new(
config: &Config,
modules: &CheckedModules,
generator: &mut CodeGenerator,
) -> Result<Self, Error> {
let preamble = config.into();
let validators: Result<Vec<_>, Error> = modules
.validators()
.map(|(validator, def)| {
Validator::from_checked_module(modules, generator, validator, def)
})
.collect();
Ok(Blueprint {
preamble,
validators: validators?,
})
}
}
impl Display for Blueprint<Schema> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = serde_json::to_string_pretty(self).map_err(|_| fmt::Error)?;
f.write_str(&s)
}
}
impl From<&Config> for Preamble {
fn from(config: &Config) -> Self {
Preamble {
title: config.name.to_string(),
description: if config.description.is_empty() {
None
} else {
Some(config.description.clone())
},
version: config.version.clone(),
license: config.license.clone(),
}
}
}
#[cfg(test)]
mod test {
use super::*;
use serde_json::{self, json};
#[test]
fn serialize_no_description() {
let blueprint: Blueprint<Schema> = Blueprint {
preamble: Preamble {
title: "Foo".to_string(),
description: None,
version: "1.0.0".to_string(),
license: Some("Apache-2.0".to_string()),
},
validators: vec![],
};
assert_eq!(
serde_json::to_value(&blueprint).unwrap(),
json!({
"preamble": {
"title": "Foo",
"version": "1.0.0",
"license": "Apache-2.0"
},
"validators": []
}),
);
}
#[test]
fn serialize_with_description() {
let blueprint: Blueprint<Schema> = Blueprint {
preamble: Preamble {
title: "Foo".to_string(),
description: Some("Lorem ipsum".to_string()),
version: "1.0.0".to_string(),
license: None,
},
validators: vec![],
};
assert_eq!(
serde_json::to_value(&blueprint).unwrap(),
json!({
"preamble": {
"title": "Foo",
"description": "Lorem ipsum",
"version": "1.0.0",
},
"validators": []
}),
);
}
}

View File

@@ -0,0 +1,750 @@
use crate::CheckedModule;
use aiken_lang::{
ast::{DataType, Definition, TypedDefinition},
tipo::{pretty, Type, TypeVar},
};
use owo_colors::OwoColorize;
use serde::{
self,
ser::{Serialize, SerializeStruct, Serializer},
};
use serde_json;
use std::ops::Deref;
use std::{
collections::HashMap,
fmt::{self, Display},
sync::Arc,
};
#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize, serde::Deserialize)]
pub struct Annotated<T> {
#[serde(skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(flatten)]
pub annotated: T,
}
/// A schema for low-level UPLC primitives.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Schema {
Unit,
Boolean,
Integer,
Bytes,
String,
Pair(Data, Data),
List(Vec<Data>),
Data(Option<Data>),
}
/// A schema for Plutus' Data.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Data {
Integer,
Bytes,
List(Box<Data>),
Map(Box<Data>, Box<Data>),
AnyOf(Vec<Annotated<Constructor>>),
}
/// Captures a single UPLC constructor with its
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Constructor {
pub index: usize,
pub fields: Vec<Annotated<Data>>,
}
impl<T> From<T> for Annotated<T> {
fn from(annotated: T) -> Self {
Annotated {
title: None,
description: None,
annotated,
}
}
}
impl Annotated<Schema> {
pub fn from_type(
modules: &HashMap<String, CheckedModule>,
type_info: &Type,
type_parameters: &HashMap<u64, &Arc<Type>>,
) -> Result<Self, Error> {
match type_info {
Type::App {
module: module_name,
name: type_name,
args,
..
} if module_name.is_empty() => match &type_name[..] {
"Data" => Ok(Annotated {
title: Some("Data".to_string()),
description: Some("Any Plutus data.".to_string()),
annotated: Schema::Data(None),
}),
"ByteArray" => Ok(Schema::Data(Some(Data::Bytes)).into()),
"Int" => Ok(Schema::Data(Some(Data::Integer)).into()),
"String" => Ok(Schema::String.into()),
"Void" => Ok(Annotated {
title: Some("Unit".to_string()),
description: Some("The nullary constructor.".to_string()),
annotated: Schema::Data(Some(Data::AnyOf(vec![Annotated {
title: None,
description: None,
annotated: Constructor {
index: 0,
fields: vec![],
},
}]))),
}),
"Bool" => Ok(Annotated {
title: Some("Bool".to_string()),
description: None,
annotated: Schema::Data(Some(Data::AnyOf(vec![
Annotated {
title: Some("False".to_string()),
description: None,
annotated: Constructor {
index: 0,
fields: vec![],
},
},
Annotated {
title: Some("True".to_string()),
description: None,
annotated: Constructor {
index: 1,
fields: vec![],
},
},
]))),
}),
"Option" => {
let generic =
Annotated::from_type(modules, args.get(0).unwrap(), type_parameters)
.and_then(|s| s.into_data(type_info))?;
Ok(Annotated {
title: Some("Optional".to_string()),
description: None,
annotated: Schema::Data(Some(Data::AnyOf(vec![
Annotated {
title: Some("Some".to_string()),
description: Some("An optional value.".to_string()),
annotated: Constructor {
index: 0,
fields: vec![generic],
},
},
Annotated {
title: Some("None".to_string()),
description: Some("Nothing.".to_string()),
annotated: Constructor {
index: 1,
fields: vec![],
},
},
]))),
})
}
"List" => {
let generic =
Annotated::from_type(modules, args.get(0).unwrap(), type_parameters)?;
// NOTE: Lists of 2-tuples are treated as Maps. This is an oddity we inherit
// from the PlutusTx / LedgerApi Haskell codebase, which encodes some elements
// as such. We don't have a concept of language maps in Aiken, so we simply
// make all types abide by this convention.
let data = match generic.annotated {
Schema::Pair(left, right) => Data::Map(Box::new(left), Box::new(right)),
_ => {
let inner = generic.into_data(type_info)?.annotated;
Data::List(Box::new(inner))
}
};
Ok(Schema::Data(Some(data)).into())
}
_ => Err(Error::new(ErrorContext::UnsupportedType, type_info)),
},
Type::App {
module: module_name,
name: type_name,
args,
..
} => {
let module = modules.get(module_name).unwrap();
let constructor = find_definition(type_name, &module.ast.definitions).unwrap();
let type_parameters = collect_type_parameters(&constructor.typed_parameters, args);
let annotated = Schema::Data(Some(
Data::from_data_type(modules, constructor, &type_parameters)
.map_err(|e| e.backtrack(type_info))?,
));
Ok(Annotated {
title: Some(constructor.name.clone()),
description: constructor.doc.clone().map(|s| s.trim().to_string()),
annotated,
})
}
Type::Var { tipo } => match tipo.borrow().deref() {
TypeVar::Link { tipo } => Annotated::from_type(modules, tipo, type_parameters),
TypeVar::Generic { id } => {
let tipo = type_parameters
.get(id)
.ok_or_else(|| Error::new(ErrorContext::FreeTypeVariable, type_info))?;
Annotated::from_type(modules, tipo, type_parameters)
}
TypeVar::Unbound { .. } => {
Err(Error::new(ErrorContext::UnboundTypeVariable, type_info))
}
},
Type::Tuple { elems } => match &elems[..] {
[left, right] => {
let left = Annotated::from_type(modules, left, type_parameters)?
.into_data(left)
.map_err(|e| e.backtrack(type_info))?;
let right = Annotated::from_type(modules, right, type_parameters)?
.into_data(right)
.map_err(|e| e.backtrack(type_info))?;
Ok(Schema::Pair(left.annotated, right.annotated).into())
}
_ => {
let elems = elems
.iter()
.map(|e| {
Annotated::from_type(modules, e, type_parameters)
.and_then(|s| s.into_data(e).map(|s| s.annotated))
})
.collect::<Result<Vec<_>, _>>()
.map_err(|e| e.backtrack(type_info))?;
Ok(Annotated {
title: Some("Tuple".to_owned()),
description: None,
annotated: Schema::List(elems),
})
}
},
Type::Fn { .. } => Err(Error::new(ErrorContext::UnexpectedFunction, type_info)),
}
}
fn into_data(self, type_info: &Type) -> Result<Annotated<Data>, Error> {
match self {
Annotated {
title,
description,
annotated: Schema::Data(Some(data)),
} => Ok(Annotated {
title,
description,
annotated: data,
}),
_ => Err(Error::new(ErrorContext::ExpectedData, type_info)),
}
}
}
impl Data {
pub fn from_data_type(
modules: &HashMap<String, CheckedModule>,
data_type: &DataType<Arc<Type>>,
type_parameters: &HashMap<u64, &Arc<Type>>,
) -> Result<Self, Error> {
let mut variants = vec![];
for (index, constructor) in data_type.constructors.iter().enumerate() {
let mut fields = vec![];
for field in constructor.arguments.iter() {
let mut schema = Annotated::from_type(modules, &field.tipo, type_parameters)
.and_then(|t| t.into_data(&field.tipo))?;
if field.label.is_some() {
schema.title = field.label.clone();
}
if field.doc.is_some() {
schema.description = field.doc.clone().map(|s| s.trim().to_string());
}
fields.push(schema);
}
let variant = Annotated {
title: Some(constructor.name.clone()),
description: constructor.doc.clone().map(|s| s.trim().to_string()),
annotated: Constructor { index, fields },
};
variants.push(variant);
}
// NOTE: Opaque data-types with a single variant and a single field are transparent, they
// are erased completely at compilation time.
if data_type.opaque {
if let [variant] = &variants[..] {
if let [field] = &variant.annotated.fields[..] {
return Ok(field.annotated.clone());
}
}
}
Ok(Data::AnyOf(variants))
}
}
impl Display for Schema {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = serde_json::to_string_pretty(self).map_err(|_| fmt::Error)?;
f.write_str(&s)
}
}
impl Serialize for Schema {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
match self {
Schema::Unit => {
let mut s = serializer.serialize_struct("Unit", 1)?;
s.serialize_field("dataType", "#unit")?;
s.end()
}
Schema::Boolean => {
let mut s = serializer.serialize_struct("Integer", 1)?;
s.serialize_field("dataType", "#integer")?;
s.end()
}
Schema::Integer => {
let mut s = serializer.serialize_struct("Integer", 1)?;
s.serialize_field("dataType", "#integer")?;
s.end()
}
Schema::Bytes => {
let mut s = serializer.serialize_struct("Bytes", 1)?;
s.serialize_field("dataType", "#bytes")?;
s.end()
}
Schema::String => {
let mut s = serializer.serialize_struct("String", 1)?;
s.serialize_field("dataType", "#string")?;
s.end()
}
Schema::Pair(left, right) => {
let mut s = serializer.serialize_struct("Pair", 3)?;
s.serialize_field("dataType", "#pair")?;
s.serialize_field("left", &left)?;
s.serialize_field("right", &right)?;
s.end()
}
Schema::List(elements) => {
let mut s = serializer.serialize_struct("List", 2)?;
s.serialize_field("dataType", "#list")?;
s.serialize_field("elements", &elements)?;
s.end()
}
Schema::Data(None) => {
let s = serializer.serialize_struct("Data", 0)?;
s.end()
}
Schema::Data(Some(data)) => data.serialize(serializer),
}
}
}
impl Serialize for Data {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
match self {
Data::Integer => {
let mut s = serializer.serialize_struct("Integer", 1)?;
s.serialize_field("dataType", "integer")?;
s.end()
}
Data::Bytes => {
let mut s = serializer.serialize_struct("Bytes", 1)?;
s.serialize_field("dataType", "bytes")?;
s.end()
}
Data::List(items) => {
let mut s = serializer.serialize_struct("List", 2)?;
s.serialize_field("dataType", "list")?;
s.serialize_field("items", &items)?;
s.end()
}
Data::Map(keys, values) => {
let mut s = serializer.serialize_struct("Map", 3)?;
s.serialize_field("dataType", "map")?;
s.serialize_field("keys", &keys)?;
s.serialize_field("values", &values)?;
s.end()
}
Data::AnyOf(constructors) => {
// TODO: Avoid 'anyOf' applicator when there's only one constructor
//
// match &constructors[..] {
// [constructor] => constructor.serialize(serializer),
// _ => {
let mut s = serializer.serialize_struct("AnyOf", 1)?;
s.serialize_field("anyOf", &constructors)?;
s.end()
}
}
}
}
impl Serialize for Constructor {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
let mut s = serializer.serialize_struct("Constructor", 3)?;
s.serialize_field("dataType", "constructor")?;
s.serialize_field("index", &self.index)?;
s.serialize_field("fields", &self.fields)?;
s.end()
}
}
#[derive(Debug, PartialEq, Clone, thiserror::Error)]
#[error("{}", context)]
pub struct Error {
context: ErrorContext,
breadcrumbs: Vec<Type>,
}
#[derive(Debug, PartialEq, Clone, thiserror::Error)]
pub enum ErrorContext {
#[error("I failed at my own job and couldn't figure out how to generate a specification for a type.")]
UnsupportedType,
#[error("I discovered a type hole where I would expect a concrete type.")]
UnboundTypeVariable,
#[error("I caught a free variable in the contract's interface boundary.")]
FreeTypeVariable,
#[error("I had the misfortune to find an invalid type in an interface boundary.")]
ExpectedData,
#[error("I figured you tried to export a function in your contract's binary interface.")]
UnexpectedFunction,
}
impl Error {
pub fn new(context: ErrorContext, type_info: &Type) -> Self {
Error {
context,
breadcrumbs: vec![type_info.clone()],
}
}
pub fn backtrack(self, type_info: &Type) -> Self {
let mut breadcrumbs = vec![type_info.clone()];
breadcrumbs.extend(self.breadcrumbs);
Error {
context: self.context,
breadcrumbs,
}
}
pub fn help(&self) -> String {
match self.context {
ErrorContext::UnsupportedType => format!(
r#"I do not know how to generate a portable Plutus specification for the following type:
{signature}
This is likely a bug. I should know. May you be kind enough and report this on <https://github.com/aiken-lang/aiken>."#,
signature = Error::fmt_breadcrumbs(&[self.breadcrumbs.last().unwrap().to_owned()]),
),
ErrorContext::FreeTypeVariable => format!(
r#"There can't be any free type variable at the contract's boundary (i.e. in types used as datum and/or redeemer). Indeed, the validator can only be invoked with (very) concrete types. Since there's no reflexion possible inside a validator, it simply isn't possible to have any remaining free type variable in any of the datum or redeemer.
I got there when trying to generate a blueprint specification of the following type:
{breadcrumbs}"#,
breadcrumbs = Error::fmt_breadcrumbs(&self.breadcrumbs)
),
ErrorContext::UnboundTypeVariable => format!(
r#"There cannot be any unbound type variable at the contract's boundary (i.e. in types used as datum and/or redeemer). Indeed, in order to generate an outward-facing specification of the contract's interface, I need to know what concrete representations will the datum and/or the redeemer have.
If your contract doesn't need datum or redeemer, you can always give them the type {type_Void} to indicate this. It is very concrete and will help me progress forward."#,
type_Void = "Void".bright_blue().bold()
),
ErrorContext::ExpectedData => format!(
r#"While figuring out the outward-facing specification for your contract, I found a type that cannot actually be represented as valid Untyped Plutus Core (the low-level language Cardano uses to execute smart-contracts. For example, it isn't possible to have a list or a tuple of {type_String} because the underlying execution engine doesn't allow it.
There are few restrictions like this one. In this instance, here's the types I followed and that led me to this problem:
{breadcrumbs}"#,
type_String = "String".bright_blue().bold(),
breadcrumbs = Error::fmt_breadcrumbs(&self.breadcrumbs)
),
ErrorContext::UnexpectedFunction => format!(
r#"I can't allow that. Functions aren't serializable as data on-chain and thus cannot be used within your datum and/or redeemer types.
Here's the types I followed and that led me to this problem:
{breadcrumbs}"#,
breadcrumbs = Error::fmt_breadcrumbs(&self.breadcrumbs)
),
}
}
fn fmt_breadcrumbs(breadcrumbs: &[Type]) -> String {
breadcrumbs
.iter()
.map(|type_info| {
pretty::Printer::new()
.print(type_info)
.to_pretty_string(70)
.bright_blue()
.bold()
.to_string()
})
.collect::<Vec<_>>()
.join("")
}
}
fn collect_type_parameters<'a>(
generics: &'a [Arc<Type>],
applications: &'a [Arc<Type>],
) -> HashMap<u64, &'a Arc<Type>> {
let mut type_parameters = HashMap::new();
for (index, generic) in generics.iter().enumerate() {
match &**generic {
Type::Var { tipo } => match *tipo.borrow() {
TypeVar::Generic { id } => {
type_parameters.insert(id, applications.get(index).unwrap());
}
_ => unreachable!(),
},
_ => unreachable!(),
}
}
type_parameters
}
fn find_definition<'a>(
name: &str,
definitions: &'a Vec<TypedDefinition>,
) -> Option<&'a DataType<Arc<Type>>> {
for def in definitions {
match def {
Definition::DataType(data_type) if name == data_type.name => return Some(data_type),
Definition::Fn { .. }
| Definition::DataType { .. }
| Definition::TypeAlias { .. }
| Definition::Use { .. }
| Definition::ModuleConstant { .. }
| Definition::Test { .. } => continue,
}
}
None
}
#[cfg(test)]
pub mod test {
use super::*;
use serde_json::{self, json, Value};
pub fn assert_json(schema: &impl Serialize, expected: Value) {
assert_eq!(serde_json::to_value(schema).unwrap(), expected);
}
#[test]
fn serialize_data_integer() {
let schema = Schema::Data(Some(Data::Integer));
assert_json(
&schema,
json!({
"dataType": "integer"
}),
);
}
#[test]
fn serialize_data_bytes() {
let schema = Schema::Data(Some(Data::Bytes));
assert_json(
&schema,
json!({
"dataType": "bytes"
}),
);
}
#[test]
fn serialize_data_list_1() {
let schema = Schema::Data(Some(Data::List(Box::new(Data::Integer))));
assert_json(
&schema,
json!({
"dataType": "list",
"items": {
"dataType": "integer"
}
}),
);
}
#[test]
fn serialize_data_list_2() {
let schema = Schema::Data(Some(Data::List(Box::new(Data::List(Box::new(
Data::Integer,
))))));
assert_json(
&schema,
json!({
"dataType": "list",
"items":
{
"dataType": "list",
"items": { "dataType": "integer" }
}
}),
);
}
#[test]
fn serialize_data_map_1() {
let schema = Schema::Data(Some(Data::Map(
Box::new(Data::Integer),
Box::new(Data::Bytes),
)));
assert_json(
&schema,
json!({
"dataType": "map",
"keys": {
"dataType": "integer"
},
"values": {
"dataType": "bytes"
}
}),
)
}
#[test]
fn serialize_data_map_2() {
let schema = Schema::Data(Some(Data::Map(
Box::new(Data::Bytes),
Box::new(Data::List(Box::new(Data::Integer))),
)));
assert_json(
&schema,
json!({
"dataType": "map",
"keys": {
"dataType": "bytes"
},
"values": {
"dataType": "list",
"items": { "dataType": "integer" }
}
}),
)
}
#[test]
fn serialize_data_constr_1() {
let schema = Schema::Data(Some(Data::AnyOf(vec![Constructor {
index: 0,
fields: vec![],
}
.into()])));
assert_json(
&schema,
json!({
"anyOf": [{
"dataType": "constructor",
"index": 0,
"fields": []
}]
}),
)
}
#[test]
fn serialize_data_constr_2() {
let schema = Schema::Data(Some(Data::AnyOf(vec![
Constructor {
index: 0,
fields: vec![Data::Integer.into()],
}
.into(),
Constructor {
index: 1,
fields: vec![Data::Bytes.into()],
}
.into(),
])));
assert_json(
&schema,
json!({
"anyOf": [
{
"dataType": "constructor",
"index": 0,
"fields": [{ "dataType": "integer" }]
},
{
"dataType": "constructor",
"index": 1,
"fields": [{ "dataType": "bytes" }]
}
]
}),
)
}
#[test]
fn serialize_empty_data() {
let schema = Schema::Data(None);
assert_json(&schema, json!({}))
}
#[test]
fn serialize_annotated_1() {
let schema = Annotated {
title: Some("foo".to_string()),
description: None,
annotated: Schema::Integer,
};
assert_json(
&schema,
json!({
"title": "foo",
"dataType": "#integer"
}),
)
}
#[test]
fn serialize_annotated_2() {
let schema = Annotated {
title: Some("foo".to_string()),
description: Some("Lorem Ipsum".to_string()),
annotated: Schema::String,
};
assert_json(
&schema,
json!({
"title": "foo",
"description": "Lorem Ipsum",
"dataType": "#string"
}),
)
}
}
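Two conventions in this module are easy to miss when reading the serializers above: Schema variants describe raw UPLC primitives and serialize their dataType with a '#' prefix (#unit, #integer, #bytes, #string, #pair, #list), whereas Data variants describe Plutus Data and serialize without the prefix; and, per the NOTE in from_type, a list of 2-tuples is emitted as a map rather than a list of pairs. A small standalone sketch of the resulting JSON shapes, built with serde_json::json! and matching the tests above and the validator tests further down:

use serde_json::json;

fn main() {
    // Data-level schema: what a field of type List<(ByteArray, Int)> becomes
    // at the contract's boundary (see the Dict tests in the validator module).
    let data_map = json!({
        "dataType": "map",
        "keys": { "dataType": "bytes" },
        "values": { "dataType": "integer" }
    });

    // UPLC-level schema: a bare 2-tuple datum keeps the '#'-prefixed pair form,
    // while its elements are still described as Data.
    let uplc_pair = json!({
        "dataType": "#pair",
        "left": { "dataType": "integer" },
        "right": { "dataType": "bytes" }
    });

    println!("{data_map:#}");
    println!("{uplc_pair:#}");
}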

View File

@@ -0,0 +1,607 @@
use super::{
error::{assert_min_arity, assert_return_bool, Error},
schema::{Annotated, Schema},
};
use crate::module::{CheckedModule, CheckedModules};
use aiken_lang::{ast::TypedFunction, uplc::CodeGenerator};
use miette::NamedSource;
use serde;
use std::{
collections::HashMap,
fmt::{self, Display},
};
use uplc::ast::{DeBruijn, Program};
#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)]
pub struct Validator<T> {
pub title: String,
pub purpose: Purpose,
#[serde(skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub datum: Option<Annotated<T>>,
pub redeemer: Annotated<T>,
#[serde(flatten)]
pub program: Program<DeBruijn>,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum Purpose {
Spend,
Mint,
Withdraw,
Publish,
}
impl Display for Validator<Schema> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let s = serde_json::to_string_pretty(self).map_err(|_| fmt::Error)?;
f.write_str(&s)
}
}
impl Validator<Schema> {
pub fn from_checked_module(
modules: &CheckedModules,
generator: &mut CodeGenerator,
validator: &CheckedModule,
def: &TypedFunction,
) -> Result<Validator<Schema>, Error> {
let purpose: Purpose = def
.name
.clone()
.try_into()
.expect("unexpected validator name");
assert_return_bool(validator, def)?;
assert_min_arity(validator, def, purpose.min_arity())?;
let mut args = def.arguments.iter().rev();
let (_, redeemer, datum) = (args.next(), args.next().unwrap(), args.next());
Ok(Validator {
title: validator.name.clone(),
description: None,
purpose,
datum: datum
.map(|datum| {
Annotated::from_type(modules.into(), &datum.tipo, &HashMap::new()).map_err(
|error| Error::Schema {
error,
location: datum.location,
source_code: NamedSource::new(
validator.input_path.display().to_string(),
validator.code.clone(),
),
},
)
})
.transpose()?,
redeemer: Annotated::from_type(modules.into(), &redeemer.tipo, &HashMap::new())
.map_err(|error| Error::Schema {
error,
location: redeemer.location,
source_code: NamedSource::new(
validator.input_path.display().to_string(),
validator.code.clone(),
),
})?,
program: generator
.generate(&def.body, &def.arguments, true)
.try_into()
.unwrap(),
})
}
}
impl Purpose {
pub fn min_arity(&self) -> u8 {
match self {
Purpose::Spend => 3,
Purpose::Mint | Purpose::Withdraw | Purpose::Publish => 2,
}
}
}
impl Display for Purpose {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match self {
Purpose::Spend => "spend",
Purpose::Mint => "mint",
Purpose::Withdraw => "withdraw",
Purpose::Publish => "publish",
})
}
}
impl TryFrom<String> for Purpose {
type Error = String;
fn try_from(purpose: String) -> Result<Purpose, Self::Error> {
match &purpose[..] {
"spend" => Ok(Purpose::Spend),
"mint" => Ok(Purpose::Mint),
"withdraw" => Ok(Purpose::Withdraw),
"publish" => Ok(Purpose::Publish),
unexpected => Err(format!("Can't turn '{}' into any Purpose", unexpected)),
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::{module::ParsedModule, PackageName};
use aiken_lang::{
self,
ast::{ModuleKind, TypedDataType, TypedFunction},
builder::{DataTypeKey, FunctionAccessKey},
builtins, parser,
tipo::TypeInfo,
IdGenerator,
};
use assert_json_diff::assert_json_eq;
use indexmap::IndexMap;
use serde_json::{self, json};
use std::{collections::HashMap, path::PathBuf};
// TODO: Possible refactor this out of the module and have it used by `Project`. The idea would
// be to make this struct below the actual project, and wrap it in another metadata struct
// which contains all the config and I/O stuff regarding the project.
struct TestProject {
package: PackageName,
id_gen: IdGenerator,
module_types: HashMap<String, TypeInfo>,
functions: IndexMap<FunctionAccessKey, TypedFunction>,
data_types: IndexMap<DataTypeKey, TypedDataType>,
}
impl TestProject {
fn new() -> Self {
let id_gen = IdGenerator::new();
let package = PackageName {
owner: "test".to_owned(),
repo: "project".to_owned(),
};
let mut module_types = HashMap::new();
module_types.insert("aiken".to_string(), builtins::prelude(&id_gen));
module_types.insert("aiken/builtin".to_string(), builtins::plutus(&id_gen));
let functions = builtins::prelude_functions(&id_gen);
let data_types = builtins::prelude_data_types(&id_gen);
TestProject {
package,
id_gen,
module_types,
functions,
data_types,
}
}
fn parse(&self, source_code: &str) -> ParsedModule {
let kind = ModuleKind::Validator;
let name = "test_module".to_owned();
let (mut ast, extra) =
parser::module(source_code, kind).expect("Failed to parse module");
ast.name = name.clone();
let mut module = ParsedModule {
kind,
ast,
code: source_code.to_string(),
name,
path: PathBuf::new(),
extra,
package: self.package.to_string(),
};
module.attach_doc_and_module_comments();
module
}
fn check(&mut self, module: ParsedModule) -> CheckedModule {
let mut warnings = vec![];
let ast = module
.ast
.infer(
&self.id_gen,
module.kind,
&self.package.to_string(),
&self.module_types,
&mut warnings,
)
.expect("Failed to type-check module");
self.module_types
.insert(module.name.clone(), ast.type_info.clone());
CheckedModule {
kind: module.kind,
extra: module.extra,
name: module.name,
code: module.code,
package: module.package,
input_path: module.path,
ast,
}
}
}
fn assert_validator(source_code: &str, json: serde_json::Value) {
let mut project = TestProject::new();
let modules = CheckedModules::singleton(project.check(project.parse(source_code)));
let mut generator = modules.new_generator(
&project.functions,
&project.data_types,
&project.module_types,
);
let (validator, def) = modules
.validators()
.next()
.expect("source code did no yield any validator");
let validator = Validator::from_checked_module(&modules, &mut generator, validator, def)
.expect("Failed to create validator blueprint");
println!("{}", validator);
assert_json_eq!(serde_json::to_value(&validator).unwrap(), json);
}
#[test]
fn validator_mint_basic() {
assert_validator(
r#"
fn mint(redeemer: Data, ctx: Data) {
True
}
"#,
json!({
"title": "test_module",
"purpose": "mint",
"hash": "f9fcaa5bfce8bde3b85e595b5235a184fe0fb79916d38273c74a23cf",
"redeemer": {
"title": "Data",
"description": "Any Plutus data."
},
"compiledCode": "582e0100003232225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae88008005"
}),
);
}
#[test]
fn validator_spend() {
assert_validator(
r#"
/// On-chain state
type State {
/// The contestation period as a number of seconds
contestationPeriod: ContestationPeriod,
/// List of public key hashes of all participants
parties: List<Party>,
utxoHash: Hash<Blake2b_256>,
}
/// A Hash digest for a given algorithm.
type Hash<alg> = ByteArray
type Blake2b_256 { Blake2b_256 }
/// Whatever
type ContestationPeriod {
/// A positive, non-zero number of seconds.
ContestationPeriod(Int)
}
type Party =
ByteArray
type Input {
CollectCom
Close
/// Abort a transaction
Abort
}
fn spend(datum: State, redeemer: Input, ctx: Data) {
True
}
"#,
json!({
"title": "test_module",
"purpose": "spend",
"hash": "3b7ee6139deb59d892955ac3cad15d53e48dcb1643227256b29d2b6f",
"datum": {
"title": "State",
"description": "On-chain state",
"anyOf": [
{
"title": "State",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "contestationPeriod",
"description": "The contestation period as a number of seconds",
"anyOf": [
{
"title": "ContestationPeriod",
"description": "A positive, non-zero number of seconds.",
"dataType": "constructor",
"index": 0,
"fields": [
{
"dataType": "integer"
}
]
}
]
},
{
"title": "parties",
"description": "List of public key hashes of all participants",
"dataType": "list",
"items": {
"dataType": "bytes"
}
},
{
"title": "utxoHash",
"dataType": "bytes"
}
]
}
]
},
"redeemer": {
"title": "Input",
"anyOf": [
{
"title": "CollectCom",
"dataType": "constructor",
"index": 0,
"fields": []
},
{
"title": "Close",
"dataType": "constructor",
"index": 1,
"fields": []
},
{
"title": "Abort",
"description": "Abort a transaction",
"dataType": "constructor",
"index": 2,
"fields": []
}
]
},
"compiledCode": "582f01000032322225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae880080041"
}),
);
}
#[test]
fn validator_spend_2tuple() {
assert_validator(
r#"
fn spend(datum: (Int, ByteArray), redeemer: String, ctx: Void) {
True
}
"#,
json!({
"title": "test_module",
"purpose": "spend",
"hash": "4a0c0768ff3e8c8f9da5fc2c499e592ae37f676a11dbc6e9de958116",
"datum": {
"dataType": "#pair",
"left": {
"dataType": "integer"
},
"right": {
"dataType": "bytes"
}
},
"redeemer": {
"dataType": "#string"
},
"compiledCode": "584901000032322322322533357349445261637326eb8004c8c8cdd81aba1002357420026ae88004dd600098008009112999aab9f00214984cd5cd98009aba100233300300335744004003"
}),
)
}
#[test]
fn validator_spend_tuples() {
assert_validator(
r#"
fn spend(datum: (Int, Int, Int), redeemer: Data, ctx: Void) {
True
}
"#,
json!({
"title": "test_module",
"purpose": "spend",
"hash": "5e7487927f32a4d6e8c3b462c8e0e0f685506621f5f2683807805d0e",
"datum": {
"title": "Tuple",
"dataType": "#list",
"elements": [
{
"dataType": "integer"
},
{
"dataType": "integer"
},
{
"dataType": "integer"
}
]
},
"redeemer": {
"title": "Data",
"description": "Any Plutus data."
},
"compiledCode": "5833010000323223222533357349445261637580026002002444a666aae7c008526133573660026ae84008ccc00c00cd5d10010009"
}),
)
}
#[test]
fn validator_generics() {
assert_validator(
r#"
type Either<left, right> {
Left(left)
Right(right)
}
type Interval<a> {
Finite(a)
Infinite
}
fn withdraw(redeemer: Either<ByteArray, Interval<Int>>, ctx: Void) {
True
}
"#,
json!(
{
"title": "test_module",
"purpose": "withdraw",
"hash": "f9fcaa5bfce8bde3b85e595b5235a184fe0fb79916d38273c74a23cf",
"redeemer": {
"title": "Either",
"anyOf": [
{
"title": "Left",
"dataType": "constructor",
"index": 0,
"fields": [
{
"dataType": "bytes"
}
]
},
{
"title": "Right",
"dataType": "constructor",
"index": 1,
"fields": [
{
"title": "Interval",
"anyOf": [
{
"title": "Finite",
"dataType": "constructor",
"index": 0,
"fields": [
{
"dataType": "integer"
}
]
},
{
"title": "Infinite",
"dataType": "constructor",
"index": 1,
"fields": []
}
]
}
]
}
]
},
"compiledCode": "582e0100003232225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae88008005"
}
),
)
}
#[test]
fn validator_phantom_types() {
assert_validator(
r#"
type Dict<key, value> {
inner: List<(ByteArray, value)>
}
type UUID { UUID }
fn mint(redeemer: Dict<UUID, Int>, ctx: Void) {
True
}
"#,
json!(
{
"title": "test_module",
"purpose": "mint",
"hash": "f9fcaa5bfce8bde3b85e595b5235a184fe0fb79916d38273c74a23cf",
"redeemer": {
"title": "Dict",
"anyOf": [
{
"title": "Dict",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "inner",
"dataType": "map",
"keys": {
"dataType": "bytes"
},
"values": {
"dataType": "integer"
}
}
]
}
]
},
"compiledCode": "582e0100003232225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae88008005"
}
),
);
}
#[test]
fn validator_opaque_types() {
assert_validator(
r#"
pub opaque type Dict<key, value> {
inner: List<(ByteArray, value)>
}
type UUID { UUID }
fn mint(redeemer: Dict<UUID, Int>, ctx: Void) {
True
}
"#,
json!(
{
"title": "test_module",
"purpose": "mint",
"hash": "f9fcaa5bfce8bde3b85e595b5235a184fe0fb79916d38273c74a23cf",
"redeemer": {
"title": "Dict",
"dataType": "map",
"keys": {
"dataType": "bytes"
},
"values": {
"dataType": "integer"
}
},
"compiledCode": "582e0100003232225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae88008005"
}
),
);
}
}
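A small note on from_checked_module above: arguments are consumed in reverse, so the script context is dropped first, the redeemer is always the next argument, and a datum is only captured when a third argument remains, which is why spend requires an arity of three while the other purposes only need two. A standalone sketch of that reversed destructuring, with hypothetical argument names:

fn main() {
    // Mirrors: let (_, redeemer, datum) = (args.next(), args.next().unwrap(), args.next());
    let spend_args = ["datum", "redeemer", "ctx"];
    let mut args = spend_args.iter().rev();
    let (_ctx, redeemer, datum) = (args.next(), args.next().unwrap(), args.next());
    assert_eq!(*redeemer, "redeemer");
    assert_eq!(datum, Some(&"datum"));

    // For a mint/withdraw/publish validator there is no datum left over.
    let mint_args = ["redeemer", "ctx"];
    let mut args = mint_args.iter().rev();
    let (_ctx, redeemer, datum) = (args.next(), args.next().unwrap(), args.next());
    assert_eq!(*redeemer, "redeemer");
    assert_eq!(datum, None);
}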

View File

@@ -1,4 +1,10 @@
-use crate::{deps::manifest::Package, package_name::PackageName, pretty, script::EvalHint};
+use crate::{
+blueprint::{error as blueprint, validator},
+deps::manifest::Package,
+package_name::PackageName,
+pretty,
+script::EvalHint,
+};
use aiken_lang::{
ast::{BinOp, Span},
parser::error::ParseError,
@@ -7,6 +13,7 @@ use aiken_lang::{
use miette::{
Diagnostic, EyreContext, LabeledSpan, MietteHandlerOpts, NamedSource, RgbColors, SourceCode,
};
+use owo_colors::OwoColorize;
use std::{
fmt::{Debug, Display},
io,
@@ -31,6 +38,9 @@ pub enum Error {
#[error("I found some files with incorrectly formatted source code.")]
Format { problem_files: Vec<Unformatted> },
+#[error(transparent)]
+Blueprint(#[from] blueprint::Error),
#[error(transparent)]
StandardIo(#[from] io::Error),
@@ -43,6 +53,9 @@ pub enum Error {
#[error(transparent)]
JoinError(#[from] tokio::task::JoinError),
+#[error(transparent)]
+Json(#[from] serde_json::Error),
#[error("{help}")]
TomlLoading {
path: PathBuf,
@@ -114,6 +127,21 @@ pub enum Error {
package.name.repo
)]
UnknownPackageVersion { package: Package },
+#[error("I couldn't parse the provided stake address.")]
+MalformedStakeAddress {
+error: Option<pallas::ledger::addresses::Error>,
+},
+#[error("I didn't find any validator matching your criteria.")]
+NoValidatorNotFound {
+known_validators: Vec<(String, validator::Purpose)>,
+},
+#[error("I found multiple suitable validators and I need you to tell me which one to pick.")]
+MoreThanOneValidatorFound {
+known_validators: Vec<(String, validator::Purpose)>,
+},
}
impl Error {
@@ -183,6 +211,7 @@ impl Error {
Error::FileIo { .. } => None,
Error::Format { .. } => None,
Error::StandardIo(_) => None,
+Error::Blueprint(_) => None,
Error::MissingManifest { path } => Some(path.to_path_buf()),
Error::TomlLoading { path, .. } => Some(path.to_path_buf()),
Error::ImportCycle { .. } => None,
@@ -196,6 +225,10 @@ impl Error {
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
Error::UnknownPackageVersion { .. } => None,
+Error::Json { .. } => None,
+Error::MalformedStakeAddress { .. } => None,
+Error::NoValidatorNotFound { .. } => None,
+Error::MoreThanOneValidatorFound { .. } => None,
}
}
@@ -205,6 +238,7 @@ impl Error {
Error::FileIo { .. } => None,
Error::Format { .. } => None,
Error::StandardIo(_) => None,
+Error::Blueprint(_) => None,
Error::MissingManifest { .. } => None,
Error::TomlLoading { src, .. } => Some(src.to_string()),
Error::ImportCycle { .. } => None,
@@ -218,6 +252,10 @@ impl Error {
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
Error::UnknownPackageVersion { .. } => None,
+Error::Json { .. } => None,
+Error::MalformedStakeAddress { .. } => None,
+Error::NoValidatorNotFound { .. } => None,
+Error::MoreThanOneValidatorFound { .. } => None,
}
}
}
@@ -250,6 +288,7 @@ impl Diagnostic for Error {
match self {
Error::DuplicateModule { .. } => Some(Box::new("aiken::module::duplicate")),
Error::FileIo { .. } => None,
+Error::Blueprint(e) => e.code(),
Error::ImportCycle { .. } => Some(Box::new("aiken::module::cyclical")),
Error::List(_) => None,
Error::Parse { .. } => Some(Box::new("aiken::parser")),
@@ -268,6 +307,10 @@ impl Diagnostic for Error {
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
Error::UnknownPackageVersion { .. } => Some(Box::new("aiken::packages::resolve")),
+Error::Json { .. } => None,
+Error::MalformedStakeAddress { .. } => None,
+Error::NoValidatorNotFound { .. } => None,
+Error::MoreThanOneValidatorFound { .. } => None,
}
}
@@ -278,7 +321,8 @@ impl Diagnostic for Error {
first.display(),
second.display()
))),
-Error::FileIo { .. } => None,
+Error::FileIo { error, .. } => Some(Box::new(format!("{error}"))),
+Error::Blueprint(e) => e.help(),
Error::ImportCycle { modules } => Some(Box::new(format!(
"Try moving the shared code to a separate module that the others can depend on\n- {}",
modules.join("\n- ")
@@ -324,6 +368,23 @@ impl Diagnostic for Error {
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
Error::UnknownPackageVersion{..} => Some(Box::new("Perhaps, double-check the package repository and version?")),
+Error::Json(error) => Some(Box::new(format!("{error}"))),
+Error::MalformedStakeAddress { error } => Some(Box::new(format!("A stake address must be provided either as a base16-encoded string, or as a bech32-encoded string with the 'stake' or 'stake_test' prefix.{hint}", hint = match error {
+Some(error) => format!("\n\nHere's the error I encountered: {error}"),
+None => String::new(),
+}))),
+Error::NoValidatorNotFound { known_validators } => {
+Some(Box::new(format!(
+"Here's a list of all validators (and their purpose) I've found in your project. Please double-check this list against the options that you've provided:\n\n{}",
+known_validators.iter().map(|(name, purpose)| format!("{name} (purpose = {purpose})", name = name.purple().bold(), purpose = purpose.bright_blue())).collect::<Vec<String>>().join("\n")
+)))
+},
+Error::MoreThanOneValidatorFound { known_validators } => {
+Some(Box::new(format!(
+"Here's a list of all validators (and their purpose) I've found in your project. Select one of them using the appropriate options:\n\n{}",
+known_validators.iter().map(|(name, purpose)| format!("{name} (purpose = {purpose})", name = name.purple().bold(), purpose = purpose.bright_blue())).collect::<Vec<String>>().join("\n")
+)))
+},
}
}
@@ -332,6 +393,7 @@ impl Diagnostic for Error {
Error::DuplicateModule { .. } => None,
Error::FileIo { .. } => None,
Error::ImportCycle { .. } => None,
+Error::Blueprint(e) => e.labels(),
Error::List(_) => None,
Error::Parse { error, .. } => error.labels(),
Error::MissingManifest { .. } => None,
@@ -358,6 +420,10 @@ impl Diagnostic for Error {
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
Error::UnknownPackageVersion { .. } => None,
+Error::Json { .. } => None,
+Error::MalformedStakeAddress { .. } => None,
+Error::NoValidatorNotFound { .. } => None,
+Error::MoreThanOneValidatorFound { .. } => None,
}
}
@@ -366,6 +432,7 @@ impl Diagnostic for Error {
Error::DuplicateModule { .. } => None,
Error::FileIo { .. } => None,
Error::ImportCycle { .. } => None,
+Error::Blueprint(e) => e.source_code(),
Error::List(_) => None,
Error::Parse { named, .. } => Some(named),
Error::Type { named, .. } => Some(named),
@@ -380,6 +447,10 @@ impl Diagnostic for Error {
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
Error::UnknownPackageVersion { .. } => None,
+Error::Json { .. } => None,
+Error::MalformedStakeAddress { .. } => None,
+Error::NoValidatorNotFound { .. } => None,
+Error::MoreThanOneValidatorFound { .. } => None,
}
}
@@ -388,6 +459,7 @@ impl Diagnostic for Error {
Error::DuplicateModule { .. } => None,
Error::FileIo { .. } => None,
Error::ImportCycle { .. } => None,
+Error::Blueprint(e) => e.url(),
Error::List { .. } => None,
Error::Parse { .. } => None,
Error::Type { error, .. } => error.url(),
@@ -402,6 +474,10 @@ impl Diagnostic for Error {
Error::ZipExtract { .. } => None,
Error::JoinError { .. } => None,
Error::UnknownPackageVersion { .. } => None,
+Error::Json { .. } => None,
+Error::MalformedStakeAddress { .. } => None,
+Error::NoValidatorNotFound { .. } => None,
+Error::MoreThanOneValidatorFound { .. } => None,
}
}
@@ -409,6 +485,7 @@ impl Diagnostic for Error {
match self {
Error::DuplicateModule { .. } => None,
Error::FileIo { .. } => None,
+Error::Blueprint(e) => e.related(),
Error::ImportCycle { .. } => None,
Error::List { .. } => None,
Error::Parse { .. } => None,
@@ -424,6 +501,10 @@ impl Diagnostic for Error {
Error::ZipExtract { .. } => None,
Error::JoinError { .. } => None,
Error::UnknownPackageVersion { .. } => None,
+Error::Json { .. } => None,
+Error::MalformedStakeAddress { .. } => None,
+Error::NoValidatorNotFound { .. } => None,
+Error::MoreThanOneValidatorFound { .. } => None,
}
}
}

View File

@ -1,3 +1,4 @@
pub mod blueprint;
pub mod config; pub mod config;
pub mod deps; pub mod deps;
pub mod docs; pub mod docs;
@ -11,34 +12,32 @@ pub mod pretty;
pub mod script; pub mod script;
pub mod telemetry; pub mod telemetry;
use crate::blueprint::{schema::Schema, validator, Blueprint};
use aiken_lang::{ use aiken_lang::{
ast::{Definition, Function, ModuleKind, TypedDataType, TypedDefinition, TypedFunction}, ast::{Definition, Function, ModuleKind, TypedDataType, TypedFunction},
builder::{DataTypeKey, FunctionAccessKey}, builder::{DataTypeKey, FunctionAccessKey},
builtins::{self, generic_var}, builtins,
tipo::TypeInfo, tipo::TypeInfo,
uplc::CodeGenerator, IdGenerator,
IdGenerator, MINT, PUBLISH, SPEND, VALIDATOR_NAMES, WITHDRAW,
}; };
use deps::UseManifest; use deps::UseManifest;
use indexmap::IndexMap; use indexmap::IndexMap;
use miette::NamedSource; use miette::NamedSource;
use options::{CodeGenMode, Options}; use options::{CodeGenMode, Options};
use package_name::PackageName; use package_name::PackageName;
use pallas::{ use pallas::ledger::addresses::{
codec::minicbor, Address, Network, ShelleyAddress, ShelleyDelegationPart, StakePayload,
ledger::{addresses::Address, primitives::babbage},
}; };
use pallas_traverse::ComputeHash;
use script::{EvalHint, EvalInfo, Script}; use script::{EvalHint, EvalInfo, Script};
use serde_json::json;
use std::{ use std::{
collections::HashMap, collections::HashMap,
fs, fs::{self, File},
io::BufReader,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
use telemetry::EventListener; use telemetry::EventListener;
use uplc::{ use uplc::{
ast::{Constant, DeBruijn, Program, Term}, ast::{Constant, Term},
machine::cost_model::ExBudget, machine::cost_model::ExBudget,
}; };
@ -70,6 +69,8 @@ where
sources: Vec<Source>, sources: Vec<Source>,
pub warnings: Vec<Warning>, pub warnings: Vec<Warning>,
event_listener: T, event_listener: T,
functions: IndexMap<FunctionAccessKey, TypedFunction>,
data_types: IndexMap<DataTypeKey, TypedDataType>,
} }
impl<T> Project<T> impl<T> Project<T>
@ -84,6 +85,10 @@ where
module_types.insert("aiken".to_string(), builtins::prelude(&id_gen)); module_types.insert("aiken".to_string(), builtins::prelude(&id_gen));
module_types.insert("aiken/builtin".to_string(), builtins::plutus(&id_gen)); module_types.insert("aiken/builtin".to_string(), builtins::plutus(&id_gen));
let functions = builtins::prelude_functions(&id_gen);
let data_types = builtins::prelude_data_types(&id_gen);
let config = Config::load(&root)?; let config = Config::load(&root)?;
Ok(Project { Ok(Project {
@ -96,6 +101,8 @@ where
sources: vec![], sources: vec![],
warnings: vec![], warnings: vec![],
event_listener, event_listener,
functions,
data_types,
}) })
} }
@ -121,11 +128,7 @@ where
let destination = destination.unwrap_or_else(|| self.root.join("docs")); let destination = destination.unwrap_or_else(|| self.root.join("docs"));
let mut parsed_modules = self.parse_sources(self.config.name.clone())?; let parsed_modules = self.parse_sources(self.config.name.clone())?;
for (_, module) in parsed_modules.iter_mut() {
module.attach_doc_and_module_comments();
}
self.type_check(parsed_modules)?; self.type_check(parsed_modules)?;
@ -170,6 +173,25 @@ where
self.compile(options) self.compile(options)
} }
pub fn dump_uplc(&self, blueprint: &Blueprint<Schema>) -> Result<(), Error> {
let dir = self.root.join("artifacts");
self.event_listener
.handle_event(Event::DumpingUPLC { path: dir.clone() });
fs::create_dir_all(&dir)?;
for validator in &blueprint.validators {
let path = dir
.clone()
.join(format!("{}::{}>.uplc", validator.title, validator.purpose));
fs::write(&path, validator.program.to_pretty())
.map_err(|error| Error::FileIo { error, path })?;
}
Ok(())
}
pub fn blueprint_path(&self) -> PathBuf {
self.root.join("plutus.json")
}
pub fn compile(&mut self, options: Options) -> Result<(), Error> { pub fn compile(&mut self, options: Options) -> Result<(), Error> {
self.compile_deps()?; self.compile_deps()?;
@ -186,27 +208,42 @@ where
self.type_check(parsed_modules)?; self.type_check(parsed_modules)?;
let validators = self.validate_validators()?;
match options.code_gen_mode { match options.code_gen_mode {
CodeGenMode::Build(uplc_dump) => { CodeGenMode::Build(uplc_dump) => {
if validators.is_empty() { self.event_listener
.handle_event(Event::GeneratingBlueprint {
path: self.blueprint_path(),
});
let mut generator = self.checked_modules.new_generator(
&self.functions,
&self.data_types,
&self.module_types,
);
let blueprint = Blueprint::new(&self.config, &self.checked_modules, &mut generator)
.map_err(Error::Blueprint)?;
if blueprint.validators.is_empty() {
self.warnings.push(Warning::NoValidators); self.warnings.push(Warning::NoValidators);
} }
let programs = self.code_gen(validators)?; if uplc_dump {
self.dump_uplc(&blueprint)?;
}
self.write_build_outputs(programs, uplc_dump)?; let json = serde_json::to_string_pretty(&blueprint).unwrap();
fs::write(self.blueprint_path(), json).map_err(|error| Error::FileIo {
Ok(()) error,
path: self.blueprint_path(),
})
} }
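Because the side-by-side rendering interleaves removed and added lines, the new Build branch in the hunk above is easier to follow reassembled in one piece. This is a readability aid only, transcribed from the right-hand column of the hunk:

    CodeGenMode::Build(uplc_dump) => {
        self.event_listener
            .handle_event(Event::GeneratingBlueprint {
                path: self.blueprint_path(),
            });

        let mut generator = self.checked_modules.new_generator(
            &self.functions,
            &self.data_types,
            &self.module_types,
        );

        let blueprint = Blueprint::new(&self.config, &self.checked_modules, &mut generator)
            .map_err(Error::Blueprint)?;

        if blueprint.validators.is_empty() {
            self.warnings.push(Warning::NoValidators);
        }

        if uplc_dump {
            self.dump_uplc(&blueprint)?;
        }

        let json = serde_json::to_string_pretty(&blueprint).unwrap();
        fs::write(self.blueprint_path(), json).map_err(|error| Error::FileIo {
            error,
            path: self.blueprint_path(),
        })
    }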
CodeGenMode::Test { CodeGenMode::Test {
match_tests, match_tests,
verbose, verbose,
exact_match, exact_match,
} => { } => {
let tests = let tests = self.collect_tests(verbose)?;
self.collect_scripts(verbose, |def| matches!(def, Definition::Test(..)))?;
if !tests.is_empty() { if !tests.is_empty() {
self.event_listener.handle_event(Event::RunningTests); self.event_listener.handle_event(Event::RunningTests);
@ -244,6 +281,71 @@ where
} }
} }
pub fn address(
&self,
with_title: Option<&String>,
with_purpose: Option<&validator::Purpose>,
stake_address: Option<&String>,
) -> Result<ShelleyAddress, Error> {
// Parse stake address
let stake_address = stake_address
.map(|s| {
Address::from_hex(s)
.or_else(|_| Address::from_bech32(s))
.map_err(|error| Error::MalformedStakeAddress { error: Some(error) })
.and_then(|addr| match addr {
Address::Stake(addr) => Ok(addr),
_ => Err(Error::MalformedStakeAddress { error: None }),
})
})
.transpose()?;
let delegation_part = match stake_address.map(|addr| addr.payload().to_owned()) {
None => ShelleyDelegationPart::Null,
Some(StakePayload::Stake(key)) => ShelleyDelegationPart::Key(key),
Some(StakePayload::Script(script)) => ShelleyDelegationPart::Script(script),
};
// Read blueprint
let filepath = self.blueprint_path();
let blueprint =
File::open(filepath).map_err(|_| blueprint::error::Error::InvalidOrMissingFile)?;
let blueprint: Blueprint<serde_json::Value> =
serde_json::from_reader(BufReader::new(blueprint))?;
// Find validator's program
let mut program = None;
for v in blueprint.validators.iter() {
if Some(&v.title) == with_title.or(Some(&v.title))
&& Some(&v.purpose) == with_purpose.or(Some(&v.purpose))
{
program = Some(if program.is_none() {
Ok(v.program.clone())
} else {
Err(Error::MoreThanOneValidatorFound {
known_validators: blueprint
.validators
.iter()
.map(|v| (v.title.clone(), v.purpose.clone()))
.collect(),
})
})
}
}
// Print the address
match program {
Some(Ok(program)) => Ok(program.address(Network::Testnet, delegation_part)),
Some(Err(e)) => Err(e),
None => Err(Error::NoValidatorNotFound {
known_validators: blueprint
.validators
.iter()
.map(|v| (v.title.clone(), v.purpose.clone()))
.collect(),
}),
}
}
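The stake-address handling in Project::address can be exercised in isolation. A minimal sketch, assuming the same pallas re-exports imported at the top of this file; the helper name and the String error type are illustrative and not part of this changeset:

    use pallas::ledger::addresses::{Address, ShelleyDelegationPart, StakePayload};

    // Illustrative helper, not in this changeset: resolve an optional stake address
    // (hex or bech32) into the delegation part used when deriving a validator's
    // address, mirroring the logic in `Project::address` above.
    fn delegation_part(stake_address: Option<&str>) -> Result<ShelleyDelegationPart, String> {
        let address = match stake_address {
            None => return Ok(ShelleyDelegationPart::Null),
            Some(s) => Address::from_hex(s)
                .or_else(|_| Address::from_bech32(s))
                .map_err(|e| format!("malformed stake address: {e}"))?,
        };

        match address {
            // Reward accounts carry either a stake key hash or a script hash.
            Address::Stake(addr) => Ok(match addr.payload().to_owned() {
                StakePayload::Stake(key) => ShelleyDelegationPart::Key(key),
                StakePayload::Script(script) => ShelleyDelegationPart::Script(script),
            }),
            _ => Err("expected a stake (reward) address".to_string()),
        }
    }

With no stake address it yields ShelleyDelegationPart::Null, which is what the address command uses when delegated_to is omitted.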
fn compile_deps(&mut self) -> Result<(), Error> { fn compile_deps(&mut self) -> Result<(), Error> {
let manifest = deps::download( let manifest = deps::download(
&self.event_listener, &self.event_listener,
@ -304,7 +406,7 @@ where
// Store the name // Store the name
ast.name = name.clone(); ast.name = name.clone();
let module = ParsedModule { let mut module = ParsedModule {
kind, kind,
ast, ast,
code, code,
@ -325,6 +427,8 @@ where
}); });
} }
module.attach_doc_and_module_comments();
parsed_modules.insert(module.name.clone(), module); parsed_modules.insert(module.name.clone(), module);
} }
Err(errs) => { Err(errs) => {
@ -408,236 +512,20 @@ where
Ok(()) Ok(())
} }
fn validate_validators(&self) -> Result<Vec<(PathBuf, String, TypedFunction)>, Error> { fn collect_tests(&mut self, verbose: bool) -> Result<Vec<Script>, Error> {
let mut errors = Vec::new();
let mut validators = Vec::new();
for module in self.checked_modules.validators() {
for def in module.ast.definitions() {
if let Definition::Fn(func_def) = def {
if VALIDATOR_NAMES.contains(&func_def.name.as_str()) {
// validators must return a Bool
if !func_def.return_type.is_bool() {
errors.push(Error::ValidatorMustReturnBool {
location: func_def.location,
src: module.code.clone(),
path: module.input_path.clone(),
named: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
})
}
// depending on name, validate the minimum number of arguments
// if too low, push a new error on to errors
if [MINT, WITHDRAW, PUBLISH].contains(&func_def.name.as_str())
&& func_def.arguments.len() < 2
{
errors.push(Error::WrongValidatorArity {
location: func_def.location,
src: module.code.clone(),
path: module.input_path.clone(),
named: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
name: func_def.name.clone(),
at_least: 2,
})
}
if SPEND == func_def.name && func_def.arguments.len() < 3 {
errors.push(Error::WrongValidatorArity {
location: func_def.location,
src: module.code.clone(),
path: module.input_path.clone(),
named: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
name: func_def.name.clone(),
at_least: 3,
})
}
validators.push((
module.input_path.clone(),
module.name.clone(),
func_def.clone(),
));
}
}
}
}
if errors.is_empty() {
Ok(validators)
} else {
Err(Error::List(errors))
}
}
fn code_gen(
&mut self,
validators: Vec<(PathBuf, String, TypedFunction)>,
) -> Result<Vec<Script>, Error> {
let mut programs = Vec::new();
let mut functions = IndexMap::new();
let mut type_aliases = IndexMap::new();
let mut data_types = IndexMap::new();
let prelude_functions = builtins::prelude_functions(&self.id_gen);
for (access_key, func) in prelude_functions.iter() {
functions.insert(access_key.clone(), func);
}
let option_data_type = TypedDataType::option(generic_var(self.id_gen.next()));
data_types.insert(
DataTypeKey {
module_name: "".to_string(),
defined_type: "Option".to_string(),
},
&option_data_type,
);
for module in self.checked_modules.values() {
for def in module.ast.definitions() {
match def {
Definition::Fn(func) => {
functions.insert(
FunctionAccessKey {
module_name: module.name.clone(),
function_name: func.name.clone(),
variant_name: String::new(),
},
func,
);
}
Definition::TypeAlias(ta) => {
type_aliases.insert((module.name.clone(), ta.alias.clone()), ta);
}
Definition::DataType(dt) => {
data_types.insert(
DataTypeKey {
module_name: module.name.clone(),
defined_type: dt.name.clone(),
},
dt,
);
}
Definition::ModuleConstant(_) | Definition::Test(_) | Definition::Use(_) => {}
}
}
}
for (input_path, module_name, func_def) in validators {
let Function {
arguments,
name,
body,
..
} = func_def;
let mut modules_map = IndexMap::new();
modules_map.extend(self.module_types.clone());
let mut generator = CodeGenerator::new(
&functions,
// &type_aliases,
&data_types,
&modules_map,
);
self.event_listener.handle_event(Event::GeneratingUPLC {
output_path: self.output_path().join(&module_name).join(&name),
name: format!("{}.{}", module_name, name),
});
let program = generator.generate(body, arguments, true);
let script = Script::new(
input_path,
module_name,
name,
program.try_into().unwrap(),
None,
);
programs.push(script);
}
Ok(programs)
}
fn collect_scripts(
&mut self,
verbose: bool,
should_collect: fn(&TypedDefinition) -> bool,
) -> Result<Vec<Script>, Error> {
let mut programs = Vec::new();
let mut functions = IndexMap::new();
let mut type_aliases = IndexMap::new();
let mut data_types = IndexMap::new();
let prelude_functions = builtins::prelude_functions(&self.id_gen);
for (access_key, func) in prelude_functions.iter() {
functions.insert(access_key.clone(), func);
}
let option_data_type = TypedDataType::option(generic_var(self.id_gen.next()));
data_types.insert(
DataTypeKey {
module_name: "".to_string(),
defined_type: "Option".to_string(),
},
&option_data_type,
);
let mut scripts = Vec::new(); let mut scripts = Vec::new();
for module in self.checked_modules.values() { for module in self.checked_modules.values() {
if module.package != self.config.name.to_string() {
continue;
}
for def in module.ast.definitions() { for def in module.ast.definitions() {
match def { if let Definition::Test(func) = def {
Definition::Fn(func) => { scripts.push((module.input_path.clone(), module.name.clone(), func))
functions.insert(
FunctionAccessKey {
module_name: module.name.clone(),
function_name: func.name.clone(),
variant_name: String::new(),
},
func,
);
if should_collect(def) && module.package == self.config.name.to_string() {
scripts.push((module.input_path.clone(), module.name.clone(), func));
}
}
Definition::Test(func) => {
if should_collect(def) && module.package == self.config.name.to_string() {
scripts.push((module.input_path.clone(), module.name.clone(), func));
}
}
Definition::TypeAlias(ta) => {
type_aliases.insert((module.name.clone(), ta.alias.clone()), ta);
}
Definition::DataType(dt) => {
data_types.insert(
DataTypeKey {
module_name: module.name.clone(),
defined_type: dt.name.clone(),
},
dt,
);
}
Definition::Use(_) | Definition::ModuleConstant(_) => (),
} }
} }
} }
let mut programs = Vec::new();
for (input_path, module_name, func_def) in scripts { for (input_path, module_name, func_def) in scripts {
let Function { let Function {
arguments, arguments,
@ -653,26 +541,23 @@ where
}) })
} }
let mut modules_map = IndexMap::new(); let mut generator = self.checked_modules.new_generator(
&self.functions,
modules_map.extend(self.module_types.clone()); &self.data_types,
&self.module_types,
let mut generator = CodeGenerator::new(
&functions,
// &type_aliases,
&data_types,
&modules_map,
); );
let evaluation_hint = if let Some((bin_op, left_src, right_src)) = func_def.test_hint() let evaluation_hint = if let Some((bin_op, left_src, right_src)) = func_def.test_hint()
{ {
let left = CodeGenerator::new(&functions, &data_types, &modules_map) let left = generator
.generate(*left_src, vec![], false) .clone()
.generate(&left_src, &[], false)
.try_into() .try_into()
.unwrap(); .unwrap();
let right = CodeGenerator::new(&functions, &data_types, &modules_map) let right = generator
.generate(*right_src, vec![], false) .clone()
.generate(&right_src, &[], false)
.try_into() .try_into()
.unwrap(); .unwrap();
@ -685,7 +570,7 @@ where
None None
}; };
let program = generator.generate(body.clone(), arguments.clone(), false); let program = generator.generate(body, arguments, false);
let script = Script::new( let script = Script::new(
input_path, input_path,
@ -722,7 +607,7 @@ where
.map(|match_test| { .map(|match_test| {
let mut match_split_dot = match_test.split('.'); let mut match_split_dot = match_test.split('.');
let match_module = if match_test.contains('.') { let match_module = if match_test.contains('.') || match_test.contains('/') {
match_split_dot.next().unwrap_or("") match_split_dot.next().unwrap_or("")
} else { } else {
"" ""
@ -746,14 +631,16 @@ where
match_tests.iter().any(|(module, names)| { match_tests.iter().any(|(module, names)| {
let matched_module = module == &"" || script.module.contains(module); let matched_module = module == &"" || script.module.contains(module);
let matched_name = matches!(names, Some(names) if names let matched_name = match names {
.iter() None => true,
.any(|name| if exact_match { Some(names) => names.iter().any(|name| {
name == &script.name if exact_match {
} else { name == &script.name
script.name.contains(name) } else {
} script.name.contains(name)
)); }
}),
};
matched_module && matched_name matched_module && matched_name
}) })
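For reference, the rewritten filter predicate above amounts to the following standalone sketch, with simplified stand-in types that are not part of the changeset: a pattern's module part must be a substring of the script's module (an empty module part matches any module), and when test names are given at least one must match, by substring by default or by full name when exact_match is set.

    fn is_match(
        script_module: &str,
        script_name: &str,
        patterns: &[(String, Option<Vec<String>>)],
        exact_match: bool,
    ) -> bool {
        patterns.iter().any(|(module, names)| {
            // Empty module pattern matches any module; otherwise substring match.
            let matched_module = module.is_empty() || script_module.contains(module.as_str());

            // No names given means any test in the matched module qualifies.
            let matched_name = match names {
                None => true,
                Some(names) => names.iter().any(|name| {
                    if exact_match {
                        name.as_str() == script_name
                    } else {
                        script_name.contains(name.as_str())
                    }
                }),
            };

            matched_module && matched_name
        })
    }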
@ -785,91 +672,6 @@ where
.collect() .collect()
} }
fn output_path(&self) -> PathBuf {
self.root.join("assets")
}
fn write_build_outputs(&self, programs: Vec<Script>, uplc_dump: bool) -> Result<(), Error> {
for script in programs {
let script_output_dir = self.output_path().join(script.module).join(script.name);
fs::create_dir_all(&script_output_dir)?;
// dump textual uplc
if uplc_dump {
let uplc_path = script_output_dir.join("raw.uplc");
fs::write(uplc_path, script.program.to_pretty())?;
}
let program: Program<DeBruijn> = script.program.into();
let cbor = program.to_cbor().unwrap();
// Create file containing just the script cbor hex
let script_path = script_output_dir.join("script.cbor");
let cbor_hex = hex::encode(&cbor);
fs::write(script_path, cbor_hex)?;
// Create the payment script JSON file
let payment_script_path = script_output_dir.join("payment_script.json");
let mut bytes = Vec::new();
let mut encoder = minicbor::Encoder::new(&mut bytes);
encoder.bytes(&cbor).unwrap();
let prefixed_cbor_hex = hex::encode(&bytes);
let payment_script = json!({
"type": "PlutusScriptV2",
"description": "Generated by Aiken",
"cborHex": prefixed_cbor_hex
});
fs::write(
payment_script_path,
serde_json::to_string_pretty(&payment_script).unwrap(),
)?;
// Create mainnet and testnet addresses
let plutus_script = babbage::PlutusV2Script(cbor.into());
let hash = plutus_script.compute_hash();
// mainnet
let mainnet_path = script_output_dir.join("mainnet.addr");
let mut mainnet_bytes: Vec<u8> = vec![0b01110001];
mainnet_bytes.extend(hash.iter());
let mainnet_addr = Address::from_bytes(&mainnet_bytes)
.unwrap()
.to_bech32()
.unwrap();
fs::write(mainnet_path, mainnet_addr)?;
// testnet
let testnet_path = script_output_dir.join("testnet.addr");
let mut testnet_bytes: Vec<u8> = vec![0b01110000];
testnet_bytes.extend(hash.iter());
let testnet_addr = Address::from_bytes(&testnet_bytes)
.unwrap()
.to_bech32()
.unwrap();
fs::write(testnet_path, testnet_addr)?;
}
Ok(())
}
fn aiken_files(&mut self, dir: &Path, kind: ModuleKind) -> Result<(), Error> { fn aiken_files(&mut self, dir: &Path, kind: ModuleKind) -> Result<(), Error> {
let paths = walkdir::WalkDir::new(dir) let paths = walkdir::WalkDir::new(dir)
.follow_links(true) .follow_links(true)

View File

@ -1,17 +1,22 @@
use crate::error::Error;
use aiken_lang::{
ast::{
DataType, Definition, ModuleKind, TypedDataType, TypedFunction, TypedModule, UntypedModule,
},
builder::{DataTypeKey, FunctionAccessKey},
parser::extra::{comments_before, Comment, ModuleExtra},
tipo::TypeInfo,
uplc::CodeGenerator,
VALIDATOR_NAMES,
};
use indexmap::IndexMap;
use petgraph::{algo, graph::NodeIndex, Direction, Graph};
use std::{ use std::{
collections::{HashMap, HashSet}, collections::{HashMap, HashSet},
ops::{Deref, DerefMut}, ops::{Deref, DerefMut},
path::PathBuf, path::PathBuf,
}; };
use aiken_lang::{
ast::{DataType, Definition, ModuleKind, TypedModule, UntypedModule},
parser::extra::{comments_before, Comment, ModuleExtra},
};
use petgraph::{algo, graph::NodeIndex, Direction, Graph};
use crate::error::Error;
#[derive(Debug)] #[derive(Debug)]
pub struct ParsedModule { pub struct ParsedModule {
pub path: PathBuf, pub path: PathBuf,
@ -233,9 +238,31 @@ impl From<CheckedModules> for HashMap<String, CheckedModule> {
} }
} }
impl<'a> From<&'a CheckedModules> for &'a HashMap<String, CheckedModule> {
fn from(checked_modules: &'a CheckedModules) -> Self {
&checked_modules.0
}
}
impl CheckedModules { impl CheckedModules {
pub fn validators(&self) -> impl Iterator<Item = &CheckedModule> { pub fn singleton(module: CheckedModule) -> Self {
self.0.values().filter(|module| module.kind.is_validator()) let mut modules = Self::default();
modules.insert(module.name.clone(), module);
modules
}
pub fn validators(&self) -> impl Iterator<Item = (&CheckedModule, &TypedFunction)> {
let mut items = vec![];
for validator in self.0.values().filter(|module| module.kind.is_validator()) {
for some_definition in validator.ast.definitions() {
if let Definition::Fn(def) = some_definition {
if VALIDATOR_NAMES.contains(&def.name.as_str()) {
items.push((validator, def));
}
}
}
}
items.into_iter()
} }
pub fn into_validators(self) -> impl Iterator<Item = CheckedModule> { pub fn into_validators(self) -> impl Iterator<Item = CheckedModule> {
@ -243,6 +270,59 @@ impl CheckedModules {
.into_values() .into_values()
.filter(|module| module.kind.is_validator()) .filter(|module| module.kind.is_validator())
} }
pub fn new_generator<'a>(
&'a self,
builtin_functions: &'a IndexMap<FunctionAccessKey, TypedFunction>,
builtin_data_types: &'a IndexMap<DataTypeKey, TypedDataType>,
module_types: &'a HashMap<String, TypeInfo>,
) -> CodeGenerator<'a> {
let mut functions = IndexMap::new();
for (k, v) in builtin_functions {
functions.insert(k.clone(), v);
}
let mut data_types = IndexMap::new();
for (k, v) in builtin_data_types {
data_types.insert(k.clone(), v);
}
for module in self.values() {
for def in module.ast.definitions() {
match def {
Definition::Fn(func) => {
functions.insert(
FunctionAccessKey {
module_name: module.name.clone(),
function_name: func.name.clone(),
variant_name: String::new(),
},
func,
);
}
Definition::DataType(dt) => {
data_types.insert(
DataTypeKey {
module_name: module.name.clone(),
defined_type: dt.name.clone(),
},
dt,
);
}
Definition::TypeAlias(_)
| Definition::ModuleConstant(_)
| Definition::Test(_)
| Definition::Use(_) => {}
}
}
}
let mut module_types_index = IndexMap::new();
module_types_index.extend(module_types);
CodeGenerator::new(functions, data_types, module_types_index)
}
} }
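A hypothetical helper, not part of this changeset, showing how the new validators iterator is typically consumed: it pairs each validator module with every well-known entry point (spend, mint, withdraw, publish) it defines.

    // Hypothetical helper: list every validator entry point discovered by
    // `CheckedModules::validators`, formatted as "<module>::<function>".
    fn validator_names(modules: &CheckedModules) -> Vec<String> {
        modules
            .validators()
            .map(|(module, def)| format!("{}::{}", module.name, def.name))
            .collect()
    }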
impl Deref for CheckedModules { impl Deref for CheckedModules {

View File

@ -19,9 +19,11 @@ pub enum Event {
GeneratingDocFiles { GeneratingDocFiles {
output_path: PathBuf, output_path: PathBuf,
}, },
GeneratingUPLC { GeneratingBlueprint {
output_path: PathBuf, path: PathBuf,
name: String, },
DumpingUPLC {
path: PathBuf,
}, },
GeneratingUPLCFor { GeneratingUPLCFor {
name: String, name: String,

View File

@ -28,3 +28,4 @@ aiken-lang = { path = "../aiken-lang", version = "0.0.28" }
aiken-lsp = { path = "../aiken-lsp", version = "0.0.28" } aiken-lsp = { path = "../aiken-lsp", version = "0.0.28" }
aiken-project = { path = '../aiken-project', version = "0.0.28" } aiken-project = { path = '../aiken-project', version = "0.0.28" }
uplc = { path = '../uplc', version = "0.0.28" } uplc = { path = '../uplc', version = "0.0.28" }
serde_json = "1.0.91"

View File

@ -0,0 +1,53 @@
use crate::with_project;
use aiken_lang::VALIDATOR_NAMES;
use std::path::PathBuf;
#[derive(clap::Args)]
#[clap(setting(clap::AppSettings::DeriveDisplayOrder))]
/// Compute a validator's address.
pub struct Args {
/// Path to project
directory: Option<PathBuf>,
/// Name of the validator's module within the project. Optional if there's only one validator.
#[clap(short, long)]
validator: Option<String>,
/// Purpose of the validator within the module. Optional if there's only one validator.
#[clap(short, long, possible_values=&VALIDATOR_NAMES)]
purpose: Option<String>,
/// Stake address to attach, if any.
#[clap(long)]
delegated_to: Option<String>,
/// Force the project to be rebuilt, otherwise relies on existing artifacts (i.e. plutus.json).
#[clap(long)]
rebuild: bool,
}
pub fn exec(
Args {
directory,
validator,
purpose,
delegated_to,
rebuild,
}: Args,
) -> miette::Result<()> {
with_project(directory, |p| {
if rebuild {
p.build(false)?;
}
let address = p.address(
validator.as_ref(),
purpose
.as_ref()
.map(|p| p.clone().try_into().unwrap())
.as_ref(),
delegated_to.as_ref(),
)?;
println!("{}", address.to_bech32().unwrap());
Ok(())
})
}

View File

@ -1,3 +1,4 @@
pub mod address;
pub mod build; pub mod build;
pub mod check; pub mod check;
pub mod docs; pub mod docs;

View File

@ -165,7 +165,7 @@ fn gitignore(root: &Path) -> miette::Result<()> {
indoc! { indoc! {
r#" r#"
# Aiken compilation artifacts # Aiken compilation artifacts
assets/ artifacts/
# Aiken's project working directory # Aiken's project working directory
build/ build/
# Aiken's default documentation export # Aiken's default documentation export

View File

@ -94,12 +94,20 @@ impl telemetry::EventListener for Terminal {
telemetry::Event::WaitingForBuildDirLock => { telemetry::Event::WaitingForBuildDirLock => {
println!("{}", "Waiting for build directory lock ...".bold().purple()); println!("{}", "Waiting for build directory lock ...".bold().purple());
} }
telemetry::Event::GeneratingUPLC { output_path, name } => { telemetry::Event::DumpingUPLC { path } => {
println!( println!(
"{} {} in {}", "{} {} ({})",
" Exporting".bold().purple(),
"UPLC".bold(),
path.display().bright_blue()
);
}
telemetry::Event::GeneratingBlueprint { path } => {
println!(
"{} {} ({})",
" Generating".bold().purple(), " Generating".bold().purple(),
name.bold(), "project's blueprint".bold(),
output_path.display().bright_blue() path.display().bright_blue()
); );
} }
telemetry::Event::GeneratingDocFiles { output_path } => { telemetry::Event::GeneratingDocFiles { output_path } => {
@ -112,7 +120,7 @@ impl telemetry::EventListener for Terminal {
telemetry::Event::GeneratingUPLCFor { name, path } => { telemetry::Event::GeneratingUPLCFor { name, path } => {
println!( println!(
"{} {}.{{{}}}", "{} {}.{{{}}}",
" Generating Untyped Plutus Core for".bold().purple(), " Generating UPLC for".bold().purple(),
path.to_str().unwrap_or("").blue(), path.to_str().unwrap_or("").blue(),
name.bright_blue(), name.bright_blue(),
); );

View File

@ -1,5 +1,5 @@
use aiken::cmd::{ use aiken::cmd::{
build, check, docs, fmt, lsp, new, address, build, check, docs, fmt, lsp, new,
packages::{self, add}, packages::{self, add},
tx, uplc, tx, uplc,
}; };
@ -14,6 +14,7 @@ pub enum Cmd {
New(new::Args), New(new::Args),
Fmt(fmt::Args), Fmt(fmt::Args),
Build(build::Args), Build(build::Args),
Address(address::Args),
Check(check::Args), Check(check::Args),
Docs(docs::Args), Docs(docs::Args),
Add(add::Args), Add(add::Args),
@ -43,6 +44,7 @@ fn main() -> miette::Result<()> {
Cmd::New(args) => new::exec(args), Cmd::New(args) => new::exec(args),
Cmd::Fmt(args) => fmt::exec(args), Cmd::Fmt(args) => fmt::exec(args),
Cmd::Build(args) => build::exec(args), Cmd::Build(args) => build::exec(args),
Cmd::Address(args) => address::exec(args),
Cmd::Check(args) => check::exec(args), Cmd::Check(args) => check::exec(args),
Cmd::Docs(args) => docs::exec(args), Cmd::Docs(args) => docs::exec(args),
Cmd::Add(args) => add::exec(args), Cmd::Add(args) => add::exec(args),

View File

@ -1,6 +1,20 @@
use std::{fmt::Display, rc::Rc}; use std::{
fmt::{self, Display},
rc::Rc,
};
use pallas_primitives::{alonzo::PlutusData, babbage::Language}; use serde::{
self,
de::{self, Deserialize, Deserializer, MapAccess, Visitor},
ser::{Serialize, SerializeStruct, Serializer},
};
use pallas_addresses::{Network, ShelleyAddress, ShelleyDelegationPart, ShelleyPaymentPart};
use pallas_primitives::{
alonzo::PlutusData,
babbage::{self as cardano, Language},
};
use pallas_traverse::ComputeHash;
use crate::{ use crate::{
builtins::DefaultFunction, builtins::DefaultFunction,
@ -79,6 +93,81 @@ where
} }
} }
impl Serialize for Program<DeBruijn> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
let cbor = self.to_cbor().unwrap();
let mut s = serializer.serialize_struct("Program<DeBruijn>", 2)?;
s.serialize_field("compiledCode", &hex::encode(&cbor))?;
s.serialize_field("hash", &cardano::PlutusV2Script(cbor.into()).compute_hash())?;
s.end()
}
}
impl<'a> Deserialize<'a> for Program<DeBruijn> {
fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> {
#[derive(serde::Deserialize)]
#[serde(field_identifier, rename_all = "camelCase")]
enum Fields {
CompiledCode,
}
struct ProgramVisitor;
impl<'a> Visitor<'a> for ProgramVisitor {
type Value = Program<DeBruijn>;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("Program<Visitor>")
}
fn visit_map<V>(self, mut map: V) -> Result<Program<DeBruijn>, V::Error>
where
V: MapAccess<'a>,
{
let mut compiled_code: Option<String> = None;
while let Some(key) = map.next_key()? {
match key {
Fields::CompiledCode => {
if compiled_code.is_some() {
return Err(de::Error::duplicate_field("compiledCode"));
}
compiled_code = Some(map.next_value()?);
}
}
}
let compiled_code =
compiled_code.ok_or_else(|| de::Error::missing_field("compiledCode"))?;
let mut cbor_buffer = Vec::new();
let mut flat_buffer = Vec::new();
Program::<DeBruijn>::from_hex(&compiled_code, &mut cbor_buffer, &mut flat_buffer)
.map_err(|e| {
de::Error::invalid_value(
de::Unexpected::Other(&format!("{}", e)),
&"a base16-encoded CBOR-serialized UPLC program",
)
})
}
}
const FIELDS: &[&str] = &["compiledCode"];
deserializer.deserialize_struct("Program<DeBruijn>", FIELDS, ProgramVisitor)
}
}
impl Program<DeBruijn> {
pub fn address(&self, network: Network, delegation: ShelleyDelegationPart) -> ShelleyAddress {
let cbor = self.to_cbor().unwrap();
let validator_hash = cardano::PlutusV2Script(cbor.into()).compute_hash();
ShelleyAddress::new(
network,
ShelleyPaymentPart::Script(validator_hash),
delegation,
)
}
}
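Taken together, the serde support and the address helper above let one go from a blueprint's compiledCode straight to a script address. A hedged sketch as a standalone binary, assuming uplc, pallas-addresses and serde_json as dependencies; the hex string is the acceptance_test_047 validator shipped in this changeset:

    use pallas_addresses::{Network, ShelleyDelegationPart};
    use uplc::ast::{DeBruijn, Program};

    fn main() {
        // "compiledCode" of the acceptance_test_047 validator from this changeset.
        let compiled_code = "582f01000032322225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae880080041";

        let mut cbor_buffer = Vec::new();
        let mut flat_buffer = Vec::new();

        let program =
            Program::<DeBruijn>::from_hex(compiled_code, &mut cbor_buffer, &mut flat_buffer)
                .expect("a base16-encoded CBOR-serialized UPLC program");

        // Serializes as { "compiledCode": ..., "hash": ... }, i.e. the shape used by
        // validator entries in plutus.json.
        println!("{}", serde_json::to_string_pretty(&program).unwrap());

        // Derive the script address on the testnet, with no delegation part attached.
        let address = program.address(Network::Testnet, ShelleyDelegationPart::Null);
        println!("{}", address.to_bech32().unwrap());
    }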
/// This represents a term in Untyped Plutus Core. /// This represents a term in Untyped Plutus Core.
/// We need a generic type for the different forms that a program may be in. /// We need a generic type for the different forms that a program may be in.
/// Specifically, `Var` and `parameter_name` in `Lambda` can be a `Name`, /// Specifically, `Var` and `parameter_name` in `Lambda` can be a `Name`,

View File

@ -1 +0,0 @@
addr1w8r2ln3c7meykuf6ejw0qu5qtdfxh4e4p68v5e3c0lwmrmsdujvef

View File

@ -1,5 +0,0 @@
{
"type": "PlutusScriptV2",
"description": "Generated by Aiken",
"cborHex": "590173590170010000323222253335734646464646466002006464646464646464646600201291010500000000000022323232300600130060013300600100237566601c601e00490011199ab9a0014a09448c94ccd55cf8008a5114a00024464646600a00297adef6c60330050010020022232325333573466e1c005200210031323200137566ae84004c034008d55ce9baa001002223300300200130010012223253335573e002266e9520024bd700991919192999ab9a3371e00c002266e9520003357406e980092f5c0266601001000600c6eb8d55ce8019bab35573c0046ae88008d5d08008011800800911192999aab9f00114a026464a666ae68c01000852889998030030008021aba2002357420020046eb0cc004c008cc004c00800d20004801088c8ccc00400520000032223333573466e1c0100095d0919980200219b8000348008d5d100080091aab9e37540022930b18008009112999aab9f00214984cd5cd98009aba1002333003003357440040021"
}

View File

@ -1 +0,0 @@
590170010000323222253335734646464646466002006464646464646464646600201291010500000000000022323232300600130060013300600100237566601c601e00490011199ab9a0014a09448c94ccd55cf8008a5114a00024464646600a00297adef6c60330050010020022232325333573466e1c005200210031323200137566ae84004c034008d55ce9baa001002223300300200130010012223253335573e002266e9520024bd700991919192999ab9a3371e00c002266e9520003357406e980092f5c0266601001000600c6eb8d55ce8019bab35573c0046ae88008d5d08008011800800911192999aab9f00114a026464a666ae68c01000852889998030030008021aba2002357420020046eb0cc004c008cc004c00800d20004801088c8ccc00400520000032223333573466e1c0100095d0919980200219b8000348008d5d100080091aab9e37540022930b18008009112999aab9f00214984cd5cd98009aba1002333003003357440040021

View File

@ -1 +0,0 @@
addr_test1wrr2ln3c7meykuf6ejw0qu5qtdfxh4e4p68v5e3c0lwmrmsk5xskv

View File

@ -0,0 +1,22 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_036",
"version": "0.0.0"
},
"validators": [
{
"title": "spend",
"purpose": "spend",
"datum": {
"title": "Data",
"description": "Any Plutus data."
},
"redeemer": {
"title": "Data",
"description": "Any Plutus data."
},
"compiledCode": "590170010000323222253335734646464646466002006464646464646464646600201291010500000000000022323232300600130060013300600100237566601c601e00490011199ab9a0014a09448c94ccd55cf8008a5114a00024464646600a00297adef6c60330050010020022232325333573466e1c005200210031323200137566ae84004c034008d55ce9baa001002223300300200130010012223253335573e002266e9520024bd700991919192999ab9a3371e00c002266e9520003357406e980092f5c0266601001000600c6eb8d55ce8019bab35573c0046ae88008d5d08008011800800911192999aab9f00114a026464a666ae68c01000852889998030030008021aba2002357420020046eb0cc004c008cc004c00800d20004801088c8ccc00400520000032223333573466e1c0100095d0919980200219b8000348008d5d100080091aab9e37540022930b18008009112999aab9f00214984cd5cd98009aba1002333003003357440040021",
"hash": "c6afce38f6f24b713acc9cf072805b526bd7350e8eca66387fddb1ee"
}
]
}

View File

@ -12,7 +12,7 @@ pub fn has_policy_id(self: Output, policy_id: PolicyId) -> Bool {
|> not |> not
} }
pub fn spend(_datum, _redeemer, ctx: ScriptContext) -> Bool { pub fn spend(_datum: Data, _redeemer: Data, ctx: ScriptContext) -> Bool {
ctx.transaction.outputs ctx.transaction.outputs
|> list.any(has_policy_id(_, my_policy_id)) |> list.any(has_policy_id(_, my_policy_id))
} }

View File

@ -1 +0,0 @@
addr1wyahaesnnh44nkyjj4dv8jk3t4f7frwtzepjyujkk2wjkmczwqttr

View File

@ -1,5 +0,0 @@
{
"type": "PlutusScriptV2",
"description": "Generated by Aiken",
"cborHex": "5831582f01000032322225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae880080041"
}

View File

@ -1 +0,0 @@
582f01000032322225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae880080041

View File

@ -1 +0,0 @@
addr_test1wqahaesnnh44nkyjj4dv8jk3t4f7frwtzepjyujkk2wjkmcex5hyx

View File

@ -0,0 +1,36 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_047",
"version": "0.0.0"
},
"validators": [
{
"title": "foo",
"purpose": "spend",
"datum": {
"title": "Unit",
"description": "The nullary constructor.",
"anyOf": [
{
"dataType": "constructor",
"index": 0,
"fields": []
}
]
},
"redeemer": {
"title": "Unit",
"description": "The nullary constructor.",
"anyOf": [
{
"dataType": "constructor",
"index": 0,
"fields": []
}
]
},
"compiledCode": "582f01000032322225333573494452616300100122253335573e004293099ab9b3001357420046660060066ae880080041",
"hash": "3b7ee6139deb59d892955ac3cad15d53e48dcb1643227256b29d2b6f"
}
]
}

View File

@ -1 +0,0 @@
addr1w9d2hmr5s8j9cz5xgzjzn3srsfajcwjjzeruaj5mh3kpuqcmvmz3r

View File

@ -1,5 +0,0 @@
{
"type": "PlutusScriptV2",
"description": "Generated by Aiken",
"cborHex": "5855585301000032322225333573466e1cc8c0052f7b6301010400010101002323232002375a6aae78008dd69aab9d0010014802052616300100122253335573e004293099ab9b3001357420046660060066ae88008005"
}

View File

@ -1 +0,0 @@
585301000032322225333573466e1cc8c0052f7b6301010400010101002323232002375a6aae78008dd69aab9d0010014802052616300100122253335573e004293099ab9b3001357420046660060066ae88008005

View File

@ -1 +0,0 @@
addr_test1wpd2hmr5s8j9cz5xgzjzn3srsfajcwjjzeruaj5mh3kpuqcqy077x

View File

@ -0,0 +1,22 @@
{
"preamble": {
"title": "aiken-lang/acceptance_test_048",
"version": "0.0.0"
},
"validators": [
{
"title": "foo",
"purpose": "spend",
"datum": {
"title": "Data",
"description": "Any Plutus data."
},
"redeemer": {
"title": "Data",
"description": "Any Plutus data."
},
"compiledCode": "585301000032322225333573466e1cc8c0052f7b6301010400010101002323232002375a6aae78008dd69aab9d0010014802052616300100122253335573e004293099ab9b3001357420046660060066ae88008005",
"hash": "5aabec7481e45c0a8640a429c603827b2c3a521647ceca9bbc6c1e03"
}
]
}

View File

@ -4,6 +4,6 @@ fn when_tuple(a: (Int, Int)) -> Int {
} }
} }
pub fn spend(a, b, c) -> Bool { pub fn spend(a: Data, b: Data, c) -> Bool {
when_tuple((4, 1)) == 4 when_tuple((4, 1)) == 4
} }

View File

@ -1 +0,0 @@
addr1w9z47fyj9ffqck2fnld04k27zfe04wq6n9zj76u4ghu4xdcd0futm

View File

@ -1,5 +0,0 @@
{
"type": "PlutusScriptV2",
"description": "Generated by Aiken",
"cborHex": "4e4d01000022253335734944526161"
}

View File

@ -1 +0,0 @@
4d01000022253335734944526161

View File

@ -1 +0,0 @@
addr_test1wpz47fyj9ffqck2fnld04k27zfe04wq6n9zj76u4ghu4xdck8aqy7

View File

@ -1,5 +0,0 @@
{
"type": "PlutusScriptV2",
"description": "Generated by Aiken",
"cborHex": "58bb58b90100002225333573464646464a666ae6800840045281919198009bac330043005330043005006480012010375c66008600a01090001800800911192999aab9f00114a026464a666ae68cdc78010020a511333006006001004357440046eb8d5d080080119b97323001375c66004600600a900011b9900149010d48656c6c6f2c20576f726c64210022323330010014800000c888cccd5cd19b870040025742466600800866e0000d20023574400200246aae78dd50008a4c2d"
}

View File

@ -1 +0,0 @@
58b90100002225333573464646464a666ae6800840045281919198009bac330043005330043005006480012010375c66008600a01090001800800911192999aab9f00114a026464a666ae68cdc78010020a511333006006001004357440046eb8d5d080080119b97323001375c66004600600a900011b9900149010d48656c6c6f2c20576f726c64210022323330010014800000c888cccd5cd19b870040025742466600800866e0000d20023574400200246aae78dd50008a4c2d

View File

@ -0,0 +1,47 @@
{
"preamble": {
"title": "aiken-lang/hello_world",
"description": "Aiken contracts for project 'aiken-lang/hello_world'",
"version": "1.0.0"
},
"validators": [
{
"title": "hello_world",
"purpose": "spend",
"datum": {
"title": "Datum",
"anyOf": [
{
"title": "Datum",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "owner",
"dataType": "bytes"
}
]
}
]
},
"redeemer": {
"title": "Redeemer",
"anyOf": [
{
"title": "Redeemer",
"dataType": "constructor",
"index": 0,
"fields": [
{
"title": "msg",
"dataType": "bytes"
}
]
}
]
},
"compiledCode": "58db01000032322225333573464646464a666ae6800840045281919198009bac330043005330043005006480012010375c66008600a01090001800800911192999aab9f00114a026464a666ae68cdc78010020a511333006006001004357440046eb8d5d080080119b97323001375c66004600600a900011b9900149010d48656c6c6f2c20576f726c64210022323330010014800000c888cccd5cd19b870040025742466600800866e0000d20023574400200246aae78dd50008a4c2c6002002444a666aae7c008526133573660026ae84008ccc00c00cd5d10010009",
"hash": "d478e73c101a53d083f4720d400c876d7441c279168becabab0e0177"
}
]
}