Refactor build steps to generate blueprints instead
The blueprint is generated at the root of the repository and is meant to be versioned with the rest of the code. It acts as a business card that carries a lot of practical information about the project. There's a variety of tools we can then build on top of it for open-source contracts. And, quite importantly, the blueprint is language-agnostic; it isn't specific to Aiken, so it is really meant as an interop format within the ecosystem.
parent 3cefbd00af · commit 5683d19a4c
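For a sense of what the build now produces, here is a minimal sketch of a generated plutus.json, assuming a project with a single spend validator. The field names follow the serializers introduced in this commit; the title, hash, and compiled code are illustrative placeholders only:

    {
      "preamble": {
        "title": "my_project",
        "version": "1.0.0",
        "license": "Apache-2.0"
      },
      "validators": [
        {
          "purpose": "spend",
          "hash": "<script hash, hex-encoded>",
          "datum": { "dataType": "integer" },
          "redeemer": {
            "dataType": "constructor",
            "index": 0,
            "fields": [{ "dataType": "bytes" }]
          },
          "compiledCode": "<CBOR-serialized UPLC program, hex-encoded>"
        }
      ]
    }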
@@ -988,7 +988,7 @@ pub fn convert_constants_to_data(constants: Vec<UplcConstant>) -> Vec<UplcConsta
    new_constants
}

pub fn wrap_validator_args(term: Term<Name>, arguments: Vec<TypedArg>) -> Term<Name> {
pub fn wrap_validator_args(term: Term<Name>, arguments: &[TypedArg]) -> Term<Name> {
    let mut term = term;
    for arg in arguments.iter().rev() {
        if !matches!(arg.tipo.get_uplc_type(), UplcType::Data) {
@@ -39,11 +39,11 @@ use crate::{
    IdGenerator,
};

#[derive(Clone)]
pub struct CodeGenerator<'a> {
    defined_functions: IndexMap<FunctionAccessKey, ()>,
    functions: &'a IndexMap<FunctionAccessKey, &'a TypedFunction>,
    // type_aliases: &'a IndexMap<(String, String), &'a TypeAlias<Arc<tipo::Type>>>,
    data_types: &'a IndexMap<DataTypeKey, &'a TypedDataType>,
    functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
    data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
    module_types: &'a IndexMap<String, TypeInfo>,
    id_gen: IdGenerator,
    needs_field_access: bool,

@@ -53,15 +53,13 @@ pub struct CodeGenerator<'a> {

impl<'a> CodeGenerator<'a> {
    pub fn new(
        functions: &'a IndexMap<FunctionAccessKey, &'a TypedFunction>,
        // type_aliases: &'a IndexMap<(String, String), &'a TypeAlias<Arc<tipo::Type>>>,
        data_types: &'a IndexMap<DataTypeKey, &'a TypedDataType>,
        functions: IndexMap<FunctionAccessKey, &'a TypedFunction>,
        data_types: IndexMap<DataTypeKey, &'a TypedDataType>,
        module_types: &'a IndexMap<String, TypeInfo>,
    ) -> Self {
        CodeGenerator {
            defined_functions: IndexMap::new(),
            functions,
            // type_aliases,
            data_types,
            module_types,
            id_gen: IdGenerator::new(),

@@ -73,14 +71,14 @@ impl<'a> CodeGenerator<'a> {

    pub fn generate(
        &mut self,
        body: TypedExpr,
        arguments: Vec<TypedArg>,
        body: &TypedExpr,
        arguments: &[TypedArg],
        wrap_as_validator: bool,
    ) -> Program<Name> {
        let mut ir_stack = vec![];
        let scope = vec![self.id_gen.next()];

        self.build_ir(&body, &mut ir_stack, scope);
        self.build_ir(body, &mut ir_stack, scope);

        self.define_ir(&mut ir_stack);

@@ -2865,7 +2863,7 @@ impl<'a> CodeGenerator<'a> {
            variant_name: String::new(),
        };

        let function = self.functions.get(&non_variant_function_key).unwrap();
        let function = *self.functions.get(&non_variant_function_key).unwrap();

        let mut func_ir = vec![];

@@ -3355,7 +3353,7 @@ impl<'a> CodeGenerator<'a> {
            count,
            scope,
        } => {
            if check_replaceable_opaque_type(&tipo, self.data_types) {
            if check_replaceable_opaque_type(&tipo, &self.data_types) {
                indices_to_remove.push(index);
            } else {
                let mut replaced_type = tipo.clone();

@@ -3377,7 +3375,7 @@ impl<'a> CodeGenerator<'a> {
            let record = ir_stack[index + 1].clone();
            let record_type = record.tipo();
            if let Some(record_type) = record_type {
                if check_replaceable_opaque_type(&record_type, self.data_types) {
                if check_replaceable_opaque_type(&record_type, &self.data_types) {
                    indices_to_remove.push(index);
                } else {
                    let mut replaced_type = tipo.clone();

@@ -3408,7 +3406,7 @@ impl<'a> CodeGenerator<'a> {
            let record = ir_stack[index + 1].clone();
            let record_type = record.tipo();
            if let Some(record_type) = record_type {
                if check_replaceable_opaque_type(&record_type, self.data_types) {
                if check_replaceable_opaque_type(&record_type, &self.data_types) {
                    ir_stack[index] = Air::Let {
                        scope,
                        name: indices[0].1.clone(),
@@ -1,191 +0,0 @@
use serde::ser::{Serialize, SerializeStruct, Serializer};
use serde_json;
use std::fmt::{self, Display};

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Schema {
    Integer,
    Bytes,
    List(Item<Box<Schema>>),
    Map((Box<Schema>, Box<Schema>)),
    Constructor(usize, Vec<Schema>),
}

impl Display for Schema {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let s = serde_json::to_string_pretty(self).map_err(|_| fmt::Error)?;
        f.write_str(&s)
    }
}

impl Serialize for Schema {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        match self {
            Schema::Integer => {
                let mut s = serializer.serialize_struct("Integer", 1)?;
                s.serialize_field("dataType", "integer")?;
                s.end()
            }
            Schema::Bytes => {
                let mut s = serializer.serialize_struct("Bytes", 1)?;
                s.serialize_field("dataType", "bytes")?;
                s.end()
            }
            Schema::List(items) => {
                let mut s = serializer.serialize_struct("List", 2)?;
                s.serialize_field("dataType", "list")?;
                s.serialize_field("items", &items)?;
                s.end()
            }
            Schema::Map(elements) => {
                let mut s = serializer.serialize_struct("Map", 2)?;
                s.serialize_field("dataType", "map")?;
                s.serialize_field("elements", &elements)?;
                s.end()
            }
            _ => {
                todo!()
            }
        }
    }
}

// Represent a items list in a JSON schema. Can be either a singleton (i.e. a single schema) when
// all elements in the list are uniform or a list of schemas.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Item<T> {
    Singleton(T),
    Many(Vec<T>),
}

impl<T: Serialize> Serialize for Item<T> {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        match self {
            Item::Singleton(elem) => Serialize::serialize(elem, serializer),
            Item::Many(elems) => Serialize::serialize(elems, serializer),
        }
    }
}

pub mod test {
    use super::*;
    #[allow(unused_imports)]
    use serde_json::{self, json, Value};

    pub fn assert_json(schema: &Schema, expected: Value) {
        assert_eq!(serde_json::to_value(schema).unwrap(), expected);
    }

    #[test]
    fn serialize_integer() {
        let schema = Schema::Integer;
        assert_json(
            &schema,
            json!({
                "dataType": "integer"
            }),
        );
    }

    #[test]
    fn serialize_bytes() {
        let schema = Schema::Bytes;
        assert_json(
            &schema,
            json!({
                "dataType": "bytes"
            }),
        );
    }

    #[test]
    fn serialize_list_1() {
        let schema = Schema::List(Item::Many(vec![]));
        assert_json(
            &schema,
            json!({
                "dataType": "list",
                "items": []
            }),
        );
    }

    #[test]
    fn serialize_list_2() {
        let schema = Schema::List(Item::Singleton(Box::new(Schema::Integer)));
        assert_json(
            &schema,
            json!({
                "dataType": "list",
                "items": {
                    "dataType": "integer"
                }
            }),
        );
    }

    #[test]
    fn serialize_list_3() {
        let schema = Schema::List(Item::Many(vec![
            Box::new(Schema::Bytes),
            Box::new(Schema::List(Item::Singleton(Box::new(Schema::Integer)))),
        ]));
        assert_json(
            &schema,
            json!({
                "dataType": "list",
                "items": [
                    {
                        "dataType": "bytes"
                    },
                    {
                        "dataType": "list",
                        "items": { "dataType": "integer" }
                    }
                ]
            }),
        );
    }

    #[test]
    fn serialize_map_1() {
        let schema = Schema::Map((Box::new(Schema::Integer), Box::new(Schema::Bytes)));
        assert_json(
            &schema,
            json!({
                "dataType": "map",
                "elements": [
                    {
                        "dataType": "integer"
                    },
                    {
                        "dataType": "bytes"
                    }
                ]
            }),
        )
    }

    #[test]
    fn serialize_map_2() {
        let schema = Schema::Map((
            Box::new(Schema::Bytes),
            Box::new(Schema::List(Item::Singleton(Box::new(Schema::Integer)))),
        ));
        assert_json(
            &schema,
            json!({
                "dataType": "map",
                "elements": [
                    {
                        "dataType": "bytes"
                    },
                    {
                        "dataType": "list",
                        "items": { "dataType": "integer" }
                    }
                ]
            }),
        )
    }
}
@@ -0,0 +1,59 @@
use super::schema;
use crate::module::CheckedModule;
use aiken_lang::ast::{Span, TypedFunction};
use miette::{Diagnostic, NamedSource};
use std::{fmt::Debug, path::PathBuf};

#[derive(Debug, thiserror::Error, Diagnostic)]
pub enum Error {
    #[error("Validator functions must return Bool")]
    ValidatorMustReturnBool {
        path: PathBuf,
        src: String,
        named: NamedSource,
        location: Span,
    },
    #[error("Validator\n\n{name}\n\nrequires at least {at_least} arguments")]
    WrongValidatorArity {
        name: String,
        at_least: u8,
        location: Span,
        path: PathBuf,
        src: String,
        named: NamedSource,
    },
    #[error(transparent)]
    Schema(schema::Error),
}

pub fn assert_return_bool(module: &CheckedModule, def: &TypedFunction) -> Result<(), Error> {
    if !def.return_type.is_bool() {
        Err(Error::ValidatorMustReturnBool {
            location: def.location,
            src: module.code.clone(),
            path: module.input_path.clone(),
            named: NamedSource::new(module.input_path.display().to_string(), module.code.clone()),
        })
    } else {
        Ok(())
    }
}

pub fn assert_min_arity(
    module: &CheckedModule,
    def: &TypedFunction,
    at_least: u8,
) -> Result<(), Error> {
    if def.arguments.len() < at_least as usize {
        Err(Error::WrongValidatorArity {
            location: def.location,
            src: module.code.clone(),
            path: module.input_path.clone(),
            named: NamedSource::new(module.input_path.display().to_string(), module.code.clone()),
            name: def.name.clone(),
            at_least,
        })
    } else {
        Ok(())
    }
}
@@ -0,0 +1,141 @@
pub mod error;
pub mod schema;
pub mod validator;

use crate::{config::Config, module::CheckedModules};
use aiken_lang::uplc::CodeGenerator;
use error::*;
use schema::Schema;
use std::fmt::Debug;
use validator::{Purpose, Validator};

#[derive(Debug, PartialEq, Clone, serde::Serialize)]
pub struct Blueprint {
    pub preamble: Preamble,
    pub validators: Vec<validator::Validator>,
}

impl Blueprint {
    pub fn new(
        config: &Config,
        modules: &CheckedModules,
        generator: &mut CodeGenerator,
    ) -> Result<Self, Error> {
        let mut validators = Vec::new();

        for (validator, def) in modules.validators() {
            let purpose: Purpose = def.name.clone().into();

            assert_return_bool(validator, def)?;
            assert_min_arity(validator, def, purpose.min_arity())?;

            let mut args = def.arguments.iter().rev();
            let (_, redeemer, datum) = (args.next(), args.next().unwrap(), args.next());

            validators.push(Validator {
                description: None,
                purpose,
                datum: datum
                    .map(|datum| {
                        Schema::from_type(modules.into(), &datum.arg_name.get_label(), &datum.tipo)
                            .map_err(Error::Schema)
                    })
                    .transpose()?,
                redeemer: Schema::from_type(
                    modules.into(),
                    &redeemer.arg_name.get_label(),
                    &redeemer.tipo,
                )
                .map_err(Error::Schema)?,
                program: generator
                    .generate(&def.body, &def.arguments, true)
                    .try_into()
                    .unwrap(),
            });
        }

        Ok(Blueprint {
            preamble: Preamble::from_config(config),
            validators,
        })
    }
}

#[derive(Debug, PartialEq, Clone, serde::Serialize)]
pub struct Preamble {
    pub title: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    pub version: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub license: Option<String>,
}

impl Preamble {
    pub fn from_config(config: &Config) -> Self {
        Preamble {
            title: config.name.to_string(),
            description: if config.description.is_empty() {
                None
            } else {
                Some(config.description.clone())
            },
            version: config.version.clone(),
            license: config.license.clone(),
        }
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use serde_json::{self, json};

    #[test]
    fn serialize_no_description() {
        let blueprint = Blueprint {
            preamble: Preamble {
                title: "Foo".to_string(),
                description: None,
                version: "1.0.0".to_string(),
                license: Some("Apache-2.0".to_string()),
            },
            validators: vec![],
        };
        assert_eq!(
            serde_json::to_value(&blueprint).unwrap(),
            json!({
                "preamble": {
                    "title": "Foo",
                    "version": "1.0.0",
                    "license": "Apache-2.0"
                },
                "validators": []
            }),
        );
    }

    #[test]
    fn serialize_with_description() {
        let blueprint = Blueprint {
            preamble: Preamble {
                title: "Foo".to_string(),
                description: Some("Lorem ipsum".to_string()),
                version: "1.0.0".to_string(),
                license: None,
            },
            validators: vec![],
        };
        assert_eq!(
            serde_json::to_value(&blueprint).unwrap(),
            json!({
                "preamble": {
                    "title": "Foo",
                    "description": "Lorem ipsum",
                    "version": "1.0.0",
                },
                "validators": []
            }),
        );
    }
}
@@ -0,0 +1,353 @@
use crate::CheckedModule;
use aiken_lang::{
    ast::{DataType, Definition, TypedDefinition},
    tipo::Type,
};
use miette::Diagnostic;
use serde::ser::{Serialize, SerializeStruct, Serializer};
use serde_json;
use std::{
    collections::HashMap,
    fmt::{self, Display},
    sync::Arc,
};

#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Schema {
    Integer,
    Bytes,
    List(Item<Box<Schema>>),
    Map((Box<Schema>, Box<Schema>)),
    AnyOf(Vec<Constructor>),
}

#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Constructor {
    pub index: usize,
    pub fields: Vec<Schema>,
}

impl Schema {
    pub fn from_type(
        modules: &HashMap<String, CheckedModule>,
        name: &str,
        type_info: &Type,
    ) -> Result<Self, Error> {
        match type_info {
            Type::App {
                module: module_name,
                name: type_name,
                ..
            } if module_name.is_empty() => match &type_name[..] {
                "ByteArray" => Ok(Schema::Bytes),
                "Integer" => Ok(Schema::Integer),
                _ => Err(Error::UnsupportedPrimitiveType {
                    type_name: type_name.clone(),
                }),
            },
            Type::App {
                module: module_name,
                name: type_name,
                ..
            } => {
                let module = modules.get(module_name).unwrap();
                let constructor = find_definition(type_name, &module.ast.definitions).unwrap();
                Self::from_data_type(modules, constructor)
            }
            Type::Fn { .. } | Type::Var { .. } | Type::Tuple { .. } => {
                Err(Error::UnsupportedKind {
                    arg_or_field_name: name.to_string(),
                    type_info: type_info.clone(),
                })
            }
        }
    }

    pub fn from_data_type(
        modules: &HashMap<String, CheckedModule>,
        data_type: &DataType<Arc<Type>>,
    ) -> Result<Self, Error> {
        let mut variants = vec![];
        for (index, constructor) in data_type.constructors.iter().enumerate() {
            let mut fields = vec![];
            for field in constructor.arguments.iter() {
                fields.push(Schema::from_type(
                    modules,
                    &field.label.clone().unwrap_or_default(),
                    &field.tipo,
                )?);
            }
            variants.push(Constructor { index, fields });
        }
        Ok(Schema::AnyOf(variants))
    }
}

impl Display for Schema {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let s = serde_json::to_string_pretty(self).map_err(|_| fmt::Error)?;
        f.write_str(&s)
    }
}

impl Serialize for Schema {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        match self {
            Schema::Integer => {
                let mut s = serializer.serialize_struct("Integer", 1)?;
                s.serialize_field("dataType", "integer")?;
                s.end()
            }
            Schema::Bytes => {
                let mut s = serializer.serialize_struct("Bytes", 1)?;
                s.serialize_field("dataType", "bytes")?;
                s.end()
            }
            Schema::List(items) => {
                let mut s = serializer.serialize_struct("List", 2)?;
                s.serialize_field("dataType", "list")?;
                s.serialize_field("items", &items)?;
                s.end()
            }
            Schema::Map(elements) => {
                let mut s = serializer.serialize_struct("Map", 2)?;
                s.serialize_field("dataType", "map")?;
                s.serialize_field("elements", &elements)?;
                s.end()
            }
            Schema::AnyOf(constructors) => match &constructors[..] {
                [constructor] => constructor.serialize(serializer),
                _ => {
                    let mut s = serializer.serialize_struct("AnyOf", 1)?;
                    s.serialize_field("anyOf", &constructors)?;
                    s.end()
                }
            },
        }
    }
}

impl Serialize for Constructor {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let mut s = serializer.serialize_struct("Constructor", 3)?;
        s.serialize_field("dataType", "constructor")?;
        s.serialize_field("index", &self.index)?;
        s.serialize_field("fields", &self.fields)?;
        s.end()
    }
}

// Represent a items list in a JSON schema. Can be either a singleton (i.e. a single schema) when
// all elements in the list are uniform or a list of schemas.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum Item<T> {
    Singleton(T),
    Many(Vec<T>),
}

impl<T: Serialize> Serialize for Item<T> {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        match self {
            Item::Singleton(elem) => Serialize::serialize(elem, serializer),
            Item::Many(elems) => Serialize::serialize(elems, serializer),
        }
    }
}

#[derive(Debug, PartialEq, Clone, thiserror::Error, Diagnostic)]
pub enum Error {
    #[error("I stumble upon an unsupported kind in a datum or redeemer definition.\n")]
    UnsupportedKind {
        arg_or_field_name: String,
        type_info: Type,
    },
    #[error("I discovered an unexpected primitive in a datum or redeemer definition.\n")]
    UnsupportedPrimitiveType { type_name: String },
}

fn find_definition<'a>(
    name: &str,
    definitions: &'a Vec<TypedDefinition>,
) -> Option<&'a DataType<Arc<Type>>> {
    for def in definitions {
        match def {
            Definition::DataType(data_type) if name == data_type.name => return Some(data_type),
            Definition::Fn { .. }
            | Definition::DataType { .. }
            | Definition::TypeAlias { .. }
            | Definition::Use { .. }
            | Definition::ModuleConstant { .. }
            | Definition::Test { .. } => continue,
        }
    }
    None
}

#[cfg(test)]
pub mod test {
    use super::*;
    use serde_json::{self, json, Value};

    pub fn assert_json(schema: &Schema, expected: Value) {
        assert_eq!(serde_json::to_value(schema).unwrap(), expected);
    }

    #[test]
    fn serialize_integer() {
        let schema = Schema::Integer;
        assert_json(
            &schema,
            json!({
                "dataType": "integer"
            }),
        );
    }

    #[test]
    fn serialize_bytes() {
        let schema = Schema::Bytes;
        assert_json(
            &schema,
            json!({
                "dataType": "bytes"
            }),
        );
    }

    #[test]
    fn serialize_list_1() {
        let schema = Schema::List(Item::Many(vec![]));
        assert_json(
            &schema,
            json!({
                "dataType": "list",
                "items": []
            }),
        );
    }

    #[test]
    fn serialize_list_2() {
        let schema = Schema::List(Item::Singleton(Box::new(Schema::Integer)));
        assert_json(
            &schema,
            json!({
                "dataType": "list",
                "items": {
                    "dataType": "integer"
                }
            }),
        );
    }

    #[test]
    fn serialize_list_3() {
        let schema = Schema::List(Item::Many(vec![
            Box::new(Schema::Bytes),
            Box::new(Schema::List(Item::Singleton(Box::new(Schema::Integer)))),
        ]));
        assert_json(
            &schema,
            json!({
                "dataType": "list",
                "items": [
                    {
                        "dataType": "bytes"
                    },
                    {
                        "dataType": "list",
                        "items": { "dataType": "integer" }
                    }
                ]
            }),
        );
    }

    #[test]
    fn serialize_map_1() {
        let schema = Schema::Map((Box::new(Schema::Integer), Box::new(Schema::Bytes)));
        assert_json(
            &schema,
            json!({
                "dataType": "map",
                "elements": [
                    {
                        "dataType": "integer"
                    },
                    {
                        "dataType": "bytes"
                    }
                ]
            }),
        )
    }

    #[test]
    fn serialize_map_2() {
        let schema = Schema::Map((
            Box::new(Schema::Bytes),
            Box::new(Schema::List(Item::Singleton(Box::new(Schema::Integer)))),
        ));
        assert_json(
            &schema,
            json!({
                "dataType": "map",
                "elements": [
                    {
                        "dataType": "bytes"
                    },
                    {
                        "dataType": "list",
                        "items": { "dataType": "integer" }
                    }
                ]
            }),
        )
    }

    #[test]
    fn serialize_constr_1() {
        let schema = Schema::AnyOf(vec![Constructor {
            index: 0,
            fields: vec![],
        }]);
        assert_json(
            &schema,
            json!({
                "dataType": "constructor",
                "index": 0,
                "fields": []
            }),
        )
    }

    #[test]
    fn serialize_constr_2() {
        let schema = Schema::AnyOf(vec![
            Constructor {
                index: 0,
                fields: vec![Schema::Integer],
            },
            Constructor {
                index: 1,
                fields: vec![Schema::Bytes],
            },
        ]);
        assert_json(
            &schema,
            json!({
                "anyOf": [
                    {
                        "dataType": "constructor",
                        "index": 0,
                        "fields": [{ "dataType": "integer" }]
                    },
                    {
                        "dataType": "constructor",
                        "index": 1,
                        "fields": [{ "dataType": "bytes" }]
                    }
                ]
            }),
        )
    }
}
@@ -0,0 +1,121 @@
use super::schema::Schema;
use pallas::ledger::primitives::babbage as cardano;
use pallas_traverse::ComputeHash;
use serde::{
    self,
    ser::{Serialize, SerializeStruct, Serializer},
};
use std::fmt::{self, Display};
use uplc::ast::{NamedDeBruijn, Program};

#[derive(Debug, PartialEq, Clone)]
pub struct Validator {
    pub purpose: Purpose,
    pub description: Option<String>,
    pub datum: Option<Schema>,
    pub redeemer: Schema,
    pub program: Program<NamedDeBruijn>,
}

impl Serialize for Validator {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let cbor = self.program.to_cbor().unwrap();
        let source_code = hex::encode(&cbor);
        let mut s = serializer.serialize_struct("Validator", 5)?;
        s.serialize_field("purpose", &self.purpose)?;
        let hash = cardano::PlutusV2Script(cbor.into()).compute_hash();
        s.serialize_field("hash", &hash)?;
        if let Some { .. } = self.description {
            s.serialize_field("description", &self.description)?;
        }
        if let Some { .. } = self.datum {
            s.serialize_field("datum", &self.datum)?;
        }
        s.serialize_field("redeemer", &self.redeemer)?;
        s.serialize_field("compiledCode", &source_code)?;
        s.end()
    }
}

#[derive(Debug, PartialEq, Clone, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub enum Purpose {
    Spend,
    Mint,
    Withdraw,
    Publish,
}

impl Purpose {
    pub fn min_arity(&self) -> u8 {
        match self {
            Purpose::Spend => 3,
            Purpose::Mint | Purpose::Withdraw | Purpose::Publish => 2,
        }
    }
}

impl From<String> for Purpose {
    fn from(purpose: String) -> Purpose {
        match &purpose[..] {
            "spend" => Purpose::Spend,
            "mint" => Purpose::Mint,
            "withdraw" => Purpose::Withdraw,
            "publish" => Purpose::Publish,
            unexpected => panic!("Can't turn '{}' into any Purpose", unexpected),
        }
    }
}

impl Display for Purpose {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(match self {
            Purpose::Spend => "spend",
            Purpose::Mint => "mint",
            Purpose::Withdraw => "withdraw",
            Purpose::Publish => "publish",
        })
    }
}

#[cfg(test)]
mod test {
    use super::super::schema::Constructor;
    use super::*;
    use serde_json::{self, json};
    use uplc::parser;

    #[test]
    fn serialize() {
        let program = parser::program("(program 1.0.0 (con integer 42))")
            .unwrap()
            .try_into()
            .unwrap();
        let validator = Validator {
            description: Some("Lorem ipsum".to_string()),
            purpose: Purpose::Spend,
            datum: None,
            redeemer: Schema::AnyOf(vec![Constructor {
                index: 0,
                fields: vec![Schema::Bytes],
            }]),
            program,
        };
        assert_eq!(
            serde_json::to_value(&validator).unwrap(),
            json!({
                "description": "Lorem ipsum",
                "purpose": "spend",
                "redeemer": {
                    "dataType": "constructor",
                    "index": 0,
                    "fields": [{
                        "dataType": "bytes"
                    }]
                },
                "compiledCode": "46010000481501",
                "hash": "27dc8e44c17b4ae5f4b9286ab599fffe70e61b49dec61eaca1fc5898"
            }),
        );
    }
}
@@ -1,4 +1,7 @@
use crate::{deps::manifest::Package, package_name::PackageName, pretty, script::EvalHint};
use crate::{
    blueprint::error as blueprint, deps::manifest::Package, package_name::PackageName, pretty,
    script::EvalHint,
};
use aiken_lang::{
    ast::{BinOp, Span},
    parser::error::ParseError,

@@ -31,6 +34,9 @@ pub enum Error {
    #[error("I found some files with incorrectly formatted source code.")]
    Format { problem_files: Vec<Unformatted> },

    #[error(transparent)]
    Blueprint(#[from] blueprint::Error),

    #[error(transparent)]
    StandardIo(#[from] io::Error),

@@ -183,6 +189,7 @@ impl Error {
            Error::FileIo { .. } => None,
            Error::Format { .. } => None,
            Error::StandardIo(_) => None,
            Error::Blueprint(_) => None,
            Error::MissingManifest { path } => Some(path.to_path_buf()),
            Error::TomlLoading { path, .. } => Some(path.to_path_buf()),
            Error::ImportCycle { .. } => None,

@@ -205,6 +212,7 @@ impl Error {
            Error::FileIo { .. } => None,
            Error::Format { .. } => None,
            Error::StandardIo(_) => None,
            Error::Blueprint(_) => None,
            Error::MissingManifest { .. } => None,
            Error::TomlLoading { src, .. } => Some(src.to_string()),
            Error::ImportCycle { .. } => None,

@@ -250,6 +258,7 @@ impl Diagnostic for Error {
        match self {
            Error::DuplicateModule { .. } => Some(Box::new("aiken::module::duplicate")),
            Error::FileIo { .. } => None,
            Error::Blueprint(e) => e.code(),
            Error::ImportCycle { .. } => Some(Box::new("aiken::module::cyclical")),
            Error::List(_) => None,
            Error::Parse { .. } => Some(Box::new("aiken::parser")),

@@ -279,6 +288,7 @@ impl Diagnostic for Error {
                second.display()
            ))),
            Error::FileIo { .. } => None,
            Error::Blueprint(e) => e.help(),
            Error::ImportCycle { modules } => Some(Box::new(format!(
                "Try moving the shared code to a separate module that the others can depend on\n- {}",
                modules.join("\n- ")

@@ -332,6 +342,7 @@ impl Diagnostic for Error {
            Error::DuplicateModule { .. } => None,
            Error::FileIo { .. } => None,
            Error::ImportCycle { .. } => None,
            Error::Blueprint(e) => e.labels(),
            Error::List(_) => None,
            Error::Parse { error, .. } => error.labels(),
            Error::MissingManifest { .. } => None,

@@ -366,6 +377,7 @@ impl Diagnostic for Error {
            Error::DuplicateModule { .. } => None,
            Error::FileIo { .. } => None,
            Error::ImportCycle { .. } => None,
            Error::Blueprint(e) => e.source_code(),
            Error::List(_) => None,
            Error::Parse { named, .. } => Some(named),
            Error::Type { named, .. } => Some(named),

@@ -388,6 +400,7 @@ impl Diagnostic for Error {
            Error::DuplicateModule { .. } => None,
            Error::FileIo { .. } => None,
            Error::ImportCycle { .. } => None,
            Error::Blueprint(e) => e.url(),
            Error::List { .. } => None,
            Error::Parse { .. } => None,
            Error::Type { error, .. } => error.url(),

@@ -409,6 +422,7 @@ impl Diagnostic for Error {
        match self {
            Error::DuplicateModule { .. } => None,
            Error::FileIo { .. } => None,
            Error::Blueprint(e) => e.related(),
            Error::ImportCycle { .. } => None,
            Error::List { .. } => None,
            Error::Parse { .. } => None,
@@ -12,26 +12,21 @@ pub mod pretty;
pub mod script;
pub mod telemetry;

use crate::blueprint::Blueprint;
use aiken_lang::{
    ast::{Definition, Function, ModuleKind, TypedDataType, TypedDefinition, TypedFunction},
    builder::{DataTypeKey, FunctionAccessKey},
    builtins::{self, generic_var},
    tipo::TypeInfo,
    uplc::CodeGenerator,
    IdGenerator, MINT, PUBLISH, SPEND, VALIDATOR_NAMES, WITHDRAW,
    IdGenerator,
};
use deps::UseManifest;
use indexmap::IndexMap;
use miette::NamedSource;
use options::{CodeGenMode, Options};
use package_name::PackageName;
use pallas::{
    codec::minicbor,
    ledger::{addresses::Address, primitives::babbage},
};
use pallas_traverse::ComputeHash;
use script::{EvalHint, EvalInfo, Script};
use serde_json::json;
use std::{
    collections::HashMap,
    fs,

@@ -39,7 +34,7 @@ use std::{
};
use telemetry::EventListener;
use uplc::{
    ast::{Constant, DeBruijn, Program, Term},
    ast::{Constant, Term},
    machine::cost_model::ExBudget,
};

@@ -71,6 +66,8 @@ where
    sources: Vec<Source>,
    pub warnings: Vec<Warning>,
    event_listener: T,
    functions: HashMap<FunctionAccessKey, TypedFunction>,
    data_types: HashMap<DataTypeKey, TypedDataType>,
}

impl<T> Project<T>

@@ -85,6 +82,21 @@ where
        module_types.insert("aiken".to_string(), builtins::prelude(&id_gen));
        module_types.insert("aiken/builtin".to_string(), builtins::plutus(&id_gen));

        let mut functions = HashMap::new();
        for (access_key, func) in builtins::prelude_functions(&id_gen).into_iter() {
            functions.insert(access_key.to_owned(), func);
        }

        let mut data_types = HashMap::new();
        let option_data_type = TypedDataType::option(generic_var(id_gen.next()));
        data_types.insert(
            DataTypeKey {
                module_name: "".to_string(),
                defined_type: "Option".to_string(),
            },
            option_data_type,
        );

        let config = Config::load(&root)?;

        Ok(Project {

@@ -97,6 +109,8 @@ where
            sources: vec![],
            warnings: vec![],
            event_listener,
            functions,
            data_types,
        })
    }

@@ -187,19 +201,43 @@ where

        self.type_check(parsed_modules)?;

        let validators = self.validate_validators()?;

        match options.code_gen_mode {
            CodeGenMode::Build(uplc_dump) => {
                if validators.is_empty() {
                let blueprint_path = self.root.join("plutus.json");
                self.event_listener
                    .handle_event(Event::GeneratingBlueprint {
                        path: blueprint_path.clone(),
                    });

                let mut generator = self.checked_modules.new_generator(
                    &self.functions,
                    &self.data_types,
                    &self.module_types,
                );

                let blueprint = Blueprint::new(&self.config, &self.checked_modules, &mut generator)
                    .map_err(Error::Blueprint)?;

                if blueprint.validators.is_empty() {
                    self.warnings.push(Warning::NoValidators);
                }

                let programs = self.code_gen(validators)?;
                if uplc_dump {
                    let dir = self.root.join("artifacts");
                    fs::create_dir_all(&dir)?;
                    for validator in &blueprint.validators {
                        // TODO: Also include validator name.
                        let path = dir.clone().join(format!("{}.uplc", validator.purpose));
                        fs::write(&path, validator.program.to_pretty())
                            .map_err(|error| Error::FileIo { error, path })?;
                    }
                }

                self.write_build_outputs(programs, uplc_dump)?;

                Ok(())
                let json = serde_json::to_string_pretty(&blueprint).unwrap();
                fs::write(&blueprint_path, json).map_err(|error| Error::FileIo {
                    error,
                    path: blueprint_path,
                })
            }
            CodeGenMode::Test {
                match_tests,

@@ -409,170 +447,6 @@ where
        Ok(())
    }

    fn validate_validators(&self) -> Result<Vec<(PathBuf, String, TypedFunction)>, Error> {
        let mut errors = Vec::new();
        let mut validators = Vec::new();

        for module in self.checked_modules.validators() {
            for def in module.ast.definitions() {
                if let Definition::Fn(func_def) = def {
                    if VALIDATOR_NAMES.contains(&func_def.name.as_str()) {
                        // validators must return a Bool
                        if !func_def.return_type.is_bool() {
                            errors.push(Error::ValidatorMustReturnBool {
                                location: func_def.location,
                                src: module.code.clone(),
                                path: module.input_path.clone(),
                                named: NamedSource::new(
                                    module.input_path.display().to_string(),
                                    module.code.clone(),
                                ),
                            })
                        }

                        // depending on name, validate the minimum number of arguments
                        // if too low, push a new error on to errors
                        if [MINT, WITHDRAW, PUBLISH].contains(&func_def.name.as_str())
                            && func_def.arguments.len() < 2
                        {
                            errors.push(Error::WrongValidatorArity {
                                location: func_def.location,
                                src: module.code.clone(),
                                path: module.input_path.clone(),
                                named: NamedSource::new(
                                    module.input_path.display().to_string(),
                                    module.code.clone(),
                                ),
                                name: func_def.name.clone(),
                                at_least: 2,
                            })
                        }

                        if SPEND == func_def.name && func_def.arguments.len() < 3 {
                            errors.push(Error::WrongValidatorArity {
                                location: func_def.location,
                                src: module.code.clone(),
                                path: module.input_path.clone(),
                                named: NamedSource::new(
                                    module.input_path.display().to_string(),
                                    module.code.clone(),
                                ),
                                name: func_def.name.clone(),
                                at_least: 3,
                            })
                        }

                        validators.push((
                            module.input_path.clone(),
                            module.name.clone(),
                            func_def.clone(),
                        ));
                    }
                }
            }
        }

        if errors.is_empty() {
            Ok(validators)
        } else {
            Err(Error::List(errors))
        }
    }

    fn code_gen(
        &mut self,
        validators: Vec<(PathBuf, String, TypedFunction)>,
    ) -> Result<Vec<Script>, Error> {
        let mut programs = Vec::new();
        let mut functions = IndexMap::new();
        let mut type_aliases = IndexMap::new();
        let mut data_types = IndexMap::new();

        let prelude_functions = builtins::prelude_functions(&self.id_gen);
        for (access_key, func) in prelude_functions.iter() {
            functions.insert(access_key.clone(), func);
        }

        let option_data_type = TypedDataType::option(generic_var(self.id_gen.next()));
        data_types.insert(
            DataTypeKey {
                module_name: "".to_string(),
                defined_type: "Option".to_string(),
            },
            &option_data_type,
        );

        for module in self.checked_modules.values() {
            for def in module.ast.definitions() {
                match def {
                    Definition::Fn(func) => {
                        functions.insert(
                            FunctionAccessKey {
                                module_name: module.name.clone(),
                                function_name: func.name.clone(),
                                variant_name: String::new(),
                            },
                            func,
                        );
                    }
                    Definition::TypeAlias(ta) => {
                        type_aliases.insert((module.name.clone(), ta.alias.clone()), ta);
                    }
                    Definition::DataType(dt) => {
                        data_types.insert(
                            DataTypeKey {
                                module_name: module.name.clone(),
                                defined_type: dt.name.clone(),
                            },
                            dt,
                        );
                    }

                    Definition::ModuleConstant(_) | Definition::Test(_) | Definition::Use(_) => {}
                }
            }
        }

        for (input_path, module_name, func_def) in validators {
            let Function {
                arguments,
                name,
                body,
                ..
            } = func_def;

            let mut modules_map = IndexMap::new();

            modules_map.extend(self.module_types.clone());

            let mut generator = CodeGenerator::new(
                &functions,
                // &type_aliases,
                &data_types,
                &modules_map,
            );

            self.event_listener.handle_event(Event::GeneratingUPLC {
                output_path: self.output_path().join(&module_name).join(&name),
                name: format!("{}.{}", module_name, name),
            });

            let program = generator.generate(body, arguments, true);

            let script = Script::new(
                input_path,
                module_name,
                name,
                program.try_into().unwrap(),
                None,
            );

            programs.push(script);
        }

        Ok(programs)
    }

    fn collect_scripts(
        &mut self,
        verbose: bool,

@@ -654,26 +528,20 @@ where
            })
        }

        let mut modules_map = IndexMap::new();

        modules_map.extend(self.module_types.clone());

        let mut generator = CodeGenerator::new(
            &functions,
            // &type_aliases,
            &data_types,
            &modules_map,
        );
        let mut generator =
            CodeGenerator::new(functions.clone(), data_types.clone(), &self.module_types);

        let evaluation_hint = if let Some((bin_op, left_src, right_src)) = func_def.test_hint()
        {
            let left = CodeGenerator::new(&functions, &data_types, &modules_map)
                .generate(*left_src, vec![], false)
            let left = generator
                .clone()
                .generate(&left_src, &[], false)
                .try_into()
                .unwrap();

            let right = CodeGenerator::new(&functions, &data_types, &modules_map)
                .generate(*right_src, vec![], false)
            let right = generator
                .clone()
                .generate(&right_src, &[], false)
                .try_into()
                .unwrap();

@@ -686,7 +554,7 @@ where
            None
        };

        let program = generator.generate(body.clone(), arguments.clone(), false);
        let program = generator.generate(body, arguments, false);

        let script = Script::new(
            input_path,

@@ -786,91 +654,6 @@ where
            .collect()
    }

    fn output_path(&self) -> PathBuf {
        self.root.join("assets")
    }

    fn write_build_outputs(&self, programs: Vec<Script>, uplc_dump: bool) -> Result<(), Error> {
        for script in programs {
            let script_output_dir = self.output_path().join(script.module).join(script.name);

            fs::create_dir_all(&script_output_dir)?;

            // dump textual uplc
            if uplc_dump {
                let uplc_path = script_output_dir.join("raw.uplc");

                fs::write(uplc_path, script.program.to_pretty())?;
            }

            let program: Program<DeBruijn> = script.program.into();

            let cbor = program.to_cbor().unwrap();

            // Create file containing just the script cbor hex
            let script_path = script_output_dir.join("script.cbor");

            let cbor_hex = hex::encode(&cbor);

            fs::write(script_path, cbor_hex)?;

            // Create the payment script JSON file
            let payment_script_path = script_output_dir.join("payment_script.json");

            let mut bytes = Vec::new();

            let mut encoder = minicbor::Encoder::new(&mut bytes);

            encoder.bytes(&cbor).unwrap();

            let prefixed_cbor_hex = hex::encode(&bytes);

            let payment_script = json!({
                "type": "PlutusScriptV2",
                "description": "Generated by Aiken",
                "cborHex": prefixed_cbor_hex
            });

            fs::write(
                payment_script_path,
                serde_json::to_string_pretty(&payment_script).unwrap(),
            )?;

            // Create mainnet and testnet addresses
            let plutus_script = babbage::PlutusV2Script(cbor.into());

            let hash = plutus_script.compute_hash();

            // mainnet
            let mainnet_path = script_output_dir.join("mainnet.addr");
            let mut mainnet_bytes: Vec<u8> = vec![0b01110001];

            mainnet_bytes.extend(hash.iter());

            let mainnet_addr = Address::from_bytes(&mainnet_bytes)
                .unwrap()
                .to_bech32()
                .unwrap();

            fs::write(mainnet_path, mainnet_addr)?;

            // testnet
            let testnet_path = script_output_dir.join("testnet.addr");
            let mut testnet_bytes: Vec<u8> = vec![0b01110000];

            testnet_bytes.extend(hash.iter());

            let testnet_addr = Address::from_bytes(&testnet_bytes)
                .unwrap()
                .to_bech32()
                .unwrap();

            fs::write(testnet_path, testnet_addr)?;
        }

        Ok(())
    }

    fn aiken_files(&mut self, dir: &Path, kind: ModuleKind) -> Result<(), Error> {
        let paths = walkdir::WalkDir::new(dir)
            .follow_links(true)
@@ -1,17 +1,21 @@
use crate::error::Error;
use aiken_lang::{
    ast::{
        DataType, Definition, ModuleKind, TypedDataType, TypedFunction, TypedModule, UntypedModule,
    },
    builder::{DataTypeKey, FunctionAccessKey},
    parser::extra::{comments_before, Comment, ModuleExtra},
    tipo::TypeInfo,
    uplc::CodeGenerator,
    VALIDATOR_NAMES,
};
use petgraph::{algo, graph::NodeIndex, Direction, Graph};
use std::{
    collections::{HashMap, HashSet},
    ops::{Deref, DerefMut},
    path::PathBuf,
};

use aiken_lang::{
    ast::{DataType, Definition, ModuleKind, TypedModule, UntypedModule},
    parser::extra::{comments_before, Comment, ModuleExtra},
};
use petgraph::{algo, graph::NodeIndex, Direction, Graph};

use crate::error::Error;

#[derive(Debug)]
pub struct ParsedModule {
    pub path: PathBuf,

@@ -233,9 +237,25 @@ impl From<CheckedModules> for HashMap<String, CheckedModule> {
    }
}

impl<'a> From<&'a CheckedModules> for &'a HashMap<String, CheckedModule> {
    fn from(checked_modules: &'a CheckedModules) -> Self {
        &checked_modules.0
    }
}

impl CheckedModules {
    pub fn validators(&self) -> impl Iterator<Item = &CheckedModule> {
        self.0.values().filter(|module| module.kind.is_validator())
    pub fn validators(&self) -> impl Iterator<Item = (&CheckedModule, &TypedFunction)> {
        let mut items = vec![];
        for validator in self.0.values().filter(|module| module.kind.is_validator()) {
            for some_definition in validator.ast.definitions() {
                if let Definition::Fn(def) = some_definition {
                    if VALIDATOR_NAMES.contains(&def.name.as_str()) {
                        items.push((validator, def));
                    }
                }
            }
        }
        items.into_iter()
    }

    pub fn into_validators(self) -> impl Iterator<Item = CheckedModule> {

@@ -243,6 +263,55 @@ impl CheckedModules {
            .into_values()
            .filter(|module| module.kind.is_validator())
    }

    pub fn new_generator<'a>(
        &'a self,
        builtin_functions: &'a HashMap<FunctionAccessKey, TypedFunction>,
        builtin_data_types: &'a HashMap<DataTypeKey, TypedDataType>,
        module_types: &'a HashMap<String, TypeInfo>,
    ) -> CodeGenerator<'a> {
        let mut functions = HashMap::new();
        for (k, v) in builtin_functions {
            functions.insert(k.clone(), v);
        }

        let mut data_types = HashMap::new();
        for (k, v) in builtin_data_types {
            data_types.insert(k.clone(), v);
        }

        for module in self.values() {
            for def in module.ast.definitions() {
                match def {
                    Definition::Fn(func) => {
                        functions.insert(
                            FunctionAccessKey {
                                module_name: module.name.clone(),
                                function_name: func.name.clone(),
                                variant_name: String::new(),
                            },
                            func,
                        );
                    }
                    Definition::DataType(dt) => {
                        data_types.insert(
                            DataTypeKey {
                                module_name: module.name.clone(),
                                defined_type: dt.name.clone(),
                            },
                            dt,
                        );
                    }

                    Definition::TypeAlias(_)
                    | Definition::ModuleConstant(_)
                    | Definition::Test(_)
                    | Definition::Use(_) => {}
                }
            }
        }
        CodeGenerator::new(functions, data_types, module_types)
    }
}

impl Deref for CheckedModules {
@@ -19,9 +19,8 @@ pub enum Event {
    GeneratingDocFiles {
        output_path: PathBuf,
    },
    GeneratingUPLC {
        output_path: PathBuf,
        name: String,
    GeneratingBlueprint {
        path: PathBuf,
    },
    GeneratingUPLCFor {
        name: String,
@@ -94,12 +94,12 @@ impl telemetry::EventListener for Terminal {
            telemetry::Event::WaitingForBuildDirLock => {
                println!("{}", "Waiting for build directory lock ...".bold().purple());
            }
            telemetry::Event::GeneratingUPLC { output_path, name } => {
            telemetry::Event::GeneratingBlueprint { path } => {
                println!(
                    "{} {} in {}",
                    "{} {} ({})",
                    " Generating".bold().purple(),
                    name.bold(),
                    output_path.display().bright_blue()
                    "contract blueprint".bold(),
                    path.display().bright_blue()
                );
            }
            telemetry::Event::GeneratingDocFiles { output_path } => {