Compare commits

...

10 Commits

Author SHA1 Message Date
waalge 6dfac19942 bump flake. rm cargo patch. fix version 2025-03-25 09:44:30 +00:00
Lucas 9bd670767d
Merge pull request #1135 from aiken-lang/fix_bytearray_comments
fix bytearray comments
2025-03-24 00:32:25 -04:00
rvcas 443a54d4ef
chore: update change lang 2025-03-23 23:15:28 -04:00
rvcas 871951933e
chore: forgot a snapshot 2025-03-23 23:15:04 -04:00
rvcas e1104f4293
test: format_allow_comments_in_byte_array 2025-03-23 23:14:50 -04:00
rvcas abcbe48267
chore: add new snapshots and fix errors introduced from ast changes 2025-03-23 23:14:33 -04:00
rvcas e5211cf792
feat: add support for capturing spans
inside of UntypedExpr::ByteArray
      and UntypedPattern::ByteArray
2025-03-23 23:11:07 -04:00
rvcas cf4534be31 chore: update changelog 2025-03-23 21:46:13 -04:00
rvcas 824431371e feat: centralize some Cargo.toml configs at root and upgrade to edition 2024 2025-03-23 21:46:13 -04:00
rvcas 5f9b5ac781 feat: basic ability to have many projects in one repo 2025-03-23 21:46:13 -04:00
113 changed files with 1062 additions and 616 deletions

View File

@ -1,6 +1,25 @@
# Changelog
## v1.1.15 - unreleased
## v1.1.16 - unreleased
### Added
- **aiken**: Very simple support for monorepos via a `members` property in the
root `aiken.toml` file. Globs are supported, so one could do this:
```toml
members = ["pkgs/*"]
```
@rvcas
### Fixed
- **aiken**: Summary should always print at the end of the output, not just when
the number of checks plus warnings is greater than zero. @rvcas
- **aiken-lang**: Fix comments not being able to occur in ByteArray array
members. @rvcas
## v1.1.14 - 2025-03-21

1
Cargo.lock generated vendored
View File

@ -144,6 +144,7 @@ dependencies = [
"dirs",
"fslock",
"futures",
"glob",
"hex",
"ignore",
"indexmap 1.9.3",

View File

@ -2,6 +2,21 @@
members = ["crates/*"]
resolver = "2"
[workspace.package]
description = "Cardano smart contract language and toolchain"
documentation = "https://aiken-lang.org"
version = "1.1.15"
edition = "2024"
repository = "https://github.com/aiken-lang/aiken"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
authors = [
"Lucas Rosa <x@rvcas.dev>",
"Kasey White <kwhitemsg@gmail.com>",
"KtorZ <matthias.benkort@gmail.com>",
]
rust-version = "1.85.0"
[workspace.metadata.release]
shared-version = true
tag-name = "v{{version}}"
@ -17,7 +32,12 @@ installers = ["shell", "powershell", "npm", "homebrew"]
# A GitHub repo to push Homebrew formulas to
tap = "aiken-lang/homebrew-tap"
# Target platforms to build apps for (Rust target-triple syntax)
targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-musl", "x86_64-pc-windows-msvc"]
targets = [
"aarch64-apple-darwin",
"x86_64-apple-darwin",
"x86_64-unknown-linux-musl",
"x86_64-pc-windows-msvc",
]
# The archive format to use for windows builds (defaults .zip)
windows-archive = ".tar.gz"
# The archive format to use for non-windows builds (defaults .tar.xz)

View File

@ -1,17 +1,15 @@
[package]
name = "aiken-lang"
description = "The Aiken compiler"
version = "1.1.15"
edition = "2021"
repository = "https://github.com/aiken-lang/aiken"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
authors = [
"Lucas Rosa <x@rvcas.dev>",
"Kasey White <kwhitemsg@gmail.com>",
"KtorZ <matthias.benkort@gmail.com>",
]
rust-version = "1.80.0"
version.workspace = true
edition.workspace = true
description.workspace = true
license.workspace = true
authors.workspace = true
repository.workspace = true
homepage.workspace = true
documentation.workspace = true
rust-version.workspace = true
build = "build.rs"
[dependencies]

View File

@ -17,7 +17,7 @@ use std::{
rc::Rc,
};
use uplc::machine::runtime::Compressable;
use vec1::{vec1, Vec1};
use vec1::{Vec1, vec1};
pub const BACKPASS_VARIABLE: &str = "_backpass";
pub const CAPTURE_VARIABLE: &str = "_capture";
@ -956,7 +956,7 @@ pub struct CallArg<A> {
impl CallArg<UntypedExpr> {
pub fn is_capture_hole(&self) -> bool {
match &self.value {
UntypedExpr::Var { ref name, .. } => name.contains(CAPTURE_VARIABLE),
UntypedExpr::Var { name, .. } => name.contains(CAPTURE_VARIABLE),
_ => false,
}
}
@ -1535,8 +1535,8 @@ impl BinOp {
}
}
pub type UntypedPattern = Pattern<(), (), Namespace>;
pub type TypedPattern = Pattern<PatternConstructor, Rc<Type>, String>;
pub type UntypedPattern = Pattern<(), (), Namespace, (u8, Span)>;
pub type TypedPattern = Pattern<PatternConstructor, Rc<Type>, String, u8>;
impl TypedPattern {
pub fn var(name: &str) -> Self {
@ -1660,7 +1660,7 @@ pub enum Namespace {
}
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
pub enum Pattern<Constructor, Type, NamespaceKind> {
pub enum Pattern<Constructor, Type, NamespaceKind, ByteValue> {
Int {
location: Span,
value: String,
@ -1669,7 +1669,7 @@ pub enum Pattern<Constructor, Type, NamespaceKind> {
ByteArray {
location: Span,
value: Vec<u8>,
value: Vec<ByteValue>,
preferred_format: ByteArrayFormatPreference,
},
@ -1731,7 +1731,7 @@ pub enum Pattern<Constructor, Type, NamespaceKind> {
},
}
impl<A, B, C> Pattern<A, B, C> {
impl<A, B, C, BV> Pattern<A, B, C, BV> {
pub fn location(&self) -> Span {
match self {
Pattern::Assign { pattern, .. } => pattern.location(),
@ -2207,11 +2207,11 @@ impl<T: Default> AssignmentKind<T> {
}
}
pub type MultiPattern<PatternConstructor, Type, NamespaceKind> =
Vec<Pattern<PatternConstructor, Type, NamespaceKind>>;
pub type MultiPattern<PatternConstructor, Type, NamespaceKind, ByteValue> =
Vec<Pattern<PatternConstructor, Type, NamespaceKind, ByteValue>>;
pub type UntypedMultiPattern = MultiPattern<(), (), Namespace>;
pub type TypedMultiPattern = MultiPattern<PatternConstructor, Rc<Type>, String>;
pub type UntypedMultiPattern = MultiPattern<(), (), Namespace, (u8, Span)>;
pub type TypedMultiPattern = MultiPattern<PatternConstructor, Rc<Type>, String, u8>;
#[derive(Debug, Clone, PartialEq)]
pub struct UntypedClause {

View File

@ -1,16 +1,15 @@
use crate::{
aiken_fn,
IdGenerator, aiken_fn,
ast::{
well_known, Annotation, ArgName, CallArg, DataType, DataTypeKey, Function,
FunctionAccessKey, ModuleKind, OnTestFailure, RecordConstructor, RecordConstructorArg,
Span, TypedArg, TypedDataType, TypedFunction, UnOp,
Annotation, ArgName, CallArg, DataType, DataTypeKey, Function, FunctionAccessKey,
ModuleKind, OnTestFailure, RecordConstructor, RecordConstructorArg, Span, TypedArg,
TypedDataType, TypedFunction, UnOp, well_known,
},
expr::TypedExpr,
tipo::{
fields::FieldMap, Type, TypeConstructor, TypeInfo, ValueConstructor,
ValueConstructorVariant,
Type, TypeConstructor, TypeInfo, ValueConstructor, ValueConstructorVariant,
fields::FieldMap,
},
IdGenerator,
};
use std::{collections::BTreeSet, sync::LazyLock};

View File

@ -9,17 +9,17 @@ pub(crate) use crate::{
},
parser::token::Base,
tipo::{
check_replaceable_opaque_type, convert_opaque_type, lookup_data_type_by_tipo,
ModuleValueConstructor, Type, TypeVar, ValueConstructor, ValueConstructorVariant,
check_replaceable_opaque_type, convert_opaque_type, lookup_data_type_by_tipo,
},
};
use indexmap::IndexMap;
use pallas_primitives::alonzo::{Constr, PlutusData};
use std::{fmt::Debug, rc::Rc};
use uplc::{
KeyValuePairs,
ast::Data,
machine::{runtime::convert_tag_to_constr, value::from_pallas_bigint},
KeyValuePairs,
};
use vec1::Vec1;
@ -649,7 +649,7 @@ pub enum UntypedExpr {
ByteArray {
location: Span,
bytes: Vec<u8>,
bytes: Vec<(u8, Span)>,
preferred_format: ByteArrayFormatPreference,
},
@ -977,7 +977,7 @@ impl UntypedExpr {
location: Span::empty(),
value: UntypedExpr::ByteArray {
location: Span::empty(),
bytes,
bytes: bytes.into_iter().map(|b| (b, Span::empty())).collect(),
preferred_format: ByteArrayFormatPreference::HexadecimalString,
},
}],
@ -1001,11 +1001,15 @@ impl UntypedExpr {
value: from_pallas_bigint(i).to_string(),
},
PlutusData::BoundedBytes(bytes) => UntypedExpr::ByteArray {
location: Span::empty(),
bytes: bytes.into(),
preferred_format: ByteArrayFormatPreference::HexadecimalString,
},
PlutusData::BoundedBytes(bytes) => {
let bytes: Vec<u8> = bytes.into();
UntypedExpr::ByteArray {
location: Span::empty(),
bytes: bytes.into_iter().map(|b| (b, Span::empty())).collect(),
preferred_format: ByteArrayFormatPreference::HexadecimalString,
}
}
PlutusData::Array(elems) => UntypedExpr::List {
location: Span::empty(),
@ -1113,9 +1117,10 @@ impl UntypedExpr {
value: String::from_utf8(bytes.to_vec()).expect("invalid UTF-8 string"),
})
} else {
let bytes: Vec<u8> = bytes.into();
Ok(UntypedExpr::ByteArray {
location: Span::empty(),
bytes: bytes.into(),
bytes: bytes.into_iter().map(|b| (b, Span::empty())).collect(),
preferred_format: ByteArrayFormatPreference::HexadecimalString,
})
}
@ -1219,31 +1224,23 @@ impl UntypedExpr {
name: constructor.name.to_string(),
})
} else {
let arguments =
fields
.to_vec()
.into_iter()
.zip(constructor.arguments.iter())
.map(
|(
field,
RecordConstructorArg {
ref label,
ref tipo,
..
},
)| {
UntypedExpr::do_reify_data(
generics, data_types, field, tipo,
)
.map(|value| CallArg {
label: label.clone(),
location: Span::empty(),
value,
})
},
let arguments = fields
.to_vec()
.into_iter()
.zip(constructor.arguments.iter())
.map(|(field, RecordConstructorArg { label, tipo, .. })| {
UntypedExpr::do_reify_data(
generics, data_types, field, tipo,
)
.collect::<Result<Vec<_>, _>>()?;
.map(|value| {
CallArg {
label: label.clone(),
location: Span::empty(),
value,
}
})
})
.collect::<Result<Vec<_>, _>>()?;
Ok(UntypedExpr::Call {
location: Span::empty(),

View File

@ -1,20 +1,20 @@
use crate::{
ast::{
Annotation, ArgBy, ArgName, ArgVia, AssignmentKind, AssignmentPattern, BinOp,
ByteArrayFormatPreference, CallArg, CurveType, DataType, Definition, Function,
LogicalOpChainKind, ModuleConstant, Namespace, OnTestFailure, Pattern, RecordConstructor,
RecordConstructorArg, RecordUpdateSpread, Span, TraceKind, TypeAlias, TypedArg,
TypedValidator, UnOp, UnqualifiedImport, UntypedArg, UntypedArgVia, UntypedAssignmentKind,
UntypedClause, UntypedDefinition, UntypedFunction, UntypedIfBranch, UntypedModule,
UntypedPattern, UntypedRecordUpdateArg, Use, Validator, CAPTURE_VARIABLE,
ByteArrayFormatPreference, CAPTURE_VARIABLE, CallArg, CurveType, DataType, Definition,
Function, LogicalOpChainKind, ModuleConstant, Namespace, OnTestFailure, Pattern,
RecordConstructor, RecordConstructorArg, RecordUpdateSpread, Span, TraceKind, TypeAlias,
TypedArg, TypedValidator, UnOp, UnqualifiedImport, UntypedArg, UntypedArgVia,
UntypedAssignmentKind, UntypedClause, UntypedDefinition, UntypedFunction, UntypedIfBranch,
UntypedModule, UntypedPattern, UntypedRecordUpdateArg, Use, Validator,
},
docvec,
expr::{FnStyle, TypedExpr, UntypedExpr, DEFAULT_ERROR_STR, DEFAULT_TODO_STR},
expr::{DEFAULT_ERROR_STR, DEFAULT_TODO_STR, FnStyle, TypedExpr, UntypedExpr},
parser::{
extra::{Comment, ModuleExtra},
token::Base,
},
pretty::{break_, concat, flex_break, join, line, lines, nil, Document, Documentable},
pretty::{Document, Documentable, break_, concat, flex_break, join, line, lines, nil},
tipo::{self, Type},
};
use itertools::Itertools;
@ -84,7 +84,10 @@ impl<'comments> Formatter<'comments> {
// Pop comments that occur before a byte-index in the source, consuming
// and retaining any empty lines contained within.
fn pop_comments(&mut self, limit: usize) -> impl Iterator<Item = Option<&'comments str>> {
fn pop_comments(
&mut self,
limit: usize,
) -> impl Iterator<Item = Option<&'comments str>> + use<'comments> {
let (popped, rest, empty_lines) =
comments_before(self.comments, self.empty_lines, limit, true);
@ -369,13 +372,24 @@ impl<'comments> Formatter<'comments> {
bytes,
preferred_format,
..
} => self.bytearray(bytes, None, preferred_format),
} => self.bytearray(
&bytes
.iter()
.map(|b| (*b, Span::empty()))
.collect::<Vec<(u8, Span)>>(),
None,
preferred_format,
),
TypedExpr::CurvePoint {
point,
preferred_format,
..
} => self.bytearray(
&point.compress(),
&point
.compress()
.into_iter()
.map(|b| (b, Span::empty()))
.collect::<Vec<(u8, Span)>>(),
Some(point.as_ref().into()),
preferred_format,
),
@ -892,7 +906,7 @@ impl<'comments> Formatter<'comments> {
pub fn bytearray<'a>(
&mut self,
bytes: &[u8],
bytes: &[(u8, Span)],
curve: Option<CurveType>,
preferred_format: &ByteArrayFormatPreference,
) -> Document<'a> {
@ -903,7 +917,9 @@ impl<'comments> Formatter<'comments> {
curve.map(|c| c.to_string()).unwrap_or_default(),
))
.append("\"")
.append(Document::String(hex::encode(bytes)))
.append(Document::String(hex::encode(
bytes.iter().map(|(b, _)| *b).collect::<Vec<u8>>(),
)))
.append("\""),
ByteArrayFormatPreference::ArrayOfBytes(Base::Decimal { .. }) => "#"
.to_doc()
@ -911,8 +927,19 @@ impl<'comments> Formatter<'comments> {
curve.map(|c| c.to_string()).unwrap_or_default(),
))
.append(
flex_break("[", "[")
.append(join(bytes.iter().map(|b| b.to_doc()), break_(",", ", ")))
break_("[", "[")
.append(join(
bytes.iter().map(|b| {
let doc = b.0.to_doc();
if b.1 == Span::empty() {
doc
} else {
commented(doc, self.pop_comments(b.1.start))
}
}),
break_(",", ", "),
))
.nest(INDENT)
.append(break_(",", ""))
.append("]"),
@ -924,14 +951,20 @@ impl<'comments> Formatter<'comments> {
curve.map(|c| c.to_string()).unwrap_or_default(),
))
.append(
flex_break("[", "[")
break_("[", "[")
.append(join(
bytes.iter().map(|b| {
Document::String(if *b < 16 {
format!("0x0{b:x}")
let doc = Document::String(if b.0 < 16 {
format!("0x0{:x}", b.0)
} else {
format!("{b:#x}")
})
format!("{:#x}", b.0)
});
if b.1 == Span::empty() {
doc
} else {
commented(doc, self.pop_comments(b.1.start))
}
}),
break_(",", ", "),
))
@ -943,7 +976,8 @@ impl<'comments> Formatter<'comments> {
ByteArrayFormatPreference::Utf8String => nil()
.append("\"")
.append(Document::String(escape(
core::str::from_utf8(bytes).unwrap(),
core::str::from_utf8(&bytes.iter().map(|(b, _)| *b).collect::<Vec<u8>>())
.unwrap(),
)))
.append("\""),
}
@ -1004,7 +1038,11 @@ impl<'comments> Formatter<'comments> {
preferred_format,
..
} => self.bytearray(
&point.compress(),
&point
.compress()
.into_iter()
.map(|b| (b, Span::empty()))
.collect::<Vec<(u8, Span)>>(),
Some(point.as_ref().into()),
preferred_format,
),
@ -2130,11 +2168,7 @@ impl<'a> Documentable<'a> for &'a ArgName {
}
fn pub_(public: bool) -> Document<'static> {
if public {
"pub ".to_doc()
} else {
nil()
}
if public { "pub ".to_doc() } else { nil() }
}
impl<'a> Documentable<'a> for &'a UnqualifiedImport {

View File

@ -8,13 +8,14 @@ pub mod tree;
use self::{
air::Air,
builder::{
AssignmentProperties, CodeGenSpecialFuncs, CycleFunctionNames, HoistableFunction, Variant,
cast_validator_args, convert_type_to_data, extract_constant, modify_cyclic_calls,
modify_self_calls, AssignmentProperties, CodeGenSpecialFuncs, CycleFunctionNames,
HoistableFunction, Variant,
modify_self_calls,
},
tree::{AirTree, TreePath},
};
use crate::{
IdGenerator,
ast::{
AssignmentKind, BinOp, Bls12_381Point, Curve, DataTypeKey, FunctionAccessKey, Pattern,
Span, TraceLevel, Tracing, TypedArg, TypedDataType, TypedFunction, TypedPattern,
@ -25,30 +26,29 @@ use crate::{
gen_uplc::{
air::ExpectLevel,
builder::{
erase_opaque_type_operations, get_generic_variant_name, get_line_columns_by_span,
get_src_code_by_span, known_data_to_type, monomorphize, wrap_validator_condition,
CodeGenFunction,
CodeGenFunction, erase_opaque_type_operations, get_generic_variant_name,
get_line_columns_by_span, get_src_code_by_span, known_data_to_type, monomorphize,
wrap_validator_condition,
},
},
line_numbers::LineNumbers,
plutus_version::PlutusVersion,
tipo::{
check_replaceable_opaque_type, convert_opaque_type, find_and_replace_generics,
get_arg_type_name, get_generic_id_and_type, lookup_data_type_by_tipo,
ModuleValueConstructor, PatternConstructor, Type, TypeInfo, ValueConstructor,
ValueConstructorVariant,
ValueConstructorVariant, check_replaceable_opaque_type, convert_opaque_type,
find_and_replace_generics, get_arg_type_name, get_generic_id_and_type,
lookup_data_type_by_tipo,
},
IdGenerator,
};
use builder::{
introduce_name, introduce_pattern, pop_pattern, softcast_data_to_type_otherwise,
unknown_data_to_type, DISCARDED,
DISCARDED, introduce_name, introduce_pattern, pop_pattern, softcast_data_to_type_otherwise,
unknown_data_to_type,
};
use decision_tree::{get_tipo_by_path, Assigned, CaseTest, DecisionTree, TreeGen};
use decision_tree::{Assigned, CaseTest, DecisionTree, TreeGen, get_tipo_by_path};
use indexmap::IndexMap;
use interner::AirInterner;
use itertools::Itertools;
use petgraph::{algo, Graph};
use petgraph::{Graph, algo};
use std::{collections::HashMap, rc::Rc};
use stick_break_set::{Builtins, TreeSet};
use tree::Fields;
@ -2451,7 +2451,7 @@ impl<'a> CodeGenerator<'a> {
let last_clause = if data_type
.as_ref()
.map_or(true, |d| d.constructors.len() != cases.len())
.is_none_or(|d| d.constructors.len() != cases.len())
{
*default.unwrap()
} else {

View File

@ -10,19 +10,19 @@ use crate::{
},
line_numbers::{LineColumn, LineNumbers},
tipo::{
check_replaceable_opaque_type, convert_opaque_type, find_and_replace_generics, Type,
ValueConstructor, ValueConstructorVariant,
Type, ValueConstructor, ValueConstructorVariant, check_replaceable_opaque_type,
convert_opaque_type, find_and_replace_generics,
},
};
use indexmap::IndexMap;
use itertools::{Itertools, Position};
use std::{ops::Deref, rc::Rc};
use uplc::{
KeyValuePairs, PlutusData,
ast::{Constant as UplcConstant, Data, Name, Term, Type as UplcType},
builder::{CONSTR_FIELDS_EXPOSER, CONSTR_INDEX_EXPOSER},
builtins::DefaultFunction,
machine::{runtime::Compressable, value::to_pallas_bigint},
KeyValuePairs, PlutusData,
};
pub type Variant = String;

View File

@ -7,7 +7,7 @@ use itertools::{Either, Itertools, Position};
use crate::{
ast::{DataTypeKey, Pattern, TypedClause, TypedDataType, TypedPattern},
expr::{lookup_data_type_by_tipo, Type, TypeVar, TypedExpr},
expr::{Type, TypeVar, TypedExpr, lookup_data_type_by_tipo},
};
use super::{interner::AirInterner, tree::AirTree};
@ -726,10 +726,12 @@ impl<'a, 'b> TreeGen<'a, 'b> {
// First step make sure all rows have same number of columns
// or something went wrong
assert!(matrix
.rows
.iter()
.all(|row| { row.columns.len() == column_length }));
assert!(
matrix
.rows
.iter()
.all(|row| { row.columns.len() == column_length })
);
// Find which column has the most important pattern
let occurrence_col = highest_occurrence(&matrix, column_length);
@ -1367,6 +1369,7 @@ mod tester {
use indexmap::IndexMap;
use crate::{
IdGenerator,
ast::{
Definition, ModuleKind, Span, TraceLevel, Tracing, TypedModule, TypedPattern,
UntypedModule,
@ -1376,7 +1379,7 @@ mod tester {
gen_uplc::{decision_tree::TreeGen, interner::AirInterner},
parser,
tipo::error::{Error, Warning},
utils, IdGenerator,
utils,
};
fn parse(source_code: &str) -> UntypedModule {

View File

@ -1,6 +1,6 @@
use std::collections::HashMap;
use vec1::{vec1, Vec1};
use vec1::{Vec1, vec1};
#[derive(Clone)]
pub struct AirInterner {

View File

@ -6,7 +6,7 @@ use uplc::{builder::CONSTR_FIELDS_EXPOSER, builtins::DefaultFunction};
use crate::expr::Type;
use super::{
decision_tree::{get_tipo_by_path, CaseTest, Path},
decision_tree::{CaseTest, Path, get_tipo_by_path},
tree::AirTree,
};

View File

@ -1,6 +1,6 @@
use std::sync::{
atomic::{AtomicU64, Ordering},
Arc,
atomic::{AtomicU64, Ordering},
};
pub mod ast;

View File

@ -2,7 +2,7 @@ use super::Chain;
use crate::{
ast::CallArg,
expr::UntypedExpr,
parser::{token::Token, ParseError},
parser::{ParseError, token::Token},
};
use chumsky::prelude::*;

View File

@ -1,7 +1,7 @@
use super::Chain;
use crate::{
ast::well_known,
parser::{token::Token, ParseError},
parser::{ParseError, token::Token},
};
use chumsky::prelude::*;

View File

@ -1,7 +1,7 @@
use chumsky::prelude::*;
use super::Chain;
use crate::parser::{token::Token, ParseError};
use crate::parser::{ParseError, token::Token};
pub(crate) fn parser() -> impl Parser<Token, Chain, Error = ParseError> {
just(Token::Dot)

View File

@ -84,8 +84,8 @@ pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError
)
}
fn labeled_constructor_type_args(
) -> impl Parser<Token, Vec<ast::RecordConstructorArg<()>>, Error = ParseError> {
fn labeled_constructor_type_args()
-> impl Parser<Token, Vec<ast::RecordConstructorArg<()>>, Error = ParseError> {
select! {Token::Name {name} => name}
.then_ignore(just(Token::Colon))
.then(annotation())

View File

@ -4,7 +4,7 @@ use crate::{
expr::UntypedExpr,
parser::{
annotation,
chain::{call::parser as call, field_access, tuple_index::parser as tuple_index, Chain},
chain::{Chain, call::parser as call, field_access, tuple_index::parser as tuple_index},
error::ParseError,
expr::{self, bytearray, int as uint, list, string, tuple, var},
pattern,

View File

@ -11,12 +11,14 @@ pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
|bytes, preferred_format, curve, location, emit| match curve {
Some(curve @ ast::CurveType::Bls12_381(point)) => {
let point = match point {
ast::Bls12_381PointType::G1 => {
blst::blst_p1::uncompress(&bytes).map(ast::Bls12_381Point::G1)
}
ast::Bls12_381PointType::G2 => {
blst::blst_p2::uncompress(&bytes).map(ast::Bls12_381Point::G2)
}
ast::Bls12_381PointType::G1 => blst::blst_p1::uncompress(
&bytes.iter().map(|&(byte, _)| byte).collect::<Vec<u8>>(),
)
.map(ast::Bls12_381Point::G1),
ast::Bls12_381PointType::G2 => blst::blst_p2::uncompress(
&bytes.iter().map(|&(byte, _)| byte).collect::<Vec<u8>>(),
)
.map(ast::Bls12_381Point::G2),
};
let point = point.unwrap_or_else(|_err| {

View File

@ -9,7 +9,7 @@ use super::{
use crate::{
expr::UntypedExpr,
parser::{
chain::{call::parser as call, field_access, tuple_index::parser as tuple_index, Chain},
chain::{Chain, call::parser as call, field_access, tuple_index::parser as tuple_index},
error::ParseError,
token::Token,
},

View File

@ -1,5 +1,5 @@
use crate::{
ast::{well_known, TraceKind},
ast::{TraceKind, well_known},
expr::UntypedExpr,
parser::{
error::{ParseError, Pattern},

View File

@ -5,9 +5,18 @@ description: "Code:\n\n#\"00aaff\""
ByteArray {
location: 0..9,
bytes: [
0,
170,
255,
(
0,
0..0,
),
(
170,
0..0,
),
(
255,
0..0,
),
],
preferred_format: HexadecimalString,
}

View File

@ -5,9 +5,18 @@ description: "Code:\n\n#[0, 170, 255]"
ByteArray {
location: 0..14,
bytes: [
0,
170,
255,
(
0,
2..3,
),
(
170,
5..8,
),
(
255,
10..13,
),
],
preferred_format: ArrayOfBytes(
Decimal {

View File

@ -5,11 +5,26 @@ description: "Code:\n\n\"aiken\""
ByteArray {
location: 0..7,
bytes: [
97,
105,
107,
101,
110,
(
97,
0..0,
),
(
105,
0..0,
),
(
107,
0..0,
),
(
101,
0..0,
),
(
110,
0..0,
),
],
preferred_format: Utf8String,
}

View File

@ -5,13 +5,34 @@ description: "Code:\n\n\"\\\"aiken\\\"\""
ByteArray {
location: 0..11,
bytes: [
34,
97,
105,
107,
101,
110,
34,
(
34,
0..0,
),
(
97,
0..0,
),
(
105,
0..0,
),
(
107,
0..0,
),
(
101,
0..0,
),
(
110,
0..0,
),
(
34,
0..0,
),
],
preferred_format: Utf8String,
}

View File

@ -12,11 +12,26 @@ Call {
value: ByteArray {
location: 13..20,
bytes: [
65,
105,
107,
101,
110,
(
65,
0..0,
),
(
105,
0..0,
),
(
107,
0..0,
),
(
101,
0..0,
),
(
110,
0..0,
),
],
preferred_format: Utf8String,
},

View File

@ -12,11 +12,26 @@ Call {
value: ByteArray {
location: 25..32,
bytes: [
65,
105,
107,
101,
110,
(
65,
0..0,
),
(
105,
0..0,
),
(
107,
0..0,
),
(
101,
0..0,
),
(
110,
0..0,
),
],
preferred_format: Utf8String,
},

View File

@ -22,11 +22,26 @@ RecordUpdate {
value: ByteArray {
location: 21..28,
bytes: [
65,
105,
107,
101,
110,
(
65,
0..0,
),
(
105,
0..0,
),
(
107,
0..0,
),
(
101,
0..0,
),
(
110,
0..0,
),
],
preferred_format: Utf8String,
},

View File

@ -19,18 +19,36 @@ Trace {
ByteArray {
location: 18..23,
bytes: [
102,
111,
111,
(
102,
0..0,
),
(
111,
0..0,
),
(
111,
0..0,
),
],
preferred_format: Utf8String,
},
ByteArray {
location: 25..30,
bytes: [
98,
97,
114,
(
98,
0..0,
),
(
97,
0..0,
),
(
114,
0..0,
),
],
preferred_format: Utf8String,
},

View File

@ -20,18 +20,36 @@ Trace {
ByteArray {
location: 19..24,
bytes: [
102,
111,
111,
(
102,
0..0,
),
(
111,
0..0,
),
(
111,
0..0,
),
],
preferred_format: Utf8String,
},
ByteArray {
location: 26..31,
bytes: [
98,
97,
114,
(
98,
0..0,
),
(
97,
0..0,
),
(
114,
0..0,
),
],
preferred_format: Utf8String,
},

View File

@ -10,7 +10,7 @@ use crate::{
pub fn parser<A>(
into: impl Fn(
Vec<u8>,
Vec<(u8, ast::Span)>,
ast::ByteArrayFormatPreference,
Option<ast::CurveType>,
ast::Span,
@ -20,7 +20,13 @@ pub fn parser<A>(
choice((
array_of_bytes(),
hex_string(),
utf8_string().map(|(p, b)| (None, p, b)),
utf8_string().map(|(p, b)| {
(
None,
p,
b.into_iter().map(|b| (b, ast::Span::empty())).collect(),
)
}),
))
.validate(move |(curve, preferred_format, bytes), span, emit| {
into(bytes, preferred_format, curve, span, emit)
@ -66,7 +72,7 @@ pub fn array_of_bytes() -> impl Parser<
(
Option<ast::CurveType>,
ast::ByteArrayFormatPreference,
Vec<u8>,
Vec<(u8, ast::Span)>,
),
Error = ParseError,
> {
@ -86,14 +92,14 @@ pub fn array_of_bytes() -> impl Parser<
0
}
};
(byte, base)
(byte, base, span)
})
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftSquare), just(Token::RightSquare)),
)
.validate(|(curve, bytes), span, emit| {
let base = bytes.iter().try_fold(None, |acc, (_, base)| match acc {
let base = bytes.iter().try_fold(None, |acc, (_, base, _)| match acc {
None => Ok(Some(base)),
Some(previous_base) if previous_base == base => Ok(Some(base)),
_ => Err(()),
@ -114,7 +120,10 @@ pub fn array_of_bytes() -> impl Parser<
(
curve,
bytes.into_iter().map(|(b, _)| b).collect::<Vec<u8>>(),
bytes
.into_iter()
.map(|(b, _, span)| (b, span))
.collect::<Vec<(u8, ast::Span)>>(),
base,
)
})
@ -132,7 +141,7 @@ pub fn hex_string() -> impl Parser<
(
Option<ast::CurveType>,
ast::ByteArrayFormatPreference,
Vec<u8>,
Vec<(u8, ast::Span)>,
),
Error = ParseError,
> {
@ -153,13 +162,13 @@ pub fn hex_string() -> impl Parser<
(
curve,
ast::ByteArrayFormatPreference::HexadecimalString,
token,
token.into_iter().map(|b| (b, ast::Span::empty())).collect(),
)
})
}
pub fn utf8_string(
) -> impl Parser<Token, (ast::ByteArrayFormatPreference, Vec<u8>), Error = ParseError> {
pub fn utf8_string()
-> impl Parser<Token, (ast::ByteArrayFormatPreference, Vec<u8>), Error = ParseError> {
select! {Token::ByteString {value} => value.into_bytes() }
.map(|token| (ast::ByteArrayFormatPreference::Utf8String, token))
}

View File

@ -1,5 +1,5 @@
use crate::{
ast::{well_known, UntypedPattern},
ast::{UntypedPattern, well_known},
parser::{error::ParseError, token::Token},
};
use chumsky::prelude::*;

View File

@ -15,9 +15,18 @@ When {
ByteArray {
location: 18..27,
value: [
0,
171,
205,
(
0,
0..0,
),
(
171,
0..0,
),
(
205,
0..0,
),
],
preferred_format: HexadecimalString,
},
@ -33,19 +42,58 @@ When {
ByteArray {
location: 40..55,
value: [
65,
105,
107,
101,
110,
44,
32,
114,
111,
99,
107,
115,
33,
(
65,
0..0,
),
(
105,
0..0,
),
(
107,
0..0,
),
(
101,
0..0,
),
(
110,
0..0,
),
(
44,
0..0,
),
(
32,
0..0,
),
(
114,
0..0,
),
(
111,
0..0,
),
(
99,
0..0,
),
(
107,
0..0,
),
(
115,
0..0,
),
(
33,
0..0,
),
],
preferred_format: Utf8String,
},
@ -61,10 +109,22 @@ When {
ByteArray {
location: 68..81,
value: [
1,
2,
3,
4,
(
1,
70..71,
),
(
2,
73..74,
),
(
3,
76..77,
),
(
4,
79..80,
),
],
preferred_format: ArrayOfBytes(
Decimal {
@ -84,9 +144,18 @@ When {
ByteArray {
location: 94..113,
value: [
0,
171,
205,
(
0,
96..100,
),
(
171,
102..106,
),
(
205,
108..112,
),
],
preferred_format: ArrayOfBytes(
Hexadecimal,

View File

@ -18,9 +18,18 @@ Module {
value: ByteArray {
location: 21..26,
bytes: [
226,
152,
133,
(
226,
0..0,
),
(
152,
0..0,
),
(
133,
0..0,
),
],
preferred_format: Utf8String,
},

View File

@ -18,7 +18,10 @@ Module {
value: ByteArray {
location: 21..24,
bytes: [
42,
(
42,
0..0,
),
],
preferred_format: Utf8String,
},

View File

@ -4,7 +4,7 @@ use crate::{
format::Formatter,
gen_uplc::CodeGenerator,
plutus_version::PlutusVersion,
tipo::{convert_opaque_type, Type},
tipo::{Type, convert_opaque_type},
};
use cryptoxide::{blake2b::Blake2b, digest::Digest};
use indexmap::IndexMap;
@ -26,7 +26,7 @@ use uplc::{
ast::{Constant, Data, Name, NamedDeBruijn, Program, Term},
machine::{cost_model::ExBudget, eval_result::EvalResult},
};
use vec1::{vec1, Vec1};
use vec1::{Vec1, vec1};
#[derive(Debug, Clone, Copy)]
pub enum RunnableKind {
@ -531,7 +531,9 @@ impl Benchmark {
match prng.sample(&fuzzer) {
Ok(None) => {
panic!("A seeded PRNG returned 'None' which indicates a sampler is ill-formed and implemented wrongly; please contact library's authors.");
panic!(
"A seeded PRNG returned 'None' which indicates a sampler is ill-formed and implemented wrongly; please contact library's authors."
);
}
Ok(Some((new_prng, value))) => {
@ -689,8 +691,10 @@ impl Prng {
fn as_prng(cst: &PlutusData) -> Prng {
if let PlutusData::Constr(Constr { tag, fields, .. }) = cst {
if *tag == 121 + Prng::SEEDED {
if let [PlutusData::BoundedBytes(bytes), PlutusData::BoundedBytes(choices)] =
&fields[..]
if let [
PlutusData::BoundedBytes(bytes),
PlutusData::BoundedBytes(choices),
] = &fields[..]
{
return Prng::Seeded {
choices: choices.to_vec(),
@ -1124,21 +1128,17 @@ impl<U, T> TestResult<U, T> {
pub fn module(&self) -> &str {
match self {
TestResult::UnitTestResult(UnitTestResult { ref test, .. }) => test.module.as_str(),
TestResult::PropertyTestResult(PropertyTestResult { ref test, .. }) => {
test.module.as_str()
}
TestResult::BenchmarkResult(BenchmarkResult { ref bench, .. }) => bench.module.as_str(),
TestResult::UnitTestResult(UnitTestResult { test, .. }) => test.module.as_str(),
TestResult::PropertyTestResult(PropertyTestResult { test, .. }) => test.module.as_str(),
TestResult::BenchmarkResult(BenchmarkResult { bench, .. }) => bench.module.as_str(),
}
}
pub fn title(&self) -> &str {
match self {
TestResult::UnitTestResult(UnitTestResult { ref test, .. }) => test.name.as_str(),
TestResult::PropertyTestResult(PropertyTestResult { ref test, .. }) => {
test.name.as_str()
}
TestResult::BenchmarkResult(BenchmarkResult { ref bench, .. }) => bench.name.as_str(),
TestResult::UnitTestResult(UnitTestResult { test, .. }) => test.name.as_str(),
TestResult::PropertyTestResult(PropertyTestResult { test, .. }) => test.name.as_str(),
TestResult::BenchmarkResult(BenchmarkResult { bench, .. }) => bench.name.as_str(),
}
}
@ -1279,9 +1279,11 @@ impl TryFrom<TypedExpr> for Assertion<TypedExpr> {
final_else,
..
} => {
if let [IfBranch {
condition, body, ..
}] = &branches[..]
if let [
IfBranch {
condition, body, ..
},
] = &branches[..]
{
let then_is_true = match body {
TypedExpr::Var {

View File

@ -1,4 +1,5 @@
use crate::{
IdGenerator,
ast::{
Definition, ModuleKind, Pattern, TraceLevel, Tracing, TypedModule, UntypedModule,
UntypedPattern,
@ -7,7 +8,6 @@ use crate::{
expr::{CallArg, Span, TypedExpr},
parser,
tipo::error::{Error, UnifyErrorSituation, Warning},
IdGenerator,
};
use std::collections::HashMap;
@ -43,7 +43,9 @@ fn check_module(
let mut warnings = vec![];
if module.name == DEFAULT_MODULE_NAME {
panic!("passed extra modules with default name! Use 'parse_as' to define tests instead of 'parse'.");
panic!(
"passed extra modules with default name! Use 'parse_as' to define tests instead of 'parse'."
);
}
let typed_module = module
@ -2976,7 +2978,7 @@ fn correct_span_for_backpassing_args() {
let (warnings, _ast) = check(parse(source_code)).unwrap();
assert!(
matches!(&warnings[0], Warning::UnusedVariable { ref name, location } if name == "b" && location.start == 245 && location.end == 246)
matches!(&warnings[0], Warning::UnusedVariable { name, location } if name == "b" && location.start == 245 && location.end == 246)
);
}
@ -3407,7 +3409,7 @@ fn side_effects() {
assert!(warnings.is_empty(), "no warnings: {warnings:#?}");
if let Some(Definition::Fn(ref foo)) = ast.definitions().last() {
if let Some(Definition::Fn(foo)) = ast.definitions().last() {
if let TypedExpr::Sequence {
ref expressions, ..
} = foo.body

View File

@ -44,6 +44,21 @@ fn format_nul_byte() {
);
}
#[test]
fn format_allow_comments_in_byte_array() {
assert_format!(
r#"
pub const thing =
#[
// thing
0x12,
// wow
0x10,
]
"#
);
}
#[test]
fn format_g1_element_constant() {
assert_format!(

View File

@ -0,0 +1,11 @@
---
source: crates/aiken-lang/src/tests/format.rs
description: "Code:\n\npub const thing =\n #[\n // thing\n 0x12,\n // wow\n 0x10,\n ]\n"
---
pub const thing =
#[
// thing
0x12,
// wow
0x10,
]

View File

@ -1,8 +1,8 @@
use self::{environment::Environment, pretty::Printer};
use crate::{
ast::{
well_known, Annotation, DataType, DataTypeKey, DefinitionLocation, ModuleKind, Span,
TypedDataType,
Annotation, DataType, DataTypeKey, DefinitionLocation, ModuleKind, Span, TypedDataType,
well_known,
},
tipo::fields::FieldMap,
};
@ -212,8 +212,8 @@ impl Type {
match self {
Type::App { module, name, .. } => Some((module.to_string(), name.to_string())),
Type::Fn { .. } => None,
Type::Var { ref tipo, .. } => match &*tipo.borrow() {
TypeVar::Link { ref tipo } => tipo.qualifier(),
Type::Var { tipo, .. } => match &*tipo.borrow() {
TypeVar::Link { tipo } => tipo.qualifier(),
_ => None,
},
Type::Tuple { .. } => Some((String::new(), "Tuple".to_string())),
@ -1109,11 +1109,13 @@ impl TypeVar {
Self::Link { tipo } => tipo.get_inner_types(),
Self::Unbound { .. } => vec![],
var => {
vec![Type::Var {
tipo: RefCell::new(var.clone()).into(),
alias: None,
}
.into()]
vec![
Type::Var {
tipo: RefCell::new(var.clone()).into(),
alias: None,
}
.into(),
]
}
}
}

View File

@ -1,19 +1,19 @@
use super::{
error::{Error, Warning},
exhaustive::{simplify, Matrix, PatternStack},
hydrator::Hydrator,
AccessorsMap, RecordAccessor, Type, TypeConstructor, TypeInfo, TypeVar, ValueConstructor,
ValueConstructorVariant,
error::{Error, Warning},
exhaustive::{Matrix, PatternStack, simplify},
hydrator::Hydrator,
};
use crate::{
IdGenerator,
ast::{
self, Annotation, CallArg, DataType, Definition, Function, ModuleConstant, ModuleKind,
Namespace, RecordConstructor, RecordConstructorArg, Span, TypeAlias, TypedDefinition,
TypedFunction, TypedPattern, TypedValidator, UnqualifiedImport, UntypedArg,
UntypedDefinition, UntypedFunction, Use, Validator, PIPE_VARIABLE,
Namespace, PIPE_VARIABLE, RecordConstructor, RecordConstructorArg, Span, TypeAlias,
TypedDefinition, TypedFunction, TypedPattern, TypedValidator, UnqualifiedImport,
UntypedArg, UntypedDefinition, UntypedFunction, Use, Validator,
},
tipo::{fields::FieldMap, TypeAliasAnnotation},
IdGenerator,
tipo::{TypeAliasAnnotation, fields::FieldMap},
};
use std::{
collections::{HashMap, HashSet},
@ -1157,7 +1157,7 @@ impl<'a> Environment<'a> {
let first_error = unknowns.first().cloned();
unknowns.retain(|err| {
if let Error::UnknownType { ref name, .. } = err {
if let Error::UnknownType { name, .. } = err {
!type_definitions.contains(&Some(name))
} else {
false

View File

@ -1662,9 +1662,7 @@ pub enum Warning {
"I discovered an unused constructor: {}",
name.if_supports_color(Stderr, |s| s.default_color())
)]
#[diagnostic(help(
"No big deal, but you might want to remove it to get rid of that warning."
))]
#[diagnostic(help("No big deal, but you might want to remove it to get rid of that warning."))]
#[diagnostic(code("unused::constructor"))]
UnusedConstructor {
#[label("unused constructor")]
@ -1676,9 +1674,7 @@ pub enum Warning {
"I discovered an unused imported module: {}",
name.if_supports_color(Stderr, |s| s.default_color()),
)]
#[diagnostic(help(
"No big deal, but you might want to remove it to get rid of that warning."
))]
#[diagnostic(help("No big deal, but you might want to remove it to get rid of that warning."))]
#[diagnostic(code("unused::import::module"))]
UnusedImportedModule {
#[label("unused module")]
@ -1690,9 +1686,7 @@ pub enum Warning {
"I discovered an unused imported value: {}",
name.if_supports_color(Stderr, |s| s.default_color()),
)]
#[diagnostic(help(
"No big deal, but you might want to remove it to get rid of that warning."
))]
#[diagnostic(help("No big deal, but you might want to remove it to get rid of that warning."))]
#[diagnostic(code("unused:import::value"))]
UnusedImportedValueOrType {
#[label("unused import")]
@ -1867,7 +1861,9 @@ pub enum Warning {
},
#[error("I noticed a (compact) dynamic trace label which is not a string")]
#[diagnostic(help("Compiling with a compact trace-level, you are probably expecting compact traces although here, the entire label will need to be serialise *at runtime* which will add a significant overhead.\n\nAs a reminder, trace arguments are fully ignored in compact tracing. Hence, you probably want to put a cute little label here and move the current trace as argument!"))]
#[diagnostic(help(
"Compiling with a compact trace-level, you are probably expecting compact traces although here, the entire label will need to be serialise *at runtime* which will add a significant overhead.\n\nAs a reminder, trace arguments are fully ignored in compact tracing. Hence, you probably want to put a cute little label here and move the current trace as argument!"
))]
#[diagnostic(code("trace::label_is_not_string"))]
#[diagnostic(url("https://aiken-lang.org/language-tour/troubleshooting#traces"))]
CompactTraceLabelIsNotstring {

View File

@ -1,6 +1,6 @@
use crate::{
ast,
tipo::{self, environment::Environment, error::Error, Type},
tipo::{self, Type, environment::Environment, error::Error},
};
use itertools::Itertools;
use std::{collections::BTreeMap, iter, ops::Deref};
@ -407,11 +407,7 @@ impl Pattern {
.filter_map(|(index, p)| {
if index == 1 {
let tail = pretty_tail(p);
if tail == "[]" {
None
} else {
Some(tail)
}
if tail == "[]" { None } else { Some(tail) }
} else {
Some(p.pretty())
}

View File

@ -1,14 +1,15 @@
use super::{
RecordAccessor, Type, ValueConstructor, ValueConstructorVariant,
environment::{
assert_no_labeled_arguments, collapse_links, generalise, EntityKind, Environment,
EntityKind, Environment, assert_no_labeled_arguments, collapse_links, generalise,
},
error::{Error, Warning},
hydrator::Hydrator,
pattern::PatternTyper,
pipe::PipeTyper,
RecordAccessor, Type, ValueConstructor, ValueConstructorVariant,
};
use crate::{
IdGenerator,
ast::{
self, Annotation, ArgName, AssignmentKind, AssignmentPattern, BinOp, Bls12_381Point,
ByteArrayFormatPreference, CallArg, Curve, Function, IfBranch, LogicalOpChainKind,
@ -17,14 +18,13 @@ use crate::{
TypedValidator, UnOp, UntypedArg, UntypedAssignmentKind, UntypedClause, UntypedFunction,
UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg,
},
builtins::{from_default_function, BUILTIN},
builtins::{BUILTIN, from_default_function},
expr::{FnStyle, TypedExpr, UntypedExpr},
format,
parser::token::Base,
tipo::{
fields::FieldMap, DefaultFunction, ModuleKind, PatternConstructor, TypeConstructor, TypeVar,
DefaultFunction, ModuleKind, PatternConstructor, TypeConstructor, TypeVar, fields::FieldMap,
},
IdGenerator,
};
use std::{
cmp::Ordering,
@ -555,7 +555,11 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
bytes,
preferred_format,
location,
} => self.infer_bytearray(bytes, preferred_format, location),
} => self.infer_bytearray(
bytes.into_iter().map(|(b, _)| b).collect(),
preferred_format,
location,
),
UntypedExpr::CurvePoint {
location,
@ -970,7 +974,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
location,
} => {
if let UntypedExpr::Var {
name: ref module,
name: module,
location: module_location,
} = container.as_ref()
{
@ -1091,7 +1095,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
container: ref type_container,
} if TypeConstructor::might_be(type_name) => {
if let UntypedExpr::Var {
name: ref module_name,
name: module_name,
location: module_location,
} = type_container.as_ref()
{

View File

@ -1,7 +1,7 @@
use super::{
Type, TypeConstructor,
environment::Environment,
error::{Error, Warning},
Type, TypeConstructor,
};
use crate::{ast::Annotation, tipo::Span};
use std::{collections::HashMap, rc::Rc};

View File

@ -1,11 +1,12 @@
use super::{
TypeInfo, ValueConstructor, ValueConstructorVariant,
environment::{EntityKind, Environment},
error::{Error, UnifyErrorSituation, Warning},
expr::ExprTyper,
hydrator::Hydrator,
TypeInfo, ValueConstructor, ValueConstructorVariant,
};
use crate::{
IdGenerator,
ast::{
Annotation, ArgBy, ArgName, ArgVia, DataType, Definition, Function, ModuleConstant,
ModuleKind, RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedArg,
@ -14,8 +15,7 @@ use crate::{
},
expr::{TypedExpr, UntypedAssignmentKind, UntypedExpr},
parser::token::Token,
tipo::{expr::infer_function, Span, Type, TypeVar},
IdGenerator,
tipo::{Span, Type, TypeVar, expr::infer_function},
};
use std::{
borrow::Borrow,
@ -758,8 +758,8 @@ where
.get_mut(&f.name)
.expect("Could not find preregistered type for test");
if let Type::Fn {
ref ret,
ref alias,
ret,
alias,
args: _,
} = scope.tipo.as_ref()
{

View File

@ -1,10 +1,10 @@
//! Type inference and checking of patterns used in case expressions
//! and variables bindings.
use super::{
environment::{assert_no_labeled_arguments, collapse_links, EntityKind, Environment},
PatternConstructor, Type, ValueConstructorVariant,
environment::{EntityKind, Environment, assert_no_labeled_arguments, collapse_links},
error::{Error, Warning},
hydrator::Hydrator,
PatternConstructor, Type, ValueConstructorVariant,
};
use crate::ast::{CallArg, Namespace, Pattern, Span, TypedPattern, UntypedPattern};
use itertools::Itertools;
@ -210,7 +210,7 @@ impl<'a, 'b> PatternTyper<'a, 'b> {
Ok(Pattern::ByteArray {
location,
value,
value: value.into_iter().map(|(b, _)| b).collect(),
preferred_format,
})
}

View File

@ -1,10 +1,10 @@
use super::{
Type, ValueConstructor, ValueConstructorVariant,
error::{Error, UnifyErrorSituation},
expr::ExprTyper,
Type, ValueConstructor, ValueConstructorVariant,
};
use crate::{
ast::{AssignmentKind, CallArg, Pattern, Span, PIPE_VARIABLE},
ast::{AssignmentKind, CallArg, PIPE_VARIABLE, Pattern, Span},
expr::{TypedExpr, UntypedExpr},
};
use std::{ops::Deref, rc::Rc};

View File

@ -136,7 +136,7 @@ impl Printer {
fn type_var_doc<'a>(&mut self, typ: &TypeVar) -> Document<'a> {
match typ {
TypeVar::Link { tipo: ref typ, .. } => self.print(typ),
TypeVar::Link { tipo: typ, .. } => self.print(typ),
TypeVar::Generic { id, .. } => self.generic_type_var(*id),
TypeVar::Unbound { .. } => "?".to_doc(),
}

View File

@ -1,13 +1,14 @@
[package]
name = "aiken-lsp"
version = "1.1.15"
edition = "2021"
description = "Cardano smart contract language and toolchain"
repository = "https://github.com/aiken-lang/aiken"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
authors = ["Lucas Rosa <x@rvcas.dev>"]
rust-version = "1.70.0"
version.workspace = true
edition.workspace = true
description.workspace = true
license.workspace = true
authors.workspace = true
repository.workspace = true
homepage.workspace = true
documentation.workspace = true
rust-version.workspace = true
[dependencies]
crossbeam-channel = "0.5.7"

View File

@ -1,5 +1,5 @@
use crate::server::Server;
use aiken_project::{config::Config, paths};
use aiken_project::{config::ProjectConfig, paths};
use error::Error;
use lsp_server::Connection;
use std::env;
@ -23,7 +23,7 @@ pub fn start() -> Result<(), Error> {
let config = if paths::project_config().exists() {
tracing::info!("Aiken project detected");
Some(Config::load(&root).expect("failed to load aiken.toml"))
Some(ProjectConfig::load(&root).expect("failed to load aiken.toml"))
} else {
tracing::info!("Aiken project config not found");

View File

@ -238,7 +238,7 @@ fn unknown_identifier(
) -> Vec<AnnotatedEdit> {
let mut edits = Vec::new();
if let Some(serde_json::Value::String(ref var_name)) = data {
if let Some(serde_json::Value::String(var_name)) = data {
for module in compiler.project.modules() {
if module.ast.has_definition(var_name) {
if let Some(edit) = parsed_document.import(&module, Some(var_name)) {
@ -263,7 +263,7 @@ fn unknown_constructor(
) -> Vec<AnnotatedEdit> {
let mut edits = Vec::new();
if let Some(serde_json::Value::String(ref constructor_name)) = data {
if let Some(serde_json::Value::String(constructor_name)) = data {
for module in compiler.project.modules() {
if module.ast.has_constructor(constructor_name) {
if let Some(edit) = parsed_document.import(&module, Some(constructor_name)) {
@ -316,7 +316,7 @@ fn unknown_module(
) -> Vec<AnnotatedEdit> {
let mut edits = Vec::new();
if let Some(serde_json::Value::String(ref module_name)) = data {
if let Some(serde_json::Value::String(module_name)) = data {
for module in compiler.project.modules() {
if module.name.ends_with(module_name) {
if let Some(edit) = parsed_document.import(&module, None) {
@ -336,7 +336,7 @@ fn unused_imports(
let mut edits = Vec::new();
for data in datas.iter().rev().flatten() {
if let serde_json::Value::String(ref args) = data {
if let serde_json::Value::String(args) = data {
let args = args.split(',').collect::<Vec<&str>>();
match args.as_slice() {
&[is_qualified, start] => {
@ -362,7 +362,7 @@ fn unused_imports(
fn utf8_byte_array_is_hex_string(diagnostic: &lsp_types::Diagnostic) -> Vec<AnnotatedEdit> {
let mut edits = Vec::new();
if let Some(serde_json::Value::String(ref value)) = diagnostic.data.as_ref() {
if let Some(serde_json::Value::String(value)) = diagnostic.data.as_ref() {
edits.push(AnnotatedEdit::SimpleEdit(
"Prefix with #".to_string(),
lsp_types::TextEdit {

View File

@ -5,8 +5,8 @@ use crate::{
quickfix,
quickfix::Quickfix,
utils::{
path_to_uri, span_to_lsp_range, text_edit_replace, uri_to_module_name,
COMPILING_PROGRESS_TOKEN, CREATE_COMPILING_PROGRESS_TOKEN,
COMPILING_PROGRESS_TOKEN, CREATE_COMPILING_PROGRESS_TOKEN, path_to_uri, span_to_lsp_range,
text_edit_replace, uri_to_module_name,
},
};
use aiken_lang::{
@ -17,7 +17,7 @@ use aiken_lang::{
tipo::pretty::Printer,
};
use aiken_project::{
config::{self, Config},
config::{self, ProjectConfig},
error::{Error as ProjectError, GetSource},
module::CheckedModule,
};
@ -25,6 +25,7 @@ use indoc::formatdoc;
use itertools::Itertools;
use lsp_server::{Connection, Message};
use lsp_types::{
DocumentFormattingParams, InitializeParams, TextEdit,
notification::{
DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, DidSaveTextDocument,
Notification, Progress, PublishDiagnostics, ShowMessage,
@ -33,7 +34,6 @@ use lsp_types::{
CodeActionRequest, Completion, Formatting, GotoDefinition, HoverRequest, Request,
WorkDoneProgressCreate,
},
DocumentFormattingParams, InitializeParams, TextEdit,
};
use miette::Diagnostic;
use std::{
@ -50,7 +50,7 @@ pub struct Server {
// Project root directory
root: PathBuf,
config: Option<config::Config>,
config: Option<config::ProjectConfig>,
/// Files that have been edited in memory
edited: HashMap<String, String>,
@ -235,7 +235,7 @@ impl Server {
}
DidChangeWatchedFiles::METHOD => {
if let Ok(config) = Config::load(&self.root) {
if let Ok(config) = ProjectConfig::load(&self.root) {
self.config = Some(config);
self.create_new_compiler();
self.compile(connection)?;
@ -603,7 +603,7 @@ impl Server {
pub fn new(
initialize_params: InitializeParams,
config: Option<config::Config>,
config: Option<config::ProjectConfig>,
root: PathBuf,
) -> Self {
let mut server = Server {

View File

@ -1,5 +1,7 @@
use aiken_lang::{ast::Tracing, line_numbers::LineNumbers, test_framework::PropertyTest};
use aiken_project::{config::Config, error::Error as ProjectError, module::CheckedModule, Project};
use aiken_project::{
Project, config::ProjectConfig, error::Error as ProjectError, module::CheckedModule,
};
use std::{collections::HashMap, path::PathBuf};
#[derive(Debug)]
@ -18,7 +20,7 @@ pub struct LspProject {
}
impl LspProject {
pub fn new(config: Config, root: PathBuf, telemetry: super::telemetry::Lsp) -> Self {
pub fn new(config: ProjectConfig, root: PathBuf, telemetry: super::telemetry::Lsp) -> Self {
Self {
project: Project::new_with_config(config, root, telemetry),
modules: HashMap::new(),

View File

@ -1,7 +1,7 @@
use crate::error::Error;
use aiken_lang::{ast::Span, line_numbers::LineNumbers};
use itertools::Itertools;
use lsp_types::{notification::Notification, TextEdit};
use lsp_types::{TextEdit, notification::Notification};
use std::path::{Path, PathBuf};
use urlencoding::decode;

View File

@ -1,17 +1,15 @@
[package]
name = "aiken-project"
description = "Aiken project utilities"
version = "1.1.15"
edition = "2021"
repository = "https://github.com/aiken-lang/aiken"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
authors = [
"Lucas Rosa <x@rvcas.dev>",
"Kasey White <kwhitemsg@gmail.com>",
"KtorZ <matthias.benkort@gmail.com>",
]
rust-version = "1.80.0"
version.workspace = true
edition.workspace = true
description.workspace = true
license.workspace = true
authors.workspace = true
repository.workspace = true
homepage.workspace = true
documentation.workspace = true
rust-version.workspace = true
build = "build.rs"
[dependencies]
@ -22,6 +20,7 @@ ciborium = "0.2.2"
dirs = "4.0.0"
fslock = "0.2.1"
futures = "0.3.26"
glob = "0.3.2"
hex = "0.4.3"
ignore = "0.4.20"
indexmap = "1.9.2"

View File

@ -1,11 +1,11 @@
use crate::{
Annotated, Schema,
blueprint::{
parameter::Parameter,
schema::{Data, Declaration, Items},
},
Annotated, Schema,
};
use aiken_lang::tipo::{pretty::resolve_alias, Type, TypeAliasAnnotation, TypeVar};
use aiken_lang::tipo::{Type, TypeAliasAnnotation, TypeVar, pretty::resolve_alias};
use itertools::Itertools;
use serde::{
self,
@ -176,7 +176,7 @@ impl Definitions<Annotated<Schema>> {
dependencies.insert(src);
}
}
Declaration::Inline(ref schema) => traverse(src, schema, usage),
Declaration::Inline(schema) => traverse(src, schema, usage),
}
}
@ -260,16 +260,16 @@ impl Definitions<Annotated<Schema>> {
fn schema_to_data(schema: &mut Schema) {
let items = match schema {
Schema::Data(_) => None,
Schema::Pair(ref mut left, ref mut right) => {
Schema::Pair(left, right) => {
let left = swap_declaration(left);
let right = swap_declaration(right);
Some(Items::Many(vec![left, right]))
}
Schema::List(Items::One(ref mut item)) => {
Schema::List(Items::One(item)) => {
let item = swap_declaration(item);
Some(Items::One(item))
}
Schema::List(Items::Many(ref mut items)) => Some(Items::Many(
Schema::List(Items::Many(items)) => Some(Items::Many(
items.iter_mut().map(swap_declaration).collect(),
)),
Schema::Integer => {
@ -300,7 +300,7 @@ impl Definitions<Annotated<Schema>> {
}
for (_, entry) in self.inner.iter_mut() {
if let Some(ref mut annotated) = entry {
if let Some(annotated) = entry {
schema_to_data(&mut annotated.annotated);
}
}

View File

@ -6,7 +6,7 @@ pub mod schema;
pub mod validator;
use crate::{
config::{self, Config, PlutusVersion},
config::{self, PlutusVersion, ProjectConfig},
module::CheckedModules,
};
use aiken_lang::gen_uplc::CodeGenerator;
@ -58,7 +58,7 @@ pub enum LookupResult<'a, T> {
impl Blueprint {
pub fn new(
config: &Config,
config: &ProjectConfig,
modules: &CheckedModules,
generator: &mut CodeGenerator,
) -> Result<Self, Error> {
@ -179,8 +179,8 @@ impl Blueprint {
}
}
impl From<&Config> for Preamble {
fn from(config: &Config) -> Self {
impl From<&ProjectConfig> for Preamble {
fn from(config: &ProjectConfig) -> Self {
Preamble {
title: config.name.to_string(),
description: if config.description.is_empty() {

View File

@ -5,8 +5,8 @@ use super::{
};
use std::{iter, ops::Deref};
use uplc::{
ast::{Constant, Data as UplcData},
PlutusData,
ast::{Constant, Data as UplcData},
};
#[derive(Debug, PartialEq, Eq, Clone, serde::Serialize, serde::Deserialize)]
@ -34,7 +34,7 @@ impl Parameter {
) -> Result<(), Error> {
let schema = match &self.schema {
Declaration::Inline(schema) => schema,
Declaration::Referenced(ref link) => {
Declaration::Referenced(link) => {
&definitions
.lookup(link)
.map(Ok)
@ -353,11 +353,13 @@ fn expect_data_constr(term: &Constant, index: usize) -> Result<Vec<Constant>, Er
Err(mismatch(
term,
Schema::Data(Data::AnyOf(vec![Constructor {
index,
fields: vec![],
}
.into()])),
Schema::Data(Data::AnyOf(vec![
Constructor {
index,
fields: vec![],
}
.into(),
])),
))
}

View File

@ -1,10 +1,10 @@
use crate::{
blueprint::definitions::{Definitions, Reference},
CheckedModule,
blueprint::definitions::{Definitions, Reference},
};
use aiken_lang::{
ast::{Definition, TypedDataType, TypedDefinition},
tipo::{pretty, Type, TypeVar},
tipo::{Type, TypeVar, pretty},
};
use owo_colors::{OwoColorize, Stream::Stdout};
use serde::{
@ -1117,7 +1117,7 @@ Here's the types I followed and that led me to this problem:
pub mod tests {
use super::*;
use proptest::prelude::*;
use serde_json::{self, json, Value};
use serde_json::{self, Value, json};
pub fn assert_json(schema: &impl Serialize, expected: Value) {
assert_eq!(serde_json::to_value(schema).unwrap(), expected);
@ -1198,11 +1198,13 @@ pub mod tests {
#[test]
fn serialize_data_constr_1() {
let schema = Schema::Data(Data::AnyOf(vec![Constructor {
index: 0,
fields: vec![],
}
.into()]));
let schema = Schema::Data(Data::AnyOf(vec![
Constructor {
index: 0,
fields: vec![],
}
.into(),
]));
assert_json(
&schema,
json!({
@ -1363,14 +1365,16 @@ pub mod tests {
#[test]
fn deserialize_any_of() {
assert_eq!(
Data::AnyOf(vec![Constructor {
index: 0,
fields: vec![
Declaration::Referenced(Reference::new("foo")).into(),
Declaration::Referenced(Reference::new("bar")).into()
],
}
.into()]),
Data::AnyOf(vec![
Constructor {
index: 0,
fields: vec![
Declaration::Referenced(Reference::new("foo")).into(),
Declaration::Referenced(Reference::new("bar")).into()
],
}
.into()
]),
serde_json::from_value(json!({
"anyOf": [{
"index": 0,
@ -1391,14 +1395,16 @@ pub mod tests {
#[test]
fn deserialize_one_of() {
assert_eq!(
Data::AnyOf(vec![Constructor {
index: 0,
fields: vec![
Declaration::Referenced(Reference::new("foo")).into(),
Declaration::Referenced(Reference::new("bar")).into()
],
}
.into()]),
Data::AnyOf(vec![
Constructor {
index: 0,
fields: vec![
Declaration::Referenced(Reference::new("foo")).into(),
Declaration::Referenced(Reference::new("bar")).into()
],
}
.into()
]),
serde_json::from_value(json!({
"oneOf": [{
"index": 0,

View File

@ -7,17 +7,17 @@ use super::{
};
use crate::module::{CheckedModule, CheckedModules};
use aiken_lang::{
ast::{well_known, Annotation, TypedArg, TypedFunction, TypedValidator},
ast::{Annotation, TypedArg, TypedFunction, TypedValidator, well_known},
gen_uplc::CodeGenerator,
plutus_version::PlutusVersion,
tipo::{collapse_links, Type},
tipo::{Type, collapse_links},
};
use miette::NamedSource;
use serde;
use std::borrow::Borrow;
use uplc::{
ast::{Constant, SerializableProgram},
PlutusData,
ast::{Constant, SerializableProgram},
};
#[derive(Debug, PartialEq, Clone, serde::Serialize, serde::Deserialize)]
@ -245,8 +245,8 @@ impl Validator {
pub fn tipo_or_annotation<'a>(module: &'a CheckedModule, arg: &'a TypedArg) -> &'a Type {
match collapse_links(arg.tipo.clone()).borrow() {
Type::App {
module: ref module_name,
name: ref type_name,
module: module_name,
name: type_name,
..
} if module_name.is_empty() && &type_name[..] == "Data" => match arg.annotation {
Some(Annotation::Constructor { ref arguments, .. }) if !arguments.is_empty() => module
@ -301,7 +301,7 @@ impl Validator {
description: None,
annotated: schema.as_ref().clone(),
},
Declaration::Referenced(ref link) => definitions
Declaration::Referenced(link) => definitions
.lookup(link)
.map(|s| {
Ok(Annotated {
@ -1032,11 +1032,13 @@ mod tests {
let mut definitions = fixture_definitions();
definitions.insert(
&schema,
Schema::Data(Data::AnyOf(vec![Constructor {
index: 0,
fields: vec![Declaration::Referenced(Reference::new("Bool")).into()],
}
.into()]))
Schema::Data(Data::AnyOf(vec![
Constructor {
index: 0,
fields: vec![Declaration::Referenced(Reference::new("Bool")).into()],
}
.into(),
]))
.into(),
);

View File

@ -1,5 +1,5 @@
use crate::{
error::TomlLoadingContext, github::repo::LatestRelease, package_name::PackageName, paths, Error,
Error, error::TomlLoadingContext, github::repo::LatestRelease, package_name::PackageName, paths,
};
use aiken_lang::{
ast::{Annotation, ByteArrayFormatPreference, ModuleConstant, Span, UntypedDefinition},
@ -7,37 +7,101 @@ use aiken_lang::{
parser::token::Base,
};
pub use aiken_lang::{plutus_version::PlutusVersion, version::compiler_version};
use glob::glob;
use miette::NamedSource;
use semver::Version;
use serde::{
de,
Deserialize, Serialize, de,
ser::{self, SerializeSeq, SerializeStruct},
Deserialize, Serialize,
};
use std::{collections::BTreeMap, fmt::Display, fs, io, path::Path};
use std::{
collections::BTreeMap,
fmt::Display,
fs, io,
path::{Path, PathBuf},
};
#[derive(Deserialize, Serialize, Clone)]
pub struct Config {
pub struct ProjectConfig {
pub name: PackageName,
pub version: String,
#[serde(
deserialize_with = "deserialize_version",
serialize_with = "serialize_version",
default = "default_version"
)]
pub compiler: Version,
#[serde(default, deserialize_with = "validate_v3_only")]
pub plutus: PlutusVersion,
pub license: Option<String>,
#[serde(default)]
pub description: String,
pub repository: Option<Repository>,
#[serde(default)]
pub dependencies: Vec<Dependency>,
#[serde(default)]
pub config: BTreeMap<String, BTreeMap<String, SimpleExpr>>,
}
/// Raw shape of a workspace-level `aiken.toml` as deserialized from disk:
/// the list of member entries (plain paths or glob patterns) before they are
/// expanded relative to the workspace root.
#[derive(Deserialize, Serialize, Clone)]
struct RawWorkspaceConfig {
members: Vec<String>,
}
impl RawWorkspaceConfig {
    /// Expand every member entry relative to `root`.
    ///
    /// Each entry is first tried as a glob pattern. When the pattern matches
    /// nothing — or cannot be globbed at all (e.g. a non-UTF-8 path) — the
    /// joined path is kept verbatim instead of being dropped.
    pub fn expand_members(self, root: &Path) -> Vec<PathBuf> {
        let mut expanded = Vec::new();

        for entry in self.members {
            let candidate = root.join(entry);

            let matches: Vec<PathBuf> = match candidate.to_str().and_then(|s| glob(s).ok()) {
                Some(paths) => paths.filter_map(Result::ok).collect(),
                None => Vec::new(),
            };

            if matches.is_empty() {
                // Nothing matched (or the glob failed): keep the literal path.
                expanded.push(candidate);
            } else {
                // The glob produced hits: take all of them.
                expanded.extend(matches);
            }
        }

        expanded
    }
}
/// A loaded workspace configuration: the fully-expanded list of member
/// project paths (see `RawWorkspaceConfig::expand_members`).
pub struct WorkspaceConfig {
pub members: Vec<PathBuf>,
}
impl WorkspaceConfig {
pub fn load(dir: &Path) -> Result<WorkspaceConfig, Error> {
let config_path = dir.join(paths::project_config());
let raw_config = fs::read_to_string(&config_path).map_err(|_| Error::MissingManifest {
path: dir.to_path_buf(),
})?;
let raw: RawWorkspaceConfig = toml::from_str(&raw_config).map_err(|e| {
from_toml_de_error(e, config_path, raw_config, TomlLoadingContext::Workspace)
})?;
let members = raw.expand_members(dir);
Ok(WorkspaceConfig { members })
}
}
#[derive(Clone, Debug)]
pub enum SimpleExpr {
Int(i64),
@ -62,7 +126,7 @@ impl SimpleExpr {
},
SimpleExpr::ByteArray(bs, preferred_format) => UntypedExpr::ByteArray {
location: Span::empty(),
bytes: bs.to_vec(),
bytes: bs.iter().map(|b| (*b, Span::empty())).collect(),
preferred_format: *preferred_format,
},
SimpleExpr::List(es) => match annotation {
@ -303,9 +367,9 @@ impl Display for Platform {
}
}
impl Config {
impl ProjectConfig {
pub fn default(name: &PackageName) -> Self {
Config {
ProjectConfig {
name: name.clone(),
version: "0.0.0".to_string(),
compiler: default_version(),
@ -338,23 +402,14 @@ impl Config {
fs::write(aiken_toml_path, aiken_toml)
}
pub fn load(dir: &Path) -> Result<Config, Error> {
pub fn load(dir: &Path) -> Result<ProjectConfig, Error> {
let config_path = dir.join(paths::project_config());
let raw_config = fs::read_to_string(&config_path).map_err(|_| Error::MissingManifest {
path: dir.to_path_buf(),
})?;
let result: Self = toml::from_str(&raw_config).map_err(|e| Error::TomlLoading {
ctx: TomlLoadingContext::Project,
path: config_path.clone(),
src: raw_config.clone(),
named: NamedSource::new(config_path.display().to_string(), raw_config).into(),
// this isn't actually a legit way to get the span
location: e.span().map(|range| Span {
start: range.start,
end: range.end,
}),
help: e.message().to_string(),
let result: Self = toml::from_str(&raw_config).map_err(|e| {
from_toml_de_error(e, config_path, raw_config, TomlLoadingContext::Project)
})?;
Ok(result)
@ -388,6 +443,26 @@ where
}
}
/// Convert a low-level TOML deserialization error into this crate's
/// `Error::TomlLoading`, attaching the offending file's path, its raw text,
/// a miette source, and a best-effort location.
fn from_toml_de_error(
    e: toml::de::Error,
    config_path: PathBuf,
    raw_config: String,
    ctx: TomlLoadingContext,
) -> Error {
    // Render the display name before `config_path` is moved into the error,
    // so the PathBuf is moved rather than cloned; only the source text still
    // needs one copy (it is stored both as `src` and inside `named`).
    let source_name = config_path.display().to_string();

    Error::TomlLoading {
        ctx,
        path: config_path,
        src: raw_config.clone(),
        named: NamedSource::new(source_name, raw_config).into(),
        // this isn't actually a legit way to get the span
        location: e.span().map(|range| Span {
            start: range.start,
            end: range.end,
        }),
        help: e.message().to_string(),
    }
}
mod built_info {
include!(concat!(env!("OUT_DIR"), "/built.rs"));
}

View File

@ -6,7 +6,7 @@ use serde::{Deserialize, Serialize};
use tokio::time::Instant;
use crate::{
config::{Config, Dependency},
config::{Dependency, ProjectConfig},
error::{Error, TomlLoadingContext},
package_name::PackageName,
paths,
@ -133,7 +133,11 @@ impl From<&Manifest> for LocalPackages {
}
}
pub fn download<T>(event_listener: &T, root_path: &Path, config: &Config) -> Result<Manifest, Error>
pub fn download<T>(
event_listener: &T,
root_path: &Path,
config: &ProjectConfig,
) -> Result<Manifest, Error>
where
T: EventListener,
{

View File

@ -9,7 +9,7 @@ use std::{
};
use crate::{
config::{Config, Dependency, Platform},
config::{Dependency, Platform, ProjectConfig},
error::{Error, TomlLoadingContext},
package_name::PackageName,
paths,
@ -27,7 +27,7 @@ pub struct Manifest {
impl Manifest {
pub fn load<T>(
event_listener: &T,
config: &Config,
config: &ProjectConfig,
root_path: &Path,
) -> Result<(Self, bool), Error>
where
@ -121,7 +121,7 @@ pub struct Package {
pub source: Platform,
}
fn resolve_versions<T>(config: &Config, event_listener: &T) -> Result<Manifest, Error>
fn resolve_versions<T>(config: &ProjectConfig, event_listener: &T) -> Result<Manifest, Error>
where
T: EventListener,
{

View File

@ -1,5 +1,5 @@
use crate::{
config::{Config, Repository},
config::{ProjectConfig, Repository},
module::CheckedModule,
};
use aiken_lang::{
@ -104,7 +104,11 @@ impl DocLink {
/// The documentation is built using template files located at the root of this crate.
/// With the documentation, we also build a client-side search index to ease navigation
/// across multiple modules.
pub fn generate_all(root: &Path, config: &Config, modules: Vec<&CheckedModule>) -> Vec<DocFile> {
pub fn generate_all(
root: &Path,
config: &ProjectConfig,
modules: Vec<&CheckedModule>,
) -> Vec<DocFile> {
let timestamp = new_timestamp();
let modules_links = generate_modules_links(&modules);
@ -155,7 +159,7 @@ pub fn generate_all(root: &Path, config: &Config, modules: Vec<&CheckedModule>)
fn generate_module(
root: &Path,
config: &Config,
config: &ProjectConfig,
module: &CheckedModule,
modules: &[DocLink],
source: &DocLink,
@ -376,7 +380,7 @@ fn generate_static_assets(search_indexes: Vec<SearchIndex>) -> Vec<DocFile> {
fn generate_readme(
root: &Path,
config: &Config,
config: &ProjectConfig,
modules: &[DocLink],
source: &DocLink,
timestamp: &Duration,

View File

@ -78,10 +78,7 @@ impl LinkTree {
}
}
LinkTree::Leaf {
value: ref mut leaf,
..
} => {
LinkTree::Leaf { value: leaf, .. } => {
// In case we try to insert a module that already exists, there's nothing to do.
if module == leaf {
return;
@ -138,9 +135,7 @@ impl LinkTree {
}
LinkTree::Node {
ref mut prefix,
ref mut children,
..
prefix, children, ..
} => {
// When `module.starts_with(prefix)` is true, it means that the module being
// inserted belong to our sub-tree. We do not know *where* exactly though, so we
@ -274,8 +269,8 @@ impl LinkTree {
fn path(&self) -> &str {
match self {
LinkTree::Empty => "",
LinkTree::Leaf { ref value, .. } => value.as_str(),
LinkTree::Node { ref prefix, .. } => prefix.as_str(),
LinkTree::Leaf { value, .. } => value.as_str(),
LinkTree::Node { prefix, .. } => prefix.as_str(),
}
}
}

View File

@ -1,6 +1,6 @@
use crate::{
config::{Config, Platform},
CheckedModule,
config::{Platform, ProjectConfig},
};
use aiken_lang::{ast::Span, line_numbers::LineNumbers};
use camino::{Utf8Component, Utf8Path};
@ -12,7 +12,7 @@ pub struct SourceLinker {
}
impl SourceLinker {
pub fn new(root: &Path, config: &Config, module: &CheckedModule) -> Self {
pub fn new(root: &Path, config: &ProjectConfig, module: &CheckedModule) -> Self {
let utf8_path = <&Utf8Path>::try_from(
module
.input_path

View File

@ -26,6 +26,7 @@ pub enum TomlLoadingContext {
Project,
Manifest,
Package,
Workspace,
}
impl fmt::Display for TomlLoadingContext {
@ -34,6 +35,7 @@ impl fmt::Display for TomlLoadingContext {
TomlLoadingContext::Project => write!(f, "project"),
TomlLoadingContext::Manifest => write!(f, "manifest"),
TomlLoadingContext::Package => write!(f, "package"),
TomlLoadingContext::Workspace => write!(f, "workspace"),
}
}
}
@ -430,12 +432,14 @@ impl Diagnostic for Error {
))),
Error::NoValidatorNotFound { known_validators } => Some(Box::new(hint_validators(
known_validators,
"Here's a list of all validators I've found in your project.\nPlease double-check this list against the options that you've provided."
))),
Error::MoreThanOneValidatorFound { known_validators } => Some(Box::new(hint_validators(
known_validators,
"Here's a list of matching validators I've found in your project.\nPlease narrow the selection using additional options.",
"Here's a list of all validators I've found in your project.\nPlease double-check this list against the options that you've provided.",
))),
Error::MoreThanOneValidatorFound { known_validators } => {
Some(Box::new(hint_validators(
known_validators,
"Here's a list of matching validators I've found in your project.\nPlease narrow the selection using additional options.",
)))
}
Error::Module(e) => e.help(),
}
}
@ -695,12 +699,10 @@ impl Diagnostic for Warning {
Warning::NoConfigurationForEnv { .. } => Some(Box::new(
"When configuration keys are missing for a target environment, no 'config' module will be created. This may lead to issues down the line.",
)),
Warning::SuspiciousTestMatch { test } => Some(Box::new(
format!(
"Did you mean to match all tests within a specific module? Like so:\n\n╰─▶ {}",
format!("-m \"{test}.{{..}}\"").if_supports_color(Stderr, |s| s.bold()),
)
)),
Warning::SuspiciousTestMatch { test } => Some(Box::new(format!(
"Did you mean to match all tests within a specific module? Like so:\n\n╰─▶ {}",
format!("-m \"{test}.{{..}}\"").if_supports_color(Stderr, |s| s.bold()),
))),
}
}
}
@ -824,12 +826,8 @@ fn hint_validators(known_validators: &BTreeSet<(String, String, bool)>, hint: &s
{
known_validators
.iter()
.map(|(module, validator, has_params)| {
let title = format!(
"{:>pad_module$} . {:<pad_validator$}",
module,
validator,
);
.map(|(module, validator, has_params)| {
let title = format!("{:>pad_module$} . {:<pad_validator$}", module, validator,);
if *has_params {
title
.if_supports_color(Stderr, |s| s.bold())

View File

@ -1,4 +1,4 @@
use reqwest::{blocking::Client, header::USER_AGENT, Error};
use reqwest::{Error, blocking::Client, header::USER_AGENT};
use serde::Deserialize;
#[derive(Deserialize)]

View File

@ -21,16 +21,17 @@ mod tests;
use crate::{
blueprint::{
Blueprint,
definitions::Definitions,
schema::{Annotated, Schema},
Blueprint,
},
config::Config,
config::ProjectConfig,
error::{Error, Warning},
module::{CheckedModule, CheckedModules, ParsedModule, ParsedModules},
telemetry::Event,
};
use aiken_lang::{
IdGenerator,
ast::{
self, DataTypeKey, Definition, FunctionAccessKey, ModuleKind, Tracing, TypedDataType,
TypedFunction, UntypedDefinition,
@ -42,7 +43,7 @@ use aiken_lang::{
line_numbers::LineNumbers,
test_framework::{RunnableKind, Test, TestResult},
tipo::{Type, TypeInfo},
utils, IdGenerator,
utils,
};
use export::Export;
use indexmap::IndexMap;
@ -60,8 +61,8 @@ use std::{
};
use telemetry::EventListener;
use uplc::{
ast::{Constant, Name, Program},
PlutusData,
ast::{Constant, Name, Program},
};
#[derive(Debug)]
@ -87,7 +88,7 @@ pub struct Project<T>
where
T: EventListener,
{
config: Config,
config: ProjectConfig,
defined_modules: HashMap<String, PathBuf>,
checked_modules: CheckedModules,
id_gen: IdGenerator,
@ -108,7 +109,7 @@ where
T: EventListener,
{
pub fn new(root: PathBuf, event_listener: T) -> Result<Project<T>, Error> {
let config = Config::load(&root)?;
let config = ProjectConfig::load(&root)?;
let demanded_compiler_version = format!("v{}", config.compiler);
@ -126,7 +127,7 @@ where
Ok(project)
}
pub fn new_with_config(config: Config, root: PathBuf, event_listener: T) -> Project<T> {
pub fn new_with_config(config: ProjectConfig, root: PathBuf, event_listener: T) -> Project<T> {
let id_gen = IdGenerator::new();
let mut module_types = HashMap::new();

View File

@ -1,5 +1,6 @@
use crate::{Error, Warning};
use aiken_lang::{
IdGenerator,
ast::{
DataType, DataTypeKey, Definition, Function, FunctionAccessKey, Located, ModuleKind,
Tracing, TypedDataType, TypedFunction, TypedModule, TypedValidator, UntypedModule,
@ -7,13 +8,12 @@ use aiken_lang::{
},
expr::TypedExpr,
line_numbers::LineNumbers,
parser::extra::{comments_before, Comment, ModuleExtra},
parser::extra::{Comment, ModuleExtra, comments_before},
tipo::TypeInfo,
IdGenerator,
};
use indexmap::IndexMap;
use miette::NamedSource;
use petgraph::{algo, graph::NodeIndex, Direction, Graph};
use petgraph::{Direction, Graph, algo, graph::NodeIndex};
use std::{
collections::{BTreeSet, HashMap},
io,

View File

@ -1,5 +1,5 @@
use owo_colors::{OwoColorize, Stream::Stdout};
use serde::{de::Visitor, Deserialize, Serialize};
use serde::{Deserialize, Serialize, de::Visitor};
use std::{
fmt::{self, Display},
str::FromStr,

View File

@ -36,11 +36,7 @@ pub fn ansi_len(s: &str) -> usize {
pub fn len_longest_line(zero: usize, s: &str) -> usize {
s.lines().fold(zero, |max, l| {
let n = ansi_len(l);
if n > max {
n
} else {
max
}
if n > max { n } else { max }
})
}
@ -154,11 +150,7 @@ pub fn pad_right(mut text: String, n: usize, delimiter: &str) -> String {
}
pub fn style_if(styled: bool, s: String, apply_style: fn(String) -> String) -> String {
if styled {
apply_style(s)
} else {
s
}
if styled { apply_style(s) } else { s }
}
pub fn multiline(max_len: usize, s: String) -> Vec<String> {

View File

@ -2,7 +2,7 @@ use aiken_lang::{
expr::UntypedExpr,
test_framework::{BenchmarkResult, PropertyTestResult, TestResult, UnitTestResult},
};
pub use json::{json_schema, Json};
pub use json::{Json, json_schema};
use std::{
collections::BTreeMap,
fmt::Display,

View File

@ -1,4 +1,4 @@
use super::{group_by_module, Event, EventListener};
use super::{Event, EventListener, group_by_module};
use aiken_lang::{
ast::OnTestFailure,
expr::UntypedExpr,
@ -76,10 +76,8 @@ impl EventListener for Json {
fn fmt_test_json(result: &TestResult<UntypedExpr, UntypedExpr>) -> serde_json::Value {
let on_test_failure = match result {
TestResult::UnitTestResult(UnitTestResult { ref test, .. }) => &test.on_test_failure,
TestResult::PropertyTestResult(PropertyTestResult { ref test, .. }) => {
&test.on_test_failure
}
TestResult::UnitTestResult(UnitTestResult { test, .. }) => &test.on_test_failure,
TestResult::PropertyTestResult(PropertyTestResult { test, .. }) => &test.on_test_failure,
TestResult::BenchmarkResult(_) => unreachable!("benchmark returned in JSON output"),
};

View File

@ -1,4 +1,4 @@
use super::{find_max_execution_units, group_by_module, DownloadSource, Event, EventListener};
use super::{DownloadSource, Event, EventListener, find_max_execution_units, group_by_module};
use crate::pretty;
use aiken_lang::{
ast::OnTestFailure,

View File

@ -5,6 +5,7 @@ mod test {
utils,
};
use aiken_lang::{
IdGenerator,
ast::{DataTypeKey, Definition, ModuleKind, TraceLevel, Tracing, TypedDataType},
builtins,
expr::UntypedExpr,
@ -14,7 +15,6 @@ mod test {
parser::{self, extra::ModuleExtra},
plutus_version::PlutusVersion,
test_framework::*,
IdGenerator,
};
use indexmap::IndexMap;
use indoc::indoc;
@ -246,12 +246,14 @@ mod test {
}
"#});
assert!(TestResult::PropertyTestResult::<(), _>(prop.run(
42,
PropertyTest::DEFAULT_MAX_SUCCESS,
&PlutusVersion::default()
))
.is_success());
assert!(
TestResult::PropertyTestResult::<(), _>(prop.run(
42,
PropertyTest::DEFAULT_MAX_SUCCESS,
&PlutusVersion::default()
))
.is_success()
);
}
#[test]

View File

@ -5,6 +5,7 @@ use crate::{
utils,
};
use aiken_lang::{
IdGenerator,
ast::{
DataTypeKey, FunctionAccessKey, ModuleKind, TraceLevel, Tracing, TypedDataType,
TypedFunction,
@ -15,7 +16,6 @@ use aiken_lang::{
parser,
plutus_version::PlutusVersion,
tipo::TypeInfo,
IdGenerator,
};
use indexmap::IndexMap;
use std::{collections::HashMap, path::PathBuf};

View File

@ -1,4 +1,4 @@
use crate::{telemetry::EventTarget, Project};
use crate::{Project, config::WorkspaceConfig, telemetry::EventTarget};
use miette::{Diagnostic, IntoDiagnostic};
use notify::{Event, RecursiveMode, Watcher};
use owo_colors::{OwoColorize, Stream::Stderr};
@ -108,17 +108,57 @@ where
current_dir
};
let mut project = match Project::new(project_path, EventTarget::default()) {
Ok(p) => Ok(p),
Err(e) => {
e.report();
Err(ExitFailure::into_report())
let mut warnings = Vec::new();
let mut errs: Vec<crate::error::Error> = Vec::new();
let mut check_count = None;
if let Ok(workspace) = WorkspaceConfig::load(&project_path) {
let res_projects = workspace
.members
.into_iter()
.map(|member| Project::new(member, EventTarget::default()))
.collect::<Result<Vec<Project<_>>, crate::error::Error>>();
let projects = match res_projects {
Ok(p) => Ok(p),
Err(e) => {
e.report();
Err(ExitFailure::into_report())
}
}?;
for mut project in projects {
let build_result = action(&mut project);
warnings.extend(project.warnings());
let sum = check_count.unwrap_or(0) + project.checks_count.unwrap_or(0);
check_count = if sum > 0 { Some(sum) } else { None };
if let Err(e) = build_result {
errs.extend(e);
}
}
}?;
} else {
let mut project = match Project::new(project_path, EventTarget::default()) {
Ok(p) => Ok(p),
Err(e) => {
e.report();
Err(ExitFailure::into_report())
}
}?;
let build_result = action(&mut project);
let build_result = action(&mut project);
let warnings = project.warnings();
warnings.extend(project.warnings());
let sum = check_count.unwrap_or(0) + project.checks_count.unwrap_or(0);
check_count = if sum > 0 { Some(sum) } else { None };
if let Err(e) = build_result {
errs.extend(e);
}
}
let warning_count = warnings.len();
@ -130,7 +170,7 @@ where
}
}
if let Err(errs) = build_result {
if !errs.is_empty() {
for err in &errs {
err.report()
}
@ -138,7 +178,7 @@ where
eprintln!(
"{}",
Summary {
check_count: project.checks_count,
check_count,
warning_count,
error_count: errs.len(),
}
@ -147,16 +187,14 @@ where
return Err(ExitFailure::into_report());
}
if project.checks_count.unwrap_or_default() + warning_count > 0 {
eprintln!(
"{}",
Summary {
check_count: project.checks_count,
error_count: 0,
warning_count
}
);
}
eprintln!(
"{}",
Summary {
check_count,
error_count: 0,
warning_count
}
);
}
if warning_count > 0 && deny {
@ -172,6 +210,7 @@ where
/// // Note: doctest disabled, because aiken_project doesn't have an implementation of EventListener I can use
/// use aiken_project::watch::{watch_project, default_filter};
/// use aiken_project::{Project};
///
/// watch_project(None, default_filter, 500, |project| {
/// println!("Project changed!");
/// Ok(())

View File

@ -1,17 +1,14 @@
[package]
name = "aiken"
description = "Cardano smart contract language and toolchain"
version = "1.1.15"
edition = "2021"
repository = "https://github.com/aiken-lang/aiken"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
authors = [
"Lucas Rosa <x@rvcas.dev>",
"Kasey White <kwhitemsg@gmail.com>",
"KtorZ <matthias.benkort@gmail.com>",
]
rust-version = "1.70.0"
version.workspace = true
edition.workspace = true
description.workspace = true
license.workspace = true
authors.workspace = true
repository.workspace = true
homepage.workspace = true
documentation.workspace = true
rust-version.workspace = true
[package.metadata.wix]
upgrade-guid = "288B160D-418A-4558-91B9-7C38CFD789C7"

View File

@ -84,7 +84,7 @@ pub fn exec(
) -> miette::Result<()> {
let mut rng = rand::thread_rng();
let seed = seed.unwrap_or_else(|| rng.gen());
let seed = seed.unwrap_or_else(|| rng.r#gen());
let result = with_project(
directory.as_deref(),

View File

@ -1,5 +1,5 @@
use aiken_project::{
blueprint::{error::Error as BlueprintError, Blueprint},
blueprint::{Blueprint, error::Error as BlueprintError},
error::Error as ProjectError,
};
use clap::ValueEnum;

View File

@ -118,8 +118,8 @@ pub fn exec(
}
#[allow(clippy::type_complexity)]
pub fn trace_filter_parser(
) -> MapValueParser<PossibleValuesParser, fn(String) -> fn(TraceLevel) -> Tracing> {
pub fn trace_filter_parser()
-> MapValueParser<PossibleValuesParser, fn(String) -> fn(TraceLevel) -> Tracing> {
PossibleValuesParser::new(["user-defined", "compiler-generated", "all"]).map(
|s: String| match s.as_str() {
"user-defined" => Tracing::UserDefined,

View File

@ -133,7 +133,7 @@ pub fn exec(
let mut rng = rand::thread_rng();
let seed = seed.unwrap_or_else(|| rng.gen());
let seed = seed.unwrap_or_else(|| rng.r#gen());
let result = if watch {
watch_project(directory.as_deref(), watch::default_filter, 500, |p| {

View File

@ -1,6 +1,6 @@
use crate::{cmd::Cmd as MainCmd, pretty};
use clap::{Command, Subcommand};
use clap_complete::{generate, Shell};
use clap_complete::{Shell, generate};
use std::{
fmt::{self, Display},
fs::{File, OpenOptions},

View File

@ -1,5 +1,5 @@
use aiken_project::{
config::{self, Config},
config::{self, ProjectConfig},
package_name::{self, PackageName},
};
use indoc::{formatdoc, indoc};
@ -46,7 +46,7 @@ fn create_project(args: Args, package_name: &PackageName) -> miette::Result<()>
readme(&root, &package_name.repo)?;
Config::default(package_name)
ProjectConfig::default(package_name)
.save(&root)
.into_diagnostic()?;

View File

@ -1,5 +1,5 @@
use aiken_project::{
config::{Config, Dependency, Platform},
config::{Dependency, Platform, ProjectConfig},
error::Warning,
package_name::PackageName,
pretty,
@ -35,7 +35,7 @@ pub fn exec(args: Args) -> miette::Result<()> {
source: Platform::Github,
};
let config = match Config::load(&root) {
let config = match ProjectConfig::load(&root) {
Ok(config) => config,
Err(e) => {
e.report();

View File

@ -1,8 +1,8 @@
use miette::IntoDiagnostic;
use owo_colors::{OwoColorize, Stream::Stderr};
use pallas_primitives::{
conway::{Redeemer, TransactionInput, TransactionOutput},
Fragment,
conway::{Redeemer, TransactionInput, TransactionOutput},
};
use pallas_traverse::{Era, MultiEraTx};
use std::{fmt, fs, path::PathBuf, process};

View File

@ -9,8 +9,8 @@ use uplc::{
ast::{FakeNamedDeBruijn, Name, NamedDeBruijn, Program, Term},
builtins::DefaultFunction,
machine::{
cost_model::{ExBudget, StepKind},
TERM_COUNT,
cost_model::{ExBudget, StepKind},
},
parser,
};

View File

@ -3,7 +3,7 @@ use std::path::PathBuf;
use uplc::ast::{DeBruijn, Name, NamedDeBruijn, Program};
use uplc::optimize::aiken_optimize_and_intern;
use super::{encode, Format};
use super::{Format, encode};
#[derive(clap::Args)]
/// Shrink / Optimize UPLC code using a variety of optimization steps

View File

@ -2,11 +2,11 @@ use aiken_project::{config, pretty};
#[cfg(not(target_os = "windows"))]
use cmd::completion;
use cmd::{
benchmark,
Cmd, benchmark,
blueprint::{self, address},
build, check, docs, export, fmt, lsp, new,
packages::{self, add},
tx, uplc, Cmd,
tx, uplc,
};
use owo_colors::OwoColorize;

View File

@ -1,13 +1,15 @@
[package]
name = "uplc"
version.workspace = true
edition.workspace = true
description = "Utilities for working with Untyped Plutus Core"
version = "1.1.15"
edition = "2021"
repository = "https://github.com/aiken-lang/aiken"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
license.workspace = true
authors = ["Lucas Rosa <x@rvcas.dev>", "Kasey White <kwhitemsg@gmail.com>"]
rust-version = "1.70.0"
repository.workspace = true
homepage.workspace = true
documentation.workspace = true
rust-version.workspace = true
exclude = ["test_data/*"]
[dependencies]

View File

@ -3,9 +3,9 @@ use crate::{
debruijn::{self, Converter},
flat::Binder,
machine::{
cost_model::{initialize_cost_model, CostModel, ExBudget},
eval_result::EvalResult,
Machine,
cost_model::{CostModel, ExBudget, initialize_cost_model},
eval_result::EvalResult,
},
optimize::interner::CodeGenInterner,
};

View File

@ -799,7 +799,7 @@ mod tests {
use crate::{
ast::{Data, Name, NamedDeBruijn, Program, Term},
builder::Constant,
machine::{cost_model::ExBudget, Error},
machine::{Error, cost_model::ExBudget},
optimize::interner::CodeGenInterner,
};

View File

@ -7,11 +7,11 @@ use crate::{
};
use num_bigint::BigInt;
use pallas_codec::flat::{
Flat,
de::{self, Decode, Decoder},
en::{self, Encode, Encoder},
Flat,
};
use pallas_primitives::{conway::PlutusData, Fragment};
use pallas_primitives::{Fragment, conway::PlutusData};
use std::{collections::VecDeque, fmt::Debug, rc::Rc};
const BUILTIN_TAG_WIDTH: u32 = 7;

View File

@ -12,9 +12,9 @@ pub mod tx;
pub use pallas_codec::utils::KeyValuePairs;
pub use pallas_crypto::hash::Hash;
pub use pallas_primitives::{
Error, Fragment,
alonzo::{BigInt, Constr, PlutusData},
babbage::{PostAlonzoTransactionOutput, TransactionInput, TransactionOutput, Value},
Error, Fragment,
};
pub use tx::redeemer_tag_to_string;

View File

@ -1,4 +1,4 @@
use super::{cost_model::ExBudget, Error, Trace};
use super::{Error, Trace, cost_model::ExBudget};
use crate::ast::{Constant, NamedDeBruijn, Term};
#[derive(Debug)]

View File

@ -1,7 +1,7 @@
use super::{
Error, Trace, Value,
cost_model::{BuiltinCosts, ExBudget},
value::{from_pallas_bigint, to_pallas_bigint},
Error, Trace, Value,
};
use crate::{
ast::{Constant, Data, Type},
@ -1882,7 +1882,7 @@ pub static ANY_TAG: u64 = 102;
#[cfg(not(target_family = "wasm"))]
fn verify_ecdsa(public_key: &[u8], message: &[u8], signature: &[u8]) -> Result<Value, Error> {
use secp256k1::{ecdsa::Signature, Message, PublicKey, Secp256k1};
use secp256k1::{Message, PublicKey, Secp256k1, ecdsa::Signature};
let secp = Secp256k1::verification_only();
@ -1901,7 +1901,7 @@ fn verify_ecdsa(public_key: &[u8], message: &[u8], signature: &[u8]) -> Result<V
/// The message needs to be 32 bytes (ideally prehashed, but not a requirement).
#[cfg(not(target_family = "wasm"))]
fn verify_schnorr(public_key: &[u8], message: &[u8], signature: &[u8]) -> Result<Value, Error> {
use secp256k1::{schnorr::Signature, Message, Secp256k1, XOnlyPublicKey};
use secp256k1::{Message, Secp256k1, XOnlyPublicKey, schnorr::Signature};
let secp = Secp256k1::verification_only();

View File

@ -1,6 +1,6 @@
use super::{
runtime::{self, BuiltinRuntime},
Error,
runtime::{self, BuiltinRuntime},
};
use crate::{
ast::{Constant, NamedDeBruijn, Term, Type},
@ -480,7 +480,7 @@ pub fn to_pallas_bigint(n: &BigInt) -> conway::BigInt {
mod tests {
use crate::{
ast::Constant,
machine::value::{integer_log2, Value},
machine::value::{Value, integer_log2},
};
use num_bigint::BigInt;

Some files were not shown because too many files have changed in this diff Show More