Merge pull request #627 from aiken-lang/rvcas/parser_refactor

Parser Refactor
Matthias Benkort 2023-07-05 18:32:48 +02:00 committed by GitHub
commit 82dc795ef1
103 changed files with 6343 additions and 6903 deletions

Cargo.lock

@@ -83,6 +83,7 @@ dependencies = [
"hex",
"indexmap",
"indoc",
"insta",
"itertools",
"miette",
"num-bigint",
@@ -525,6 +526,18 @@ dependencies = [
"windows-sys 0.45.0",
]
[[package]]
name = "console"
version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8"
dependencies = [
"encode_unicode",
"lazy_static",
"libc",
"windows-sys 0.45.0",
]
[[package]]
name = "const-oid"
version = "0.9.2"
@@ -738,6 +751,12 @@ dependencies = [
"zeroize",
]
[[package]]
name = "encode_unicode"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]]
name = "encoding_rs"
version = "0.8.32"
@@ -1208,6 +1227,20 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f2cb48b81b1dc9f39676bf99f5499babfec7cd8fe14307f7b3d747208fb5690"
[[package]]
name = "insta"
version = "1.30.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28491f7753051e5704d4d0ae7860d45fae3238d7d235bc4289dcd45c48d3cec3"
dependencies = [
"console",
"lazy_static",
"linked-hash-map",
"serde",
"similar",
"yaml-rust",
]
[[package]]
name = "instant"
version = "0.1.12"
@@ -1341,6 +1374,12 @@ dependencies = [
"vcpkg",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linux-raw-sys"
version = "0.3.1"
@@ -2385,6 +2424,12 @@ dependencies = [
"rand_core",
]
[[package]]
name = "similar"
version = "2.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf"
[[package]]
name = "slab"
version = "0.4.8"
@@ -3257,6 +3302,15 @@ dependencies = [
"winapi",
]
[[package]]
name = "yaml-rust"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
dependencies = [
"linked-hash-map",
]
[[package]]
name = "yansi"
version = "0.5.1"

@@ -7,3 +7,12 @@ strip = true
[workspace.metadata.release]
shared-version = true
tag-name = "v{{version}}"
[workspace.dependencies]
insta = { version = "1.30.0", features = ["yaml"] }
[profile.dev.package.insta]
opt-level = 3
[profile.dev.package.similar]
opt-level = 3
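Note: insta is added as a shared workspace dev-dependency, and opt-level = 3 is forced for insta and similar so snapshot diffing stays fast in debug builds. For orientation only, a hand-rolled test in the same spirit as the assert_expr!/assert_definition! snapshot macros used throughout this PR could look like the sketch below; the module name, test name, and value are illustrative and not part of the change.

#[cfg(test)]
mod example {
    // Illustrative sketch: records a snapshot with a "description" header like
    // the ones visible in the snapshot files further down in this diff.
    #[test]
    fn example_snapshot() {
        let parsed = vec![1, 2, 3]; // stand-in for a parsed AST value
        insta::with_settings!({ description => "Code:\n\n[1, 2, 3]" }, {
            insta::assert_debug_snapshot!(parsed);
        });
    }
}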

@@ -7,9 +7,9 @@ repository = "https://github.com/aiken-lang/aiken"
homepage = "https://github.com/aiken-lang/aiken"
license = "Apache-2.0"
authors = [
"Lucas Rosa <x@rvcas.dev>",
"Kasey White <kwhitemsg@gmail.com>",
"KtorZ <matthias.benkort@gmail.com>",
"Lucas Rosa <x@rvcas.dev>",
"Kasey White <kwhitemsg@gmail.com>",
"KtorZ <matthias.benkort@gmail.com>",
]
rust-version = "1.66.1"
@@ -30,8 +30,12 @@ num-bigint = "0.4.3"
[target.'cfg(not(target_family="wasm"))'.dependencies]
chumsky = "0.9.2"
[target.'cfg(target_family="wasm")'.dependencies]
chumsky = { version = "0.9.2", features = ["ahash", "std"], default-features = false }
chumsky = { version = "0.9.2", features = [
"ahash",
"std",
], default-features = false }
[dev-dependencies]
indoc = "2.0.1"
insta.workspace = true
pretty_assertions = "1.3.0"

@@ -1167,6 +1167,12 @@ impl Span {
Self::new((), 0..0)
}
pub fn create(i: usize, n: usize) -> Self {
use chumsky::Span;
Self::new((), i..i + n)
}
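// For example (illustrative values, not from the diff): `Span::create(24, 5)`
// builds the span covering the half-open byte range 24..29, i.e. `range()`
// below yields 24..29. This 0-based offset form is what the `location:` fields
// in the snapshot files of this PR refer to.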
pub fn range(&self) -> Range<usize> {
use chumsky::Span;

File diff suppressed because it is too large.

@@ -0,0 +1,105 @@
use chumsky::prelude::*;
use crate::ast;
use super::{error::ParseError, token::Token};
pub fn parser() -> impl Parser<Token, ast::Annotation, Error = ParseError> {
recursive(|expression| {
choice((
// Type hole
select! {Token::DiscardName { name } => name}.map_with_span(|name, span| {
ast::Annotation::Hole {
location: span,
name,
}
}),
// Tuple
expression
.clone()
.separated_by(just(Token::Comma))
.at_least(2)
.allow_trailing()
.delimited_by(
choice((just(Token::LeftParen), just(Token::NewLineLeftParen))),
just(Token::RightParen),
)
.map_with_span(|elems, span| ast::Annotation::Tuple {
location: span,
elems,
}),
// Function
just(Token::Fn)
.ignore_then(
expression
.clone()
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftParen), just(Token::RightParen)),
)
.then_ignore(just(Token::RArrow))
.then(expression.clone())
.map_with_span(|(arguments, ret), span| ast::Annotation::Fn {
location: span,
arguments,
ret: Box::new(ret),
}),
// Constructor function
select! {Token::UpName { name } => name}
.then(
expression
.clone()
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::Less), just(Token::Greater))
.or_not(),
)
.map_with_span(|(name, arguments), span| ast::Annotation::Constructor {
location: span,
module: None,
name,
arguments: arguments.unwrap_or_default(),
}),
// Module-qualified constructor or type variable
select! {Token::Name { name } => name}
.then(
just(Token::Dot)
.ignore_then(select! {Token::UpName {name} => name})
.then(
expression
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::Less), just(Token::Greater))
.or_not(),
)
.or_not(),
)
.map_with_span(|(mod_name, opt_dot), span| {
if let Some((name, arguments)) = opt_dot {
ast::Annotation::Constructor {
location: span,
module: Some(mod_name),
name,
arguments: arguments.unwrap_or_default(),
}
} else {
// TODO: parse_error(ParseErrorType::NotConstType, SrcSpan { start, end })
ast::Annotation::Var {
location: span,
name: mod_name,
}
}
}),
))
})
}
#[cfg(test)]
mod tests {
use crate::assert_annotation;
#[test]
fn type_annotation_with_module_prefix() {
assert_annotation!("aiken.Option<Int>");
}
}

@@ -0,0 +1,38 @@
use chumsky::prelude::*;
use super::{Chain, ParserArg};
use crate::{
ast,
expr::UntypedExpr,
parser::{token::Token, ParseError},
};
pub(crate) fn parser(
expression: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, Chain, Error = ParseError> + '_ {
choice((
select! { Token::Name { name } => name }
.then_ignore(just(Token::Colon))
.or_not()
.then(expression)
.map_with_span(|(label, value), span| {
ParserArg::Arg(Box::new(ast::CallArg {
label,
location: span,
value,
}))
}),
select! { Token::Name { name } => name }
.then_ignore(just(Token::Colon))
.or_not()
.then_ignore(select! {Token::DiscardName {name} => name })
.map_with_span(|label, span| ParserArg::Hole {
location: span,
label,
}),
))
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftParen), just(Token::RightParen))
.map_with_span(Chain::Call)
}

@@ -0,0 +1,30 @@
use chumsky::prelude::*;
use super::Chain;
use crate::{
expr::UntypedExpr,
parser::{token::Token, ParseError},
};
pub(crate) fn parser() -> impl Parser<Token, Chain, Error = ParseError> {
just(Token::Dot)
.ignore_then(select! {
Token::Name { name } => name,
})
.map_with_span(Chain::FieldAccess)
}
pub(crate) fn constructor() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
select! {Token::Name { name } => name}
.map_with_span(|module, span| (module, span))
.then_ignore(just(Token::Dot))
.then(select! {Token::UpName { name } => name})
.map_with_span(|((module, m_span), name), span| UntypedExpr::FieldAccess {
location: span,
label: name,
container: Box::new(UntypedExpr::Var {
location: m_span,
name: module,
}),
})
}

@@ -0,0 +1,22 @@
use crate::ast::{self, Span};
use crate::expr::UntypedExpr;
pub(crate) mod call;
pub(crate) mod field_access;
pub(crate) mod tuple_index;
pub(crate) enum Chain {
Call(Vec<ParserArg>, Span),
FieldAccess(String, Span),
TupleIndex(usize, Span),
}
// A single call argument as parsed: either a regular argument, or a `_` hole
// that is later turned into an anonymous-function capture.
#[derive(Debug)]
pub(crate) enum ParserArg {
Arg(Box<ast::CallArg<UntypedExpr>>),
Hole {
location: Span,
label: Option<String>,
},
}

@@ -0,0 +1,23 @@
use chumsky::prelude::*;
use super::Chain;
use crate::parser::{token::Token, ParseError};
pub(crate) fn parser() -> impl Parser<Token, Chain, Error = ParseError> {
just(Token::Dot)
.ignore_then(select! {
Token::Ordinal { index } => index,
})
.validate(|index, span, emit| {
if index < 1 {
emit(ParseError::invalid_tuple_index(
span,
index.to_string(),
None,
));
Chain::TupleIndex(0, span)
} else {
Chain::TupleIndex(index as usize - 1, span)
}
})
}

@@ -0,0 +1,64 @@
use chumsky::prelude::*;
use crate::{
ast,
parser::{
annotation, error::ParseError, literal::bytearray::parser as bytearray, token::Token, utils,
},
};
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
utils::optional_flag(Token::Pub)
.then_ignore(just(Token::Const))
.then(select! {Token::Name{name} => name})
.then(
just(Token::Colon)
.ignore_then(annotation::parser())
.or_not(),
)
.then_ignore(just(Token::Equal))
.then(value())
.map_with_span(|(((public, name), annotation), value), span| {
ast::UntypedDefinition::ModuleConstant(ast::ModuleConstant {
doc: None,
location: span,
public,
name,
annotation,
value: Box::new(value),
tipo: (),
})
})
}
pub fn value() -> impl Parser<Token, ast::Constant, Error = ParseError> {
let constant_string_parser =
select! {Token::String {value} => value}.map_with_span(|value, span| {
ast::Constant::String {
location: span,
value,
}
});
let constant_int_parser =
select! {Token::Int {value, base} => (value, base)}.map_with_span(|(value, base), span| {
ast::Constant::Int {
location: span,
value,
base,
}
});
let constant_bytearray_parser =
bytearray(|bytes, preferred_format, span| ast::Constant::ByteArray {
location: span,
bytes,
preferred_format,
});
choice((
constant_string_parser,
constant_int_parser,
constant_bytearray_parser,
))
}

@@ -0,0 +1,122 @@
use chumsky::prelude::*;
use crate::{
ast,
parser::{annotation, error::ParseError, token::Token, utils},
};
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
let unlabeled_constructor_type_args = annotation()
.map_with_span(|annotation, span| ast::RecordConstructorArg {
label: None,
annotation,
tipo: (),
doc: None,
location: span,
})
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftParen), just(Token::RightParen));
let constructors = select! {Token::UpName { name } => name}
.then(
choice((
labeled_constructor_type_args(),
unlabeled_constructor_type_args,
))
.or_not(),
)
.map_with_span(|(name, arguments), span| ast::RecordConstructor {
location: span,
arguments: arguments.unwrap_or_default(),
name,
doc: None,
sugar: false,
})
.repeated()
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace));
let record_sugar = labeled_constructor_type_args().map_with_span(|arguments, span| {
vec![ast::RecordConstructor {
location: span,
arguments,
doc: None,
name: String::from("_replace"),
sugar: true,
}]
});
utils::optional_flag(Token::Pub)
.then(utils::optional_flag(Token::Opaque))
.then(utils::type_name_with_args())
.then(choice((constructors, record_sugar)))
.map_with_span(
|(((public, opaque), (name, parameters)), constructors), span| {
ast::UntypedDefinition::DataType(ast::DataType {
location: span,
constructors: constructors
.into_iter()
.map(|mut constructor| {
if constructor.sugar {
constructor.name = name.clone();
}
constructor
})
.collect(),
doc: None,
name,
opaque,
parameters: parameters.unwrap_or_default(),
public,
typed_parameters: vec![],
})
},
)
}
fn labeled_constructor_type_args(
) -> impl Parser<Token, Vec<ast::RecordConstructorArg<()>>, Error = ParseError> {
select! {Token::Name {name} => name}
.then_ignore(just(Token::Colon))
.then(annotation())
.map_with_span(|(name, annotation), span| ast::RecordConstructorArg {
label: Some(name),
annotation,
tipo: (),
doc: None,
location: span,
})
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace))
}
#[cfg(test)]
mod tests {
use crate::assert_definition;
#[test]
fn custom_type() {
assert_definition!(
r#"
type Option<a> {
Some(a, Int)
None
Wow { name: Int, age: Int }
}
"#
);
}
#[test]
fn opaque_type() {
assert_definition!(
r#"
pub opaque type User {
name: _w
}
"#
);
}
}

@@ -0,0 +1,111 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::UntypedExpr,
parser::{annotation, error::ParseError, expr, token::Token, utils},
};
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
utils::optional_flag(Token::Pub)
.then_ignore(just(Token::Fn))
.then(select! {Token::Name {name} => name})
.then(
param(false)
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftParen), just(Token::RightParen))
.map_with_span(|arguments, span| (arguments, span)),
)
.then(just(Token::RArrow).ignore_then(annotation()).or_not())
.then(
expr::sequence()
.or_not()
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace)),
)
.map_with_span(
|((((public, name), (arguments, args_span)), return_annotation), body), span| {
ast::UntypedDefinition::Fn(ast::Function {
arguments,
body: body.unwrap_or_else(|| UntypedExpr::todo(span, None)),
doc: None,
location: ast::Span {
start: span.start,
end: return_annotation
.as_ref()
.map(|l| l.location().end)
.unwrap_or_else(|| args_span.end),
},
end_position: span.end - 1,
name,
public,
return_annotation,
return_type: (),
can_error: true,
})
},
)
}
pub fn param(is_validator_param: bool) -> impl Parser<Token, ast::UntypedArg, Error = ParseError> {
choice((
select! {Token::Name {name} => name}
.then(select! {Token::DiscardName {name} => name})
.map_with_span(|(label, name), span| ast::ArgName::Discarded {
label,
name,
location: span,
}),
select! {Token::DiscardName {name} => name}.map_with_span(|name, span| {
ast::ArgName::Discarded {
label: name.clone(),
name,
location: span,
}
}),
select! {Token::Name {name} => name}
.then(select! {Token::Name {name} => name})
.map_with_span(move |(label, name), span| ast::ArgName::Named {
label,
name,
location: span,
is_validator_param,
}),
select! {Token::Name {name} => name}.map_with_span(move |name, span| ast::ArgName::Named {
label: name.clone(),
name,
location: span,
is_validator_param,
}),
))
.then(just(Token::Colon).ignore_then(annotation()).or_not())
.map_with_span(|(arg_name, annotation), span| ast::Arg {
location: span,
annotation,
tipo: (),
arg_name,
})
}
#[cfg(test)]
mod tests {
use crate::assert_definition;
#[test]
fn function_empty() {
assert_definition!(
r#"
pub fn run() {}
"#
);
}
#[test]
fn function_non_public() {
assert_definition!(
r#"
fn run() {}
"#
);
}
}

@@ -0,0 +1,81 @@
use chumsky::prelude::*;
use crate::{
ast,
parser::{error::ParseError, token::Token},
};
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
let unqualified_import = choice((
select! {Token::Name { name } => name}.then(
just(Token::As)
.ignore_then(select! {Token::Name { name } => name})
.or_not(),
),
select! {Token::UpName { name } => name}.then(
just(Token::As)
.ignore_then(select! {Token::UpName { name } => name})
.or_not(),
),
))
.map_with_span(|(name, as_name), span| ast::UnqualifiedImport {
name,
location: span,
as_name,
layer: Default::default(),
});
let unqualified_imports = just(Token::Dot)
.ignore_then(
unqualified_import
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace)),
)
.or_not();
let as_name = just(Token::As)
.ignore_then(select! {Token::Name { name } => name})
.or_not();
let module_path = select! {Token::Name { name } => name}
.separated_by(just(Token::Slash))
.then(unqualified_imports)
.then(as_name);
just(Token::Use).ignore_then(module_path).map_with_span(
|((module, unqualified), as_name), span| {
ast::UntypedDefinition::Use(ast::Use {
module,
as_name,
unqualified: unqualified.unwrap_or_default(),
package: (),
location: span,
})
},
)
}
#[cfg(test)]
mod tests {
use crate::assert_definition;
#[test]
fn import_basic() {
assert_definition!("use aiken/list");
}
#[test]
fn import_unqualified() {
assert_definition!(
r#"
use std/address.{Address as A, thing as w}
"#
);
}
#[test]
fn import_alias() {
assert_definition!("use aiken/list as foo");
}
}

@@ -0,0 +1,32 @@
use chumsky::prelude::*;
pub mod constant;
mod data_type;
mod function;
mod import;
mod test;
mod type_alias;
mod validator;
pub use constant::parser as constant;
pub use data_type::parser as data_type;
pub use function::parser as function;
pub use import::parser as import;
pub use test::parser as test;
pub use type_alias::parser as type_alias;
pub use validator::parser as validator;
use super::{error::ParseError, token::Token};
use crate::ast;
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
choice((
import(),
data_type(),
type_alias(),
validator(),
function(),
test(),
constant(),
))
}

@@ -0,0 +1,92 @@
---
source: crates/aiken-lang/src/parser/definition/data_type.rs
description: "Code:\n\ntype Option<a> {\n Some(a, Int)\n None\n Wow { name: Int, age: Int }\n}\n"
---
DataType(
DataType {
constructors: [
RecordConstructor {
location: 19..31,
name: "Some",
arguments: [
RecordConstructorArg {
label: None,
annotation: Var {
location: 24..25,
name: "a",
},
location: 24..25,
tipo: (),
doc: None,
},
RecordConstructorArg {
label: None,
annotation: Constructor {
location: 27..30,
module: None,
name: "Int",
arguments: [],
},
location: 27..30,
tipo: (),
doc: None,
},
],
doc: None,
sugar: false,
},
RecordConstructor {
location: 34..38,
name: "None",
arguments: [],
doc: None,
sugar: false,
},
RecordConstructor {
location: 41..68,
name: "Wow",
arguments: [
RecordConstructorArg {
label: Some(
"name",
),
annotation: Constructor {
location: 53..56,
module: None,
name: "Int",
arguments: [],
},
location: 47..56,
tipo: (),
doc: None,
},
RecordConstructorArg {
label: Some(
"age",
),
annotation: Constructor {
location: 63..66,
module: None,
name: "Int",
arguments: [],
},
location: 58..66,
tipo: (),
doc: None,
},
],
doc: None,
sugar: false,
},
],
doc: None,
location: 0..70,
name: "Option",
opaque: false,
parameters: [
"a",
],
public: false,
typed_parameters: [],
},
)

@@ -0,0 +1,101 @@
---
source: crates/aiken-lang/src/parser/definition/validator.rs
description: "Code:\n\nvalidator {\n fn foo(datum, rdmr, ctx) {\n True\n }\n\n fn bar(rdmr, ctx) {\n True\n }\n}\n"
---
Validator(
Validator {
doc: None,
end_position: 90,
fun: Function {
arguments: [
Arg {
arg_name: Named {
name: "datum",
label: "datum",
location: 21..26,
is_validator_param: false,
},
location: 21..26,
annotation: None,
tipo: (),
},
Arg {
arg_name: Named {
name: "rdmr",
label: "rdmr",
location: 28..32,
is_validator_param: false,
},
location: 28..32,
annotation: None,
tipo: (),
},
Arg {
arg_name: Named {
name: "ctx",
label: "ctx",
location: 34..37,
is_validator_param: false,
},
location: 34..37,
annotation: None,
tipo: (),
},
],
body: Var {
location: 45..49,
name: "True",
},
doc: None,
location: 14..38,
name: "foo",
public: false,
return_annotation: None,
return_type: (),
end_position: 52,
can_error: true,
},
other_fun: Some(
Function {
arguments: [
Arg {
arg_name: Named {
name: "rdmr",
label: "rdmr",
location: 64..68,
is_validator_param: false,
},
location: 64..68,
annotation: None,
tipo: (),
},
Arg {
arg_name: Named {
name: "ctx",
label: "ctx",
location: 70..73,
is_validator_param: false,
},
location: 70..73,
annotation: None,
tipo: (),
},
],
body: Var {
location: 81..85,
name: "True",
},
doc: None,
location: 57..74,
name: "bar",
public: false,
return_annotation: None,
return_type: (),
end_position: 88,
can_error: true,
},
),
location: 0..9,
params: [],
},
)

@@ -0,0 +1,45 @@
---
source: crates/aiken-lang/src/parser/definition/test.rs
description: "Code:\n\n!test invalid_inputs() {\n expect True = False\n\n False\n}\n"
---
Test(
Function {
arguments: [],
body: Sequence {
location: 27..55,
expressions: [
Assignment {
location: 27..46,
value: Var {
location: 41..46,
name: "False",
},
pattern: Constructor {
is_record: false,
location: 34..38,
name: "True",
arguments: [],
module: None,
constructor: (),
with_spread: false,
tipo: (),
},
kind: Expect,
annotation: None,
},
Var {
location: 50..55,
name: "False",
},
],
},
doc: None,
location: 0..22,
name: "invalid_inputs",
public: false,
return_annotation: None,
return_type: (),
end_position: 56,
can_error: true,
},
)

@@ -0,0 +1,28 @@
---
source: crates/aiken-lang/src/parser/definition/function.rs
description: "Code:\n\npub fn run() {}\n"
---
Fn(
Function {
arguments: [],
body: Trace {
kind: Todo,
location: 0..15,
then: ErrorTerm {
location: 0..15,
},
text: String {
location: 0..15,
value: "aiken::todo",
},
},
doc: None,
location: 0..12,
name: "run",
public: true,
return_annotation: None,
return_type: (),
end_position: 14,
can_error: true,
},
)

@@ -0,0 +1,28 @@
---
source: crates/aiken-lang/src/parser/definition/function.rs
description: "Code:\n\nfn run() {}\n"
---
Fn(
Function {
arguments: [],
body: Trace {
kind: Todo,
location: 0..11,
then: ErrorTerm {
location: 0..11,
},
text: String {
location: 0..11,
value: "aiken::todo",
},
},
doc: None,
location: 0..8,
name: "run",
public: false,
return_annotation: None,
return_type: (),
end_position: 10,
can_error: true,
},
)

@@ -0,0 +1,18 @@
---
source: crates/aiken-lang/src/parser/definition/import.rs
description: "Code:\n\nuse aiken/list as foo"
---
Use(
Use {
as_name: Some(
"foo",
),
location: 0..21,
module: [
"aiken",
"list",
],
package: (),
unqualified: [],
},
)

@@ -0,0 +1,16 @@
---
source: crates/aiken-lang/src/parser/definition/import.rs
description: "Code:\n\nuse aiken/list"
---
Use(
Use {
as_name: None,
location: 0..14,
module: [
"aiken",
"list",
],
package: (),
unqualified: [],
},
)

@@ -0,0 +1,33 @@
---
source: crates/aiken-lang/src/parser/definition/import.rs
description: "Code:\n\nuse std/address.{Address as A, thing as w}\n"
---
Use(
Use {
as_name: None,
location: 0..42,
module: [
"std",
"address",
],
package: (),
unqualified: [
UnqualifiedImport {
location: 17..29,
name: "Address",
as_name: Some(
"A",
),
layer: Value,
},
UnqualifiedImport {
location: 31..41,
name: "thing",
as_name: Some(
"w",
),
layer: Value,
},
],
},
)

@@ -0,0 +1,37 @@
---
source: crates/aiken-lang/src/parser/definition/data_type.rs
description: "Code:\n\npub opaque type User {\n name: _w\n}\n"
---
DataType(
DataType {
constructors: [
RecordConstructor {
location: 21..35,
name: "User",
arguments: [
RecordConstructorArg {
label: Some(
"name",
),
annotation: Hole {
location: 31..33,
name: "_w",
},
location: 25..33,
tipo: (),
doc: None,
},
],
doc: None,
sugar: true,
},
],
doc: None,
location: 0..35,
name: "User",
opaque: true,
parameters: [],
public: true,
typed_parameters: [],
},
)

@@ -0,0 +1,20 @@
---
source: crates/aiken-lang/src/parser/definition/type_alias.rs
description: "Code:\n\ntype Thing = Int"
---
TypeAlias(
TypeAlias {
alias: "Thing",
annotation: Constructor {
location: 13..16,
module: None,
name: "Int",
arguments: [],
},
doc: None,
location: 0..16,
parameters: [],
public: false,
tipo: (),
},
)

@@ -0,0 +1,20 @@
---
source: crates/aiken-lang/src/parser/definition/type_alias.rs
description: "Code:\n\npub type Me = String"
---
TypeAlias(
TypeAlias {
alias: "Me",
annotation: Constructor {
location: 14..20,
module: None,
name: "String",
arguments: [],
},
doc: None,
location: 0..20,
parameters: [],
public: true,
tipo: (),
},
)

@@ -0,0 +1,31 @@
---
source: crates/aiken-lang/src/parser/definition/type_alias.rs
description: "Code:\n\ntype RoyaltyToken = (PolicyId, AssetName)"
---
TypeAlias(
TypeAlias {
alias: "RoyaltyToken",
annotation: Tuple {
location: 20..41,
elems: [
Constructor {
location: 21..29,
module: None,
name: "PolicyId",
arguments: [],
},
Constructor {
location: 31..40,
module: None,
name: "AssetName",
arguments: [],
},
],
},
doc: None,
location: 0..41,
parameters: [],
public: false,
tipo: (),
},
)

@@ -0,0 +1,62 @@
---
source: crates/aiken-lang/src/parser/definition/validator.rs
description: "Code:\n\nvalidator {\n fn foo(datum, rdmr, ctx) {\n True\n }\n}\n"
---
Validator(
Validator {
doc: None,
end_position: 54,
fun: Function {
arguments: [
Arg {
arg_name: Named {
name: "datum",
label: "datum",
location: 21..26,
is_validator_param: false,
},
location: 21..26,
annotation: None,
tipo: (),
},
Arg {
arg_name: Named {
name: "rdmr",
label: "rdmr",
location: 28..32,
is_validator_param: false,
},
location: 28..32,
annotation: None,
tipo: (),
},
Arg {
arg_name: Named {
name: "ctx",
label: "ctx",
location: 34..37,
is_validator_param: false,
},
location: 34..37,
annotation: None,
tipo: (),
},
],
body: Var {
location: 45..49,
name: "True",
},
doc: None,
location: 14..38,
name: "foo",
public: false,
return_annotation: None,
return_type: (),
end_position: 52,
can_error: true,
},
other_fun: None,
location: 0..9,
params: [],
},
)

@@ -0,0 +1,55 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::UntypedExpr,
parser::{error::ParseError, expr, token::Token},
};
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
just(Token::Bang)
.ignored()
.or_not()
.then_ignore(just(Token::Test))
.then(select! {Token::Name {name} => name})
.then_ignore(just(Token::LeftParen))
.then_ignore(just(Token::RightParen))
.map_with_span(|name, span| (name, span))
.then(
expr::sequence()
.or_not()
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace)),
)
.map_with_span(|(((fail, name), span_end), body), span| {
ast::UntypedDefinition::Test(ast::Function {
arguments: vec![],
body: body.unwrap_or_else(|| UntypedExpr::todo(span, None)),
doc: None,
location: span_end,
end_position: span.end - 1,
name,
public: false,
return_annotation: None,
return_type: (),
can_error: fail.is_some(),
})
})
}
#[cfg(test)]
mod tests {
use crate::assert_definition;
#[test]
fn test_fail() {
assert_definition!(
r#"
!test invalid_inputs() {
expect True = False
False
}
"#
);
}
}

@@ -0,0 +1,53 @@
use chumsky::prelude::*;
use crate::{
ast,
parser::{annotation, error::ParseError, token::Token, utils},
};
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
utils::optional_flag(Token::Pub)
.then(utils::type_name_with_args())
.then_ignore(just(Token::Equal))
.then(annotation())
.map_with_span(|((public, (alias, parameters)), annotation), span| {
ast::UntypedDefinition::TypeAlias(ast::TypeAlias {
alias,
annotation,
doc: None,
location: span,
parameters: parameters.unwrap_or_default(),
public,
tipo: (),
})
})
}
#[cfg(test)]
mod tests {
use crate::assert_definition;
#[test]
fn type_alias_tuple() {
assert_definition!(
r#"
type RoyaltyToken = (PolicyId, AssetName)"#
);
}
#[test]
fn type_alias_basic() {
assert_definition!(
r#"
type Thing = Int"#
);
}
#[test]
fn type_alias_pub() {
assert_definition!(
r#"
pub type Me = String"#
);
}
}

@@ -0,0 +1,95 @@
use chumsky::prelude::*;
use crate::{
ast,
parser::{error::ParseError, token::Token},
};
use super::function;
pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
just(Token::Validator)
.ignore_then(
function::param(true)
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftParen), just(Token::RightParen))
.map_with_span(|arguments, span| (arguments, span))
.or_not(),
)
.then(
function()
.repeated()
.at_least(1)
.at_most(2)
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace))
.map(|defs| {
defs.into_iter().map(|def| {
let ast::UntypedDefinition::Fn(fun) = def else {
unreachable!("It should be a fn definition");
};
fun
})
}),
)
.map_with_span(|(opt_extra_params, mut functions), span| {
let (params, params_span) = opt_extra_params.unwrap_or((
vec![],
ast::Span {
start: 0,
end: span.start + "validator".len(),
},
));
ast::UntypedDefinition::Validator(ast::Validator {
doc: None,
fun: functions
.next()
.expect("unwrapping safe because there's 'at_least(1)' function"),
other_fun: functions.next(),
location: ast::Span {
start: span.start,
// capture the span from the optional params
end: params_span.end,
},
params,
end_position: span.end - 1,
})
})
}
#[cfg(test)]
mod tests {
use crate::assert_definition;
#[test]
fn validator() {
assert_definition!(
r#"
validator {
fn foo(datum, rdmr, ctx) {
True
}
}
"#
);
}
#[test]
fn double_validator() {
assert_definition!(
r#"
validator {
fn foo(datum, rdmr, ctx) {
True
}
fn bar(rdmr, ctx) {
True
}
}
"#
);
}
}

@@ -0,0 +1,114 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::{FnStyle, UntypedExpr},
parser::{error::ParseError, token::Token},
};
pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
select! {
Token::EqualEqual => ast::BinOp::Eq,
Token::NotEqual => ast::BinOp::NotEq,
Token::Less => ast::BinOp::LtInt,
Token::LessEqual => ast::BinOp::LtEqInt,
Token::Greater => ast::BinOp::GtInt,
Token::GreaterEqual => ast::BinOp::GtEqInt,
Token::VbarVbar => ast::BinOp::Or,
Token::AmperAmper => ast::BinOp::And,
Token::Plus => ast::BinOp::AddInt,
Token::Minus => ast::BinOp::SubInt,
Token::Slash => ast::BinOp::DivInt,
Token::Star => ast::BinOp::MultInt,
Token::Percent => ast::BinOp::ModInt,
}
.map_with_span(|name, location| {
use ast::BinOp::*;
let arg_annotation = match name {
Or | And => Some(ast::Annotation::boolean(location)),
Eq | NotEq => None,
LtInt | LtEqInt | GtInt | GtEqInt | AddInt | SubInt | MultInt | DivInt | ModInt => {
Some(ast::Annotation::int(location))
}
};
let return_annotation = match name {
Or | And | Eq | NotEq | LtInt | LtEqInt | GtInt | GtEqInt => {
Some(ast::Annotation::boolean(location))
}
AddInt | SubInt | MultInt | DivInt | ModInt => Some(ast::Annotation::int(location)),
};
let arguments = vec![
ast::Arg {
arg_name: ast::ArgName::Named {
name: "left".to_string(),
label: "left".to_string(),
location,
is_validator_param: false,
},
annotation: arg_annotation.clone(),
location,
tipo: (),
},
ast::Arg {
arg_name: ast::ArgName::Named {
name: "right".to_string(),
label: "right".to_string(),
location,
is_validator_param: false,
},
annotation: arg_annotation,
location,
tipo: (),
},
];
let body = UntypedExpr::BinOp {
location,
name,
left: Box::new(UntypedExpr::Var {
location,
name: "left".to_string(),
}),
right: Box::new(UntypedExpr::Var {
location,
name: "right".to_string(),
}),
};
UntypedExpr::Fn {
arguments,
body: Box::new(body),
return_annotation,
fn_style: FnStyle::BinOp(name),
location,
}
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn first_class_binop() {
assert_expr!(
r#"
compare_with(a, >, b)
compare_with(a, >=, b)
compare_with(a, <, b)
compare_with(a, <=, b)
compare_with(a, ==, b)
compare_with(a, !=, b)
combine_with(a, &&, b)
combine_with(a, ||, b)
compute_with(a, +, b)
compute_with(a, -, b)
compute_with(a, /, b)
compute_with(a, *, b)
compute_with(a, %, b)"#
);
}
}

@@ -0,0 +1,66 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::{FnStyle, UntypedExpr},
parser::{annotation, error::ParseError, token::Token},
};
pub fn parser(
sequence: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
just(Token::Fn)
.ignore_then(
params()
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftParen), just(Token::RightParen)),
)
.then(just(Token::RArrow).ignore_then(annotation()).or_not())
.then(sequence.delimited_by(just(Token::LeftBrace), just(Token::RightBrace)))
.map_with_span(
|((arguments, return_annotation), body), span| UntypedExpr::Fn {
arguments,
body: Box::new(body),
location: span,
fn_style: FnStyle::Plain,
return_annotation,
},
)
}
pub fn params() -> impl Parser<Token, ast::UntypedArg, Error = ParseError> {
// TODO: return a better error when a label is provided `UnexpectedLabel`
choice((
select! {Token::DiscardName {name} => name}.map_with_span(|name, span| {
ast::ArgName::Discarded {
label: name.clone(),
name,
location: span,
}
}),
select! {Token::Name {name} => name}.map_with_span(|name, span| ast::ArgName::Named {
label: name.clone(),
name,
location: span,
is_validator_param: false,
}),
))
.then(just(Token::Colon).ignore_then(annotation()).or_not())
.map_with_span(|(arg_name, annotation), span| ast::Arg {
location: span,
annotation,
tipo: (),
arg_name,
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn anonymous_function_basic() {
assert_expr!(r#"fn (a: Int) -> Int { a + 1 }"#);
}
}

@@ -0,0 +1,59 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::UntypedExpr,
parser::{annotation, error::ParseError, pattern, token::Token},
};
pub fn let_(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
assignment(r, ast::AssignmentKind::Let)
}
pub fn expect(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
assignment(r, ast::AssignmentKind::Expect)
}
fn assignment(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
kind: ast::AssignmentKind,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
let keyword = match kind {
ast::AssignmentKind::Let => Token::Let,
ast::AssignmentKind::Expect => Token::Expect,
};
just(keyword)
.ignore_then(pattern())
.then(just(Token::Colon).ignore_then(annotation()).or_not())
.then_ignore(just(Token::Equal))
.then(r.clone())
.map_with_span(
move |((pattern, annotation), value), span| UntypedExpr::Assignment {
location: span,
value: Box::new(value),
pattern,
kind,
annotation,
},
)
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn let_bindings() {
assert_expr!("let thing = [ 1, 2, a ]");
}
#[test]
fn expect() {
assert_expr!("expect Some(x) = something.field");
}
}

@@ -0,0 +1,38 @@
use chumsky::prelude::*;
use crate::{
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
pub fn parser(
sequence: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
choice((
sequence
.clone()
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace)),
sequence.clone().delimited_by(
choice((just(Token::LeftParen), just(Token::NewLineLeftParen))),
just(Token::RightParen),
),
))
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn block_basic() {
assert_expr!(
r#"
let b = {
let x = 4
x + 5
}
"#
);
}
}

@@ -0,0 +1,33 @@
use chumsky::prelude::*;
use crate::parser::{
error::ParseError, expr::UntypedExpr, literal::bytearray::parser as bytearray, token::Token,
};
pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
bytearray(|bytes, preferred_format, location| UntypedExpr::ByteArray {
location,
bytes,
preferred_format,
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn bytearray_basic() {
assert_expr!("#[0, 170, 255]");
}
#[test]
fn bytearray_base16() {
assert_expr!("#\"00aaff\"");
}
#[test]
fn bytearray_utf8_encoded() {
assert_expr!("\"aiken\"");
}
}

@@ -0,0 +1,133 @@
use chumsky::prelude::*;
use super::anonymous_binop::parser as anonymous_binop;
use super::anonymous_function::parser as anonymous_function;
use super::assignment;
use super::block::parser as block;
use super::bytearray::parser as bytearray;
use super::if_else::parser as if_else;
use super::int::parser as int;
use super::list::parser as list;
use super::record::parser as record;
use super::record_update::parser as record_update;
use super::string::parser as string;
use super::tuple::parser as tuple;
use super::var::parser as var;
use super::when::parser as when;
use crate::{
ast::{self, Span},
expr::{FnStyle, UntypedExpr},
parser::{
chain::{
call::parser as call, field_access, tuple_index::parser as tuple_index, Chain,
ParserArg,
},
error::ParseError,
token::Token,
},
};
pub fn parser<'a>(
sequence: Recursive<'a, Token, UntypedExpr, ParseError>,
expression: Recursive<'a, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + 'a {
let chain = choice((
tuple_index(),
field_access::parser(),
call(expression.clone()),
));
chain_start(sequence, expression)
.then(chain.repeated())
.foldl(|expr, chain| match chain {
Chain::Call(args, span) => {
let mut holes = Vec::new();
let args = args
.into_iter()
.enumerate()
.map(|(index, a)| match a {
ParserArg::Arg(arg) => *arg,
ParserArg::Hole { location, label } => {
let name = format!("{}__{index}", ast::CAPTURE_VARIABLE);
holes.push(ast::Arg {
location: Span::empty(),
annotation: None,
arg_name: ast::ArgName::Named {
label: name.clone(),
name,
location: Span::empty(),
is_validator_param: false,
},
tipo: (),
});
ast::CallArg {
label,
location,
value: UntypedExpr::Var {
location,
name: format!("{}__{index}", ast::CAPTURE_VARIABLE),
},
}
}
})
.collect();
let call = UntypedExpr::Call {
location: expr.location().union(span),
fun: Box::new(expr),
arguments: args,
};
if holes.is_empty() {
call
} else {
UntypedExpr::Fn {
location: call.location(),
fn_style: FnStyle::Capture,
arguments: holes,
body: Box::new(call),
return_annotation: None,
}
}
}
Chain::FieldAccess(label, span) => UntypedExpr::FieldAccess {
location: expr.location().union(span),
label,
container: Box::new(expr),
},
Chain::TupleIndex(index, span) => UntypedExpr::TupleIndex {
location: expr.location().union(span),
index,
tuple: Box::new(expr),
},
})
}
pub fn chain_start<'a>(
sequence: Recursive<'a, Token, UntypedExpr, ParseError>,
expression: Recursive<'a, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + 'a {
choice((
string(),
int(),
record_update(expression.clone()),
record(expression.clone()),
field_access::constructor(),
var(),
tuple(expression.clone()),
bytearray(),
list(expression.clone()),
anonymous_function(sequence.clone()),
anonymous_binop(),
block(sequence.clone()),
when(expression.clone()),
assignment::let_(expression.clone()),
assignment::expect(expression.clone()),
if_else(sequence, expression.clone()),
))
}

@@ -0,0 +1,74 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
use super::block;
pub fn parser<'a>(
sequence: Recursive<'a, Token, UntypedExpr, ParseError>,
expression: Recursive<'a, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + 'a {
just(Token::If)
.ignore_then(
expression
.clone()
.then(block(sequence.clone()))
.map_with_span(|(condition, body), span| ast::IfBranch {
condition,
body,
location: span,
}),
)
.then(
just(Token::Else)
.ignore_then(just(Token::If))
.ignore_then(
expression
.clone()
.then(block(sequence.clone()))
.map_with_span(|(condition, body), span| ast::IfBranch {
condition,
body,
location: span,
}),
)
.repeated(),
)
.then_ignore(just(Token::Else))
.then(block(sequence))
.map_with_span(|((first, alternative_branches), final_else), span| {
let mut branches = vec1::vec1![first];
branches.extend(alternative_branches);
UntypedExpr::If {
location: span,
branches,
final_else: Box::new(final_else),
}
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn if_else_basic() {
assert_expr!(
r#"
if True {
1 + 1
} else if a < 1 {
3
} else {
4
}
"#
);
}
}

@@ -0,0 +1,47 @@
use chumsky::prelude::*;
use crate::{
expr::UntypedExpr,
parser::{error::ParseError, literal::int::parser as int, token::Token},
};
pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
int().map_with_span(|(value, base), span| UntypedExpr::Int {
location: span,
value,
base,
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn int_literal() {
assert_expr!("1");
}
#[test]
fn int_negative() {
assert_expr!("-1");
}
#[test]
fn int_numeric_underscore() {
assert_expr!(
r#"
{
let i = 1_234_567
let j = 1_000_000
let k = -10_000
}
"#
);
}
#[test]
fn int_hex_bytes() {
assert_expr!(r#"0x01"#);
}
}

@@ -0,0 +1,49 @@
use chumsky::prelude::*;
use crate::{
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
pub fn parser(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
just(Token::LeftSquare)
.ignore_then(r.clone().separated_by(just(Token::Comma)))
.then(choice((
just(Token::Comma).ignore_then(
just(Token::DotDot)
.ignore_then(r.clone())
.map(Box::new)
.or_not(),
),
just(Token::Comma).ignored().or_not().map(|_| None),
)))
.then_ignore(just(Token::RightSquare))
// TODO: check if tail.is_some and elements.is_empty then return ListSpreadWithoutElements error
.map_with_span(|(elements, tail), span| UntypedExpr::List {
location: span,
elements,
tail,
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn empty_list() {
assert_expr!("[]");
}
#[test]
fn int_list() {
assert_expr!("[1, 2, 3]");
}
#[test]
fn list_spread() {
assert_expr!("[1, 2, ..[]]");
}
}

@@ -0,0 +1,233 @@
use chumsky::prelude::*;
use vec1::Vec1;
mod anonymous_binop;
pub mod anonymous_function;
pub mod assignment;
mod block;
pub(crate) mod bytearray;
mod chained;
mod if_else;
mod int;
mod list;
mod record;
mod record_update;
mod sequence;
pub mod string;
mod tuple;
mod var;
pub mod when;
pub use anonymous_function::parser as anonymous_function;
pub use block::parser as block;
pub use bytearray::parser as bytearray;
pub use chained::parser as chained;
pub use if_else::parser as if_else;
pub use int::parser as int;
pub use list::parser as list;
pub use record::parser as record;
pub use record_update::parser as record_update;
pub use sequence::parser as sequence;
pub use string::parser as string;
pub use tuple::parser as tuple;
pub use var::parser as var;
pub use when::parser as when;
use super::{error::ParseError, token::Token};
use crate::{ast, expr::UntypedExpr};
pub fn parser(
sequence: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
recursive(|expression| {
let chained_debugged = chained(sequence, expression)
.then(just(Token::Question).or_not())
.map_with_span(|(value, token), location| match token {
Some(_) => UntypedExpr::TraceIfFalse {
value: Box::new(value),
location,
},
None => value,
});
// Negate
let op = choice((
just(Token::Bang).to(ast::UnOp::Not),
just(Token::Minus)
// NOTE: Prevent conflict with usage for '-' as a standalone binary op.
// This will make '-' parse when used as standalone binop in a function call.
// For example:
//
// foo(a, -, b)
//
// but it'll fail in a let-binding:
//
// let foo = -
//
// which seems acceptable.
.then_ignore(just(Token::Comma).not().rewind())
.to(ast::UnOp::Negate),
));
let unary = op
.map_with_span(|op, span| (op, span))
.repeated()
.then(chained_debugged)
.foldr(|(un_op, span), value| UntypedExpr::UnOp {
op: un_op,
location: span.union(value.location()),
value: Box::new(value),
})
.boxed();
// Product
let op = choice((
just(Token::Star).to(ast::BinOp::MultInt),
just(Token::Slash).to(ast::BinOp::DivInt),
just(Token::Percent).to(ast::BinOp::ModInt),
));
let product = unary
.clone()
.then(op.then(unary).repeated())
.foldl(|a, (op, b)| UntypedExpr::BinOp {
location: a.location().union(b.location()),
name: op,
left: Box::new(a),
right: Box::new(b),
})
.boxed();
// Sum
let op = choice((
just(Token::Plus).to(ast::BinOp::AddInt),
just(Token::Minus).to(ast::BinOp::SubInt),
));
let sum = product
.clone()
.then(op.then(product).repeated())
.foldl(|a, (op, b)| UntypedExpr::BinOp {
location: a.location().union(b.location()),
name: op,
left: Box::new(a),
right: Box::new(b),
})
.boxed();
// Comparison
let op = choice((
just(Token::EqualEqual).to(ast::BinOp::Eq),
just(Token::NotEqual).to(ast::BinOp::NotEq),
just(Token::Less).to(ast::BinOp::LtInt),
just(Token::Greater).to(ast::BinOp::GtInt),
just(Token::LessEqual).to(ast::BinOp::LtEqInt),
just(Token::GreaterEqual).to(ast::BinOp::GtEqInt),
));
let comparison = sum
.clone()
.then(op.then(sum).repeated())
.foldl(|a, (op, b)| UntypedExpr::BinOp {
location: a.location().union(b.location()),
name: op,
left: Box::new(a),
right: Box::new(b),
})
.boxed();
// Conjunction
let op = just(Token::AmperAmper).to(ast::BinOp::And);
let conjunction = comparison
.clone()
.then(op.then(comparison).repeated())
.foldl(|a, (op, b)| UntypedExpr::BinOp {
location: a.location().union(b.location()),
name: op,
left: Box::new(a),
right: Box::new(b),
})
.boxed();
// Disjunction
let op = just(Token::VbarVbar).to(ast::BinOp::Or);
let disjunction = conjunction
.clone()
.then(op.then(conjunction).repeated())
.foldl(|a, (op, b)| UntypedExpr::BinOp {
location: a.location().union(b.location()),
name: op,
left: Box::new(a),
right: Box::new(b),
})
.boxed();
// Pipeline
disjunction
.clone()
.then(
choice((just(Token::Pipe), just(Token::NewLinePipe)))
.then(disjunction)
.repeated(),
)
.foldl(|l, (pipe, r)| {
if let UntypedExpr::PipeLine {
mut expressions,
one_liner,
} = l
{
expressions.push(r);
return UntypedExpr::PipeLine {
expressions,
one_liner,
};
}
let mut expressions = Vec1::new(l);
expressions.push(r);
UntypedExpr::PipeLine {
expressions,
one_liner: pipe != Token::NewLinePipe,
}
})
})
}
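// For orientation: the ladder above encodes the usual precedence, from
// tightest to loosest: unary (`!`, `-`), then `*` `/` `%`, then `+` `-`, then
// the comparison operators, then `&&`, then `||`, and finally `|>` pipelines.
// An expression such as `a + b * c |> f` therefore groups as `(a + (b * c)) |> f`.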
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn plus_binop() {
assert_expr!("a + 1");
}
#[test]
fn pipeline() {
assert_expr!(
r#"
a + 2
|> add_one
|> add_one
"#
);
}
#[test]
fn field_access() {
assert_expr!("user.name");
}
#[test]
fn function_invoke() {
assert_expr!(
r#"
let x = add_one(3)
let map_add_x = list.map(_, fn (y) { x + y })
map_add_x([ 1, 2, 3 ])
"#
);
}
}

@@ -0,0 +1,172 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::UntypedExpr,
parser::{
error::{self, ParseError},
token::Token,
},
};
pub fn parser(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
choice((
select! {Token::Name { name } => name}
.map_with_span(|module, span: ast::Span| (module, span))
.then_ignore(just(Token::Dot))
.or_not()
.then(select! {Token::UpName { name } => name}.map_with_span(|name, span| (name, span)))
.then(
choice((
select! {Token::Name {name} => name}
.then_ignore(just(Token::Colon))
.then(choice((
r.clone(),
select! {Token::DiscardName {name} => name }.validate(
|_name, span, emit| {
emit(ParseError::expected_input_found(
span,
None,
Some(error::Pattern::Discard),
));
UntypedExpr::Var {
location: span,
name: ast::CAPTURE_VARIABLE.to_string(),
}
},
),
)))
.map_with_span(|(label, value), span| ast::CallArg {
location: span,
value,
label: Some(label),
}),
choice((
select! {Token::Name {name} => name}.map_with_span(|name, span| {
(
UntypedExpr::Var {
name: name.clone(),
location: span,
},
name,
)
}),
select! {Token::DiscardName {name} => name }.validate(
|name, span, emit| {
emit(ParseError::expected_input_found(
span,
None,
Some(error::Pattern::Discard),
));
(
UntypedExpr::Var {
location: span,
name: ast::CAPTURE_VARIABLE.to_string(),
},
name,
)
},
),
))
.map(|(value, name)| ast::CallArg {
location: value.location(),
value,
label: Some(name),
}),
))
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace)),
),
select! {Token::Name { name } => name}
.map_with_span(|module, span| (module, span))
.then_ignore(just(Token::Dot))
.or_not()
.then(select! {Token::UpName { name } => name}.map_with_span(|name, span| (name, span)))
.then(
select! {Token::Name {name} => name}
.ignored()
.then_ignore(just(Token::Colon))
.validate(|_label, span, emit| {
emit(ParseError::expected_input_found(
span,
None,
Some(error::Pattern::Label),
))
})
.or_not()
.then(choice((
r.clone(),
select! {Token::DiscardName {name} => name }.validate(
|_name, span, emit| {
emit(ParseError::expected_input_found(
span,
None,
Some(error::Pattern::Discard),
));
UntypedExpr::Var {
location: span,
name: ast::CAPTURE_VARIABLE.to_string(),
}
},
),
)))
.map(|(_label, value)| ast::CallArg {
location: value.location(),
value,
label: None,
})
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftParen), just(Token::RightParen)),
),
))
.map_with_span(|((module, (name, n_span)), arguments), span| {
let fun = if let Some((module, m_span)) = module {
UntypedExpr::FieldAccess {
location: m_span.union(n_span),
label: name,
container: Box::new(UntypedExpr::Var {
location: m_span,
name: module,
}),
}
} else {
UntypedExpr::Var {
location: n_span,
name,
}
};
UntypedExpr::Call {
arguments,
fun: Box::new(fun),
location: span,
}
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn record_create_labeled() {
assert_expr!(r#"User { name: "Aiken", age, thing: 2 }"#);
}
#[test]
fn record_create_labeled_with_field_access() {
assert_expr!(r#"some_module.User { name: "Aiken", age, thing: 2 }"#);
}
#[test]
fn record_create_unlabeled() {
assert_expr!(r#"some_module.Thing(1, a)"#);
}
}

@@ -0,0 +1,96 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
pub fn parser(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
select! {Token::Name { name } => name}
.map_with_span(|module, span: ast::Span| (module, span))
.then_ignore(just(Token::Dot))
.or_not()
.then(select! {Token::UpName { name } => name}.map_with_span(|name, span| (name, span)))
.then(
just(Token::DotDot)
.ignore_then(r.clone())
.then(
just(Token::Comma)
.ignore_then(
choice((
select! { Token::Name {name} => name }
.then_ignore(just(Token::Colon))
.then(r.clone())
.map_with_span(|(label, value), span| {
ast::UntypedRecordUpdateArg {
label,
value,
location: span,
}
}),
select! {Token::Name {name} => name}.map_with_span(|name, span| {
ast::UntypedRecordUpdateArg {
location: span,
value: UntypedExpr::Var {
name: name.clone(),
location: span,
},
label: name,
}
}),
))
.separated_by(just(Token::Comma))
.allow_trailing(),
)
.or_not(),
)
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace))
.map_with_span(|a, span: ast::Span| (a, span)),
)
.map(|((module, (name, n_span)), ((spread, opt_args), span))| {
let constructor = if let Some((module, m_span)) = module {
UntypedExpr::FieldAccess {
location: m_span.union(n_span),
label: name,
container: Box::new(UntypedExpr::Var {
location: m_span,
name: module,
}),
}
} else {
UntypedExpr::Var {
location: n_span,
name,
}
};
let spread_span = spread.location();
let location = ast::Span::new((), spread_span.start - 2..spread_span.end);
let spread = ast::RecordUpdateSpread {
base: Box::new(spread),
location,
};
UntypedExpr::RecordUpdate {
location: constructor.location().union(span),
constructor: Box::new(constructor),
spread,
arguments: opt_args.unwrap_or_default(),
}
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn record_update_basic() {
assert_expr!(r#"User { ..user, name: "Aiken", age }"#);
}
}

@@ -0,0 +1,36 @@
use chumsky::prelude::*;
use crate::{
ast::TraceKind,
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
recursive(|expression| {
choice((
just(Token::Trace)
.ignore_then(super::parser(expression.clone()))
.then(expression.clone())
.map_with_span(|(text, then_), span| UntypedExpr::Trace {
kind: TraceKind::Trace,
location: span,
then: Box::new(then_),
text: Box::new(super::string::flexible(text)),
}),
just(Token::ErrorTerm)
.ignore_then(super::parser(expression.clone()).or_not())
.map_with_span(|reason, span| {
UntypedExpr::error(span, reason.map(super::string::flexible))
}),
just(Token::Todo)
.ignore_then(super::parser(expression.clone()).or_not())
.map_with_span(|reason, span| {
UntypedExpr::todo(span, reason.map(super::string::flexible))
}),
super::parser(expression.clone())
.then(expression.repeated())
.foldl(|current, next| current.append_in_sequence(next)),
))
})
}

@@ -0,0 +1,51 @@
---
source: crates/aiken-lang/src/parser/expr/anonymous_function.rs
description: "Code:\n\nfn (a: Int) -> Int { a + 1 }"
---
Fn {
location: 0..28,
fn_style: Plain,
arguments: [
Arg {
arg_name: Named {
name: "a",
label: "a",
location: 4..5,
is_validator_param: false,
},
location: 4..10,
annotation: Some(
Constructor {
location: 7..10,
module: None,
name: "Int",
arguments: [],
},
),
tipo: (),
},
],
body: BinOp {
location: 21..26,
name: AddInt,
left: Var {
location: 21..22,
name: "a",
},
right: Int {
location: 25..26,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
},
return_annotation: Some(
Constructor {
location: 15..18,
module: None,
name: "Int",
arguments: [],
},
),
}

@@ -0,0 +1,49 @@
---
source: crates/aiken-lang/src/parser/expr/block.rs
description: "Code:\n\nlet b = {\n let x = 4\n\n x + 5\n}\n"
---
Assignment {
location: 0..32,
value: Sequence {
location: 12..30,
expressions: [
Assignment {
location: 12..21,
value: Int {
location: 20..21,
value: "4",
base: Decimal {
numeric_underscore: false,
},
},
pattern: Var {
location: 16..17,
name: "x",
},
kind: Let,
annotation: None,
},
BinOp {
location: 25..30,
name: AddInt,
left: Var {
location: 25..26,
name: "x",
},
right: Int {
location: 29..30,
value: "5",
base: Decimal {
numeric_underscore: false,
},
},
},
],
},
pattern: Var {
location: 4..5,
name: "b",
},
kind: Let,
annotation: None,
}

@@ -0,0 +1,13 @@
---
source: crates/aiken-lang/src/parser/expr/bytearray.rs
description: "Code:\n\n#\"00aaff\""
---
ByteArray {
location: 0..9,
bytes: [
0,
170,
255,
],
preferred_format: HexadecimalString,
}

@@ -0,0 +1,17 @@
---
source: crates/aiken-lang/src/parser/expr/bytearray.rs
description: "Code:\n\n#[0, 170, 255]"
---
ByteArray {
location: 0..14,
bytes: [
0,
170,
255,
],
preferred_format: ArrayOfBytes(
Decimal {
numeric_underscore: false,
},
),
}

@@ -0,0 +1,15 @@
---
source: crates/aiken-lang/src/parser/expr/bytearray.rs
description: "Code:\n\n\"aiken\""
---
ByteArray {
location: 0..7,
bytes: [
97,
105,
107,
101,
110,
],
preferred_format: Utf8String,
}

@@ -0,0 +1,9 @@
---
source: crates/aiken-lang/src/parser/expr/list.rs
description: "Code:\n\n[]"
---
List {
location: 0..2,
elements: [],
tail: None,
}

@@ -0,0 +1,36 @@
---
source: crates/aiken-lang/src/parser/expr/assignment.rs
description: "Code:\n\nexpect Some(x) = something.field"
---
Assignment {
location: 0..32,
value: FieldAccess {
location: 17..32,
label: "field",
container: Var {
location: 17..26,
name: "something",
},
},
pattern: Constructor {
is_record: false,
location: 7..14,
name: "Some",
arguments: [
CallArg {
label: None,
location: 12..13,
value: Var {
location: 12..13,
name: "x",
},
},
],
module: None,
constructor: (),
with_spread: false,
tipo: (),
},
kind: Expect,
annotation: None,
}

@@ -0,0 +1,12 @@
---
source: crates/aiken-lang/src/parser/expr/mod.rs
description: "Code:\n\nuser.name"
---
FieldAccess {
location: 0..9,
label: "name",
container: Var {
location: 0..4,
name: "user",
},
}

File diff suppressed because it is too large.

@@ -0,0 +1,160 @@
---
source: crates/aiken-lang/src/parser/expr/mod.rs
description: "Code:\n\nlet x = add_one(3)\n\nlet map_add_x = list.map(_, fn (y) { x + y })\n\nmap_add_x([ 1, 2, 3 ])\n"
---
Sequence {
location: 0..89,
expressions: [
Assignment {
location: 0..18,
value: Call {
arguments: [
CallArg {
label: None,
location: 16..17,
value: Int {
location: 16..17,
value: "3",
base: Decimal {
numeric_underscore: false,
},
},
},
],
fun: Var {
location: 8..15,
name: "add_one",
},
location: 8..18,
},
pattern: Var {
location: 4..5,
name: "x",
},
kind: Let,
annotation: None,
},
Assignment {
location: 20..65,
value: Fn {
location: 36..65,
fn_style: Capture,
arguments: [
Arg {
arg_name: Named {
name: "_capture__0",
label: "_capture__0",
location: 0..0,
is_validator_param: false,
},
location: 0..0,
annotation: None,
tipo: (),
},
],
body: Call {
arguments: [
CallArg {
label: None,
location: 45..46,
value: Var {
location: 45..46,
name: "_capture__0",
},
},
CallArg {
label: None,
location: 48..64,
value: Fn {
location: 48..64,
fn_style: Plain,
arguments: [
Arg {
arg_name: Named {
name: "y",
label: "y",
location: 52..53,
is_validator_param: false,
},
location: 52..53,
annotation: None,
tipo: (),
},
],
body: BinOp {
location: 57..62,
name: AddInt,
left: Var {
location: 57..58,
name: "x",
},
right: Var {
location: 61..62,
name: "y",
},
},
return_annotation: None,
},
},
],
fun: FieldAccess {
location: 36..44,
label: "map",
container: Var {
location: 36..40,
name: "list",
},
},
location: 36..65,
},
return_annotation: None,
},
pattern: Var {
location: 24..33,
name: "map_add_x",
},
kind: Let,
annotation: None,
},
Call {
arguments: [
CallArg {
label: None,
location: 77..88,
value: List {
location: 77..88,
elements: [
Int {
location: 79..80,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 82..83,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 85..86,
value: "3",
base: Decimal {
numeric_underscore: false,
},
},
],
tail: None,
},
},
],
fun: Var {
location: 67..76,
name: "map_add_x",
},
location: 67..89,
},
],
}

View File

@ -0,0 +1,66 @@
---
source: crates/aiken-lang/src/parser/expr/if_else.rs
description: "Code:\n\nif True {\n 1 + 1\n} else if a < 1 {\n 3\n} else {\n 4\n}\n"
---
If {
location: 0..54,
branches: [
IfBranch {
condition: Var {
location: 3..7,
name: "True",
},
body: BinOp {
location: 12..17,
name: AddInt,
left: Int {
location: 12..13,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
right: Int {
location: 16..17,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
},
location: 3..19,
},
IfBranch {
condition: BinOp {
location: 28..33,
name: LtInt,
left: Var {
location: 28..29,
name: "a",
},
right: Int {
location: 32..33,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
},
body: Int {
location: 38..39,
value: "3",
base: Decimal {
numeric_underscore: false,
},
},
location: 28..41,
},
],
final_else: Int {
location: 51..52,
value: "4",
base: Decimal {
numeric_underscore: false,
},
},
}

View File

@ -0,0 +1,9 @@
---
source: crates/aiken-lang/src/parser/expr/int.rs
description: "Code:\n\n0x01"
---
Int {
location: 0..4,
value: "1",
base: Hexadecimal,
}

View File

@ -0,0 +1,31 @@
---
source: crates/aiken-lang/src/parser/expr/list.rs
description: "Code:\n\n[1, 2, 3]"
---
List {
location: 0..9,
elements: [
Int {
location: 1..2,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 4..5,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 7..8,
value: "3",
base: Decimal {
numeric_underscore: false,
},
},
],
tail: None,
}

View File

@ -0,0 +1,11 @@
---
source: crates/aiken-lang/src/parser/expr/int.rs
description: "Code:\n\n1"
---
Int {
location: 0..1,
value: "1",
base: Decimal {
numeric_underscore: false,
},
}

View File

@ -0,0 +1,15 @@
---
source: crates/aiken-lang/src/parser/expr/int.rs
description: "Code:\n\n-1"
---
UnOp {
op: Negate,
location: 0..2,
value: Int {
location: 1..2,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
}

View File

@ -0,0 +1,61 @@
---
source: crates/aiken-lang/src/parser/expr/int.rs
description: "Code:\n\n{\n let i = 1_234_567\n let j = 1_000_000\n let k = -10_000\n}\n"
---
Sequence {
location: 4..59,
expressions: [
Assignment {
location: 4..21,
value: Int {
location: 12..21,
value: "1234567",
base: Decimal {
numeric_underscore: true,
},
},
pattern: Var {
location: 8..9,
name: "i",
},
kind: Let,
annotation: None,
},
Assignment {
location: 24..41,
value: Int {
location: 32..41,
value: "1000000",
base: Decimal {
numeric_underscore: true,
},
},
pattern: Var {
location: 28..29,
name: "j",
},
kind: Let,
annotation: None,
},
Assignment {
location: 44..59,
value: UnOp {
op: Negate,
location: 52..59,
value: Int {
location: 53..59,
value: "10000",
base: Decimal {
numeric_underscore: true,
},
},
},
pattern: Var {
location: 48..49,
name: "k",
},
kind: Let,
annotation: None,
},
],
}

View File

@ -0,0 +1,37 @@
---
source: crates/aiken-lang/src/parser/expr/assignment.rs
description: "Code:\n\nlet thing = [ 1, 2, a ]"
---
Assignment {
location: 0..23,
value: List {
location: 12..23,
elements: [
Int {
location: 14..15,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 17..18,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
Var {
location: 20..21,
name: "a",
},
],
tail: None,
},
pattern: Var {
location: 4..9,
name: "thing",
},
kind: Let,
annotation: None,
}

View File

@ -0,0 +1,30 @@
---
source: crates/aiken-lang/src/parser/expr/list.rs
description: "Code:\n\n[1, 2, ..[]]"
---
List {
location: 0..12,
elements: [
Int {
location: 1..2,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 4..5,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
],
tail: Some(
List {
location: 9..11,
elements: [],
tail: None,
},
),
}

View File

@ -0,0 +1,95 @@
---
source: crates/aiken-lang/src/parser/expr/tuple.rs
description: "Code:\n\nlet tuple = (1, 2, 3, 4)\ntuple.1st + tuple.2nd + tuple.3rd + tuple.4th\n"
---
Sequence {
location: 0..70,
expressions: [
Assignment {
location: 0..24,
value: Tuple {
location: 12..24,
elems: [
Int {
location: 13..14,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 16..17,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 19..20,
value: "3",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 22..23,
value: "4",
base: Decimal {
numeric_underscore: false,
},
},
],
},
pattern: Var {
location: 4..9,
name: "tuple",
},
kind: Let,
annotation: None,
},
BinOp {
location: 25..70,
name: AddInt,
left: BinOp {
location: 25..58,
name: AddInt,
left: BinOp {
location: 25..46,
name: AddInt,
left: TupleIndex {
location: 25..34,
index: 0,
tuple: Var {
location: 25..30,
name: "tuple",
},
},
right: TupleIndex {
location: 37..46,
index: 1,
tuple: Var {
location: 37..42,
name: "tuple",
},
},
},
right: TupleIndex {
location: 49..58,
index: 2,
tuple: Var {
location: 49..54,
name: "tuple",
},
},
},
right: TupleIndex {
location: 61..70,
index: 3,
tuple: Var {
location: 61..66,
name: "tuple",
},
},
},
],
}

View File

@ -0,0 +1,54 @@
---
source: crates/aiken-lang/src/parser/expr/tuple.rs
description: "Code:\n\nlet a = foo(14)\n(a, 42)\n"
---
Sequence {
location: 0..23,
expressions: [
Assignment {
location: 0..15,
value: Call {
arguments: [
CallArg {
label: None,
location: 12..14,
value: Int {
location: 12..14,
value: "14",
base: Decimal {
numeric_underscore: false,
},
},
},
],
fun: Var {
location: 8..11,
name: "foo",
},
location: 8..15,
},
pattern: Var {
location: 4..5,
name: "a",
},
kind: Let,
annotation: None,
},
Tuple {
location: 16..23,
elems: [
Var {
location: 17..18,
name: "a",
},
Int {
location: 20..22,
value: "42",
base: Decimal {
numeric_underscore: false,
},
},
],
},
],
}

View File

@ -0,0 +1,32 @@
---
source: crates/aiken-lang/src/parser/expr/mod.rs
description: "Code:\n\na + 2\n|> add_one\n|> add_one\n"
---
PipeLine {
expressions: [
BinOp {
location: 0..5,
name: AddInt,
left: Var {
location: 0..1,
name: "a",
},
right: Int {
location: 4..5,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
},
Var {
location: 9..16,
name: "add_one",
},
Var {
location: 20..27,
name: "add_one",
},
],
one_liner: false,
}

View File

@ -0,0 +1,19 @@
---
source: crates/aiken-lang/src/parser/expr/mod.rs
description: "Code:\n\na + 1"
---
BinOp {
location: 0..5,
name: AddInt,
left: Var {
location: 0..1,
name: "a",
},
right: Int {
location: 4..5,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
}

View File

@ -0,0 +1,53 @@
---
source: crates/aiken-lang/src/parser/expr/record.rs
description: "Code:\n\nUser { name: \"Aiken\", age, thing: 2 }"
---
Call {
arguments: [
CallArg {
label: Some(
"name",
),
location: 7..20,
value: ByteArray {
location: 13..20,
bytes: [
65,
105,
107,
101,
110,
],
preferred_format: Utf8String,
},
},
CallArg {
label: Some(
"age",
),
location: 22..25,
value: Var {
location: 22..25,
name: "age",
},
},
CallArg {
label: Some(
"thing",
),
location: 27..35,
value: Int {
location: 34..35,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
},
],
fun: Var {
location: 0..4,
name: "User",
},
location: 0..37,
}

View File

@ -0,0 +1,57 @@
---
source: crates/aiken-lang/src/parser/expr/record.rs
description: "Code:\n\nsome_module.User { name: \"Aiken\", age, thing: 2 }"
---
Call {
arguments: [
CallArg {
label: Some(
"name",
),
location: 19..32,
value: ByteArray {
location: 25..32,
bytes: [
65,
105,
107,
101,
110,
],
preferred_format: Utf8String,
},
},
CallArg {
label: Some(
"age",
),
location: 34..37,
value: Var {
location: 34..37,
name: "age",
},
},
CallArg {
label: Some(
"thing",
),
location: 39..47,
value: Int {
location: 46..47,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
},
],
fun: FieldAccess {
location: 0..16,
label: "User",
container: Var {
location: 0..11,
name: "some_module",
},
},
location: 0..49,
}

View File

@ -0,0 +1,36 @@
---
source: crates/aiken-lang/src/parser/expr/record.rs
description: "Code:\n\nsome_module.Thing(1, a)"
---
Call {
arguments: [
CallArg {
label: None,
location: 18..19,
value: Int {
location: 18..19,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
},
CallArg {
label: None,
location: 21..22,
value: Var {
location: 21..22,
name: "a",
},
},
],
fun: FieldAccess {
location: 0..17,
label: "Thing",
container: Var {
location: 0..11,
name: "some_module",
},
},
location: 0..23,
}

View File

@ -0,0 +1,43 @@
---
source: crates/aiken-lang/src/parser/expr/record_update.rs
description: "Code:\n\nUser { ..user, name: \"Aiken\", age }"
---
RecordUpdate {
location: 0..35,
constructor: Var {
location: 0..4,
name: "User",
},
spread: RecordUpdateSpread {
base: Var {
location: 9..13,
name: "user",
},
location: 7..13,
},
arguments: [
UntypedRecordUpdateArg {
label: "name",
location: 15..28,
value: ByteArray {
location: 21..28,
bytes: [
65,
105,
107,
101,
110,
],
preferred_format: Utf8String,
},
},
UntypedRecordUpdateArg {
label: "age",
location: 30..33,
value: Var {
location: 30..33,
name: "age",
},
},
],
}

View File

@ -0,0 +1,8 @@
---
source: crates/aiken-lang/src/parser/expr/string.rs
description: "Code:\n\n@\"aiken\""
---
String {
location: 0..8,
value: "aiken",
}

View File

@ -0,0 +1,43 @@
use chumsky::prelude::*;
use crate::{
ast,
expr::UntypedExpr,
parser::{error::ParseError, literal::string::parser as string, token::Token},
};
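/// Parse a string literal (written `@"..."`) into an `UntypedExpr::String`.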
pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
string().map_with_span(|value, span| UntypedExpr::String {
location: span,
value,
})
}
/// Interpret bytearray string literals written as utf-8 strings, as strings.
///
/// This is mostly convenient so that todo & error work with either @"..." or plain "...".
/// In this particular context, there's actually no ambiguity about the right-hand side, so
/// we can provide this syntactic sugar.
pub fn flexible(expr: UntypedExpr) -> UntypedExpr {
match expr {
UntypedExpr::ByteArray {
preferred_format: ast::ByteArrayFormatPreference::Utf8String,
bytes,
location,
} => UntypedExpr::String {
location,
value: String::from_utf8(bytes).unwrap(),
},
_ => expr,
}
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn string_basic() {
assert_expr!("@\"aiken\"");
}
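    // A minimal illustrative sketch (not part of the original snapshot tests):
    // `flexible` is expected to rewrite a utf-8 bytearray literal into a plain
    // String expression, so that todo/error accept both @"..." and "...".
    #[test]
    fn flexible_utf8_bytearray_becomes_string() {
        use crate::{ast, expr::UntypedExpr};

        let expr = UntypedExpr::ByteArray {
            location: ast::Span { start: 0, end: 7 },
            bytes: "aiken".as_bytes().to_vec(),
            preferred_format: ast::ByteArrayFormatPreference::Utf8String,
        };

        match super::flexible(expr) {
            UntypedExpr::String { value, .. } => assert_eq!(value, "aiken"),
            _ => panic!("expected flexible to produce a String expression"),
        }
    }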
}

View File

@ -0,0 +1,48 @@
use chumsky::prelude::*;
use crate::{
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
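/// Parse a tuple expression: two or more comma-separated expressions (with an
/// optional trailing comma) wrapped in parentheses.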
pub fn parser(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
r.clone()
.separated_by(just(Token::Comma))
.at_least(2)
.allow_trailing()
.delimited_by(
choice((just(Token::LeftParen), just(Token::NewLineLeftParen))),
just(Token::RightParen),
)
.map_with_span(|elems, span| UntypedExpr::Tuple {
location: span,
elems,
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn parse_tuple() {
assert_expr!(
r#"
let tuple = (1, 2, 3, 4)
tuple.1st + tuple.2nd + tuple.3rd + tuple.4th
"#
);
}
#[test]
fn parse_tuple2() {
assert_expr!(
r#"
let a = foo(14)
(a, 42)
"#
);
}
}

View File

@ -0,0 +1,17 @@
use chumsky::prelude::*;
use crate::{
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
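/// Parse a lowercase or uppercase name into an `UntypedExpr::Var`, covering
/// both variable and constructor references.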
pub fn parser() -> impl Parser<Token, UntypedExpr, Error = ParseError> {
select! {
Token::Name { name } => name,
Token::UpName { name } => name,
}
.map_with_span(|name, span| UntypedExpr::Var {
location: span,
name,
})
}

View File

@ -0,0 +1,59 @@
use chumsky::prelude::*;
use vec1::vec1;
use crate::{
ast,
expr::UntypedExpr,
parser::{error::ParseError, expr::string::flexible, pattern, token::Token},
};
use super::guard;
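/// Parse a single `when` clause: a pattern, optional `|`-separated alternative
/// patterns, an optional `if` guard, and the expression to evaluate after `->`.
/// `todo` and `error` are also accepted as clause bodies, with an optional reason.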
pub fn parser(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, ast::UntypedClause, Error = ParseError> + '_ {
pattern()
.then(just(Token::Vbar).ignore_then(pattern()).repeated().or_not())
.then(choice((
just(Token::If)
.ignore_then(guard())
.or_not()
.then_ignore(just(Token::RArrow)),
just(Token::If)
.ignore_then(take_until(just(Token::RArrow)))
.validate(|_value, span, emit| {
emit(ParseError::invalid_when_clause_guard(span));
None
}),
)))
// TODO: add hint "Did you mean to wrap a multi-line clause in curly braces?"
.then(choice((
r.clone(),
just(Token::Todo)
.ignore_then(
r.clone()
.then_ignore(one_of(Token::RArrow).not().rewind())
.or_not(),
)
.map_with_span(|reason, span| UntypedExpr::todo(span, reason.map(flexible))),
just(Token::ErrorTerm)
.ignore_then(
r.clone()
.then_ignore(just(Token::RArrow).not().rewind())
.or_not(),
)
.map_with_span(|reason, span| UntypedExpr::error(span, reason.map(flexible))),
)))
.map_with_span(
|(((pattern, alternative_patterns_opt), guard), then), span| {
let mut patterns = vec1![pattern];
patterns.append(&mut alternative_patterns_opt.unwrap_or_default());
ast::UntypedClause {
location: span,
patterns,
guard,
then,
}
},
)
}

View File

@ -0,0 +1,122 @@
use chumsky::prelude::*;
use crate::{
ast,
parser::{definition, error::ParseError, token::Token},
};
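/// Parse a `when` clause guard: variables, constants, and parenthesised
/// sub-guards combined with operators. Precedence, from tightest to loosest
/// binding: `!` (logical not), comparisons, `&&`, then `||`.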
pub fn parser() -> impl Parser<Token, ast::UntypedClauseGuard, Error = ParseError> {
recursive(|expression| {
let var_parser = select! {
Token::Name { name } => name,
Token::UpName { name } => name,
}
.map_with_span(|name, span| ast::ClauseGuard::Var {
name,
tipo: (),
location: span,
});
let constant_parser = definition::constant::value().map(ast::ClauseGuard::Constant);
let block_parser = expression
.clone()
.delimited_by(just(Token::LeftParen), just(Token::RightParen));
let leaf_parser = choice((var_parser, constant_parser, block_parser)).boxed();
let unary_op = just(Token::Bang);
let unary = unary_op
.map_with_span(|op, span| (op, span))
.repeated()
.then(leaf_parser)
.foldr(|(_, span), value| ast::ClauseGuard::Not {
location: span.union(value.location()),
value: Box::new(value),
})
.boxed();
let comparison_op = choice((
just(Token::EqualEqual).to(ast::BinOp::Eq),
just(Token::NotEqual).to(ast::BinOp::NotEq),
just(Token::Less).to(ast::BinOp::LtInt),
just(Token::Greater).to(ast::BinOp::GtInt),
just(Token::LessEqual).to(ast::BinOp::LtEqInt),
just(Token::GreaterEqual).to(ast::BinOp::GtEqInt),
));
let comparison = unary
.clone()
.then(comparison_op.then(unary).repeated())
.foldl(|left, (op, right)| {
let location = left.location().union(right.location());
let left = Box::new(left);
let right = Box::new(right);
match op {
ast::BinOp::Eq => ast::ClauseGuard::Equals {
location,
left,
right,
},
ast::BinOp::NotEq => ast::ClauseGuard::NotEquals {
location,
left,
right,
},
ast::BinOp::LtInt => ast::ClauseGuard::LtInt {
location,
left,
right,
},
ast::BinOp::GtInt => ast::ClauseGuard::GtInt {
location,
left,
right,
},
ast::BinOp::LtEqInt => ast::ClauseGuard::LtEqInt {
location,
left,
right,
},
ast::BinOp::GtEqInt => ast::ClauseGuard::GtEqInt {
location,
left,
right,
},
_ => unreachable!(),
}
})
.boxed();
let and_op = just(Token::AmperAmper);
let conjunction = comparison
.clone()
.then(and_op.then(comparison).repeated())
.foldl(|left, (_tok, right)| {
let location = left.location().union(right.location());
let left = Box::new(left);
let right = Box::new(right);
ast::ClauseGuard::And {
location,
left,
right,
}
});
let or_op = just(Token::VbarVbar);
conjunction
.clone()
.then(or_op.then(conjunction).repeated())
.foldl(|left, (_tok, right)| {
let location = left.location().union(right.location());
let left = Box::new(left);
let right = Box::new(right);
ast::ClauseGuard::Or {
location,
left,
right,
}
})
})
}

View File

@ -0,0 +1,52 @@
use chumsky::prelude::*;
mod clause;
mod guard;
pub use clause::parser as clause;
pub use guard::parser as guard;
use crate::{
expr::UntypedExpr,
parser::{error::ParseError, token::Token},
};
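/// Parse a `when <subject> is { ... }` expression: the subject expression,
/// followed by its clauses between braces.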
pub fn parser(
r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
just(Token::When)
// TODO: If subject is empty we should return ParseErrorType::ExpectedExpr,
.ignore_then(r.clone().map(Box::new))
.then_ignore(just(Token::Is))
.then_ignore(just(Token::LeftBrace))
// TODO: If clauses are empty we should return ParseErrorType::NoCaseClause
.then(clause(r).repeated())
.then_ignore(just(Token::RightBrace))
.map_with_span(|(subject, clauses), span| UntypedExpr::When {
location: span,
subject,
clauses,
})
}
#[cfg(test)]
mod tests {
use crate::assert_expr;
#[test]
fn when_basic() {
assert_expr!(
r#"
when a is {
2 if x > 1 -> 3
1 | 4 | 5 -> {
let amazing = 5
amazing
}
3 -> 9
_ -> 4
}
"#
);
}
}

View File

@ -0,0 +1,140 @@
---
source: crates/aiken-lang/src/parser/expr/when/mod.rs
description: "Code:\n\nwhen a is {\n 2 if x > 1 -> 3\n 1 | 4 | 5 -> {\n let amazing = 5\n amazing\n }\n 3 -> 9\n _ -> 4\n}\n"
---
When {
location: 0..102,
subject: Var {
location: 5..6,
name: "a",
},
clauses: [
UntypedClause {
location: 14..29,
patterns: [
Int {
location: 14..15,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
],
guard: Some(
GtInt {
location: 19..24,
left: Var {
location: 19..20,
tipo: (),
name: "x",
},
right: Constant(
Int {
location: 23..24,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
),
},
),
then: Int {
location: 28..29,
value: "3",
base: Decimal {
numeric_underscore: false,
},
},
},
UntypedClause {
location: 32..82,
patterns: [
Int {
location: 32..33,
value: "1",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 36..37,
value: "4",
base: Decimal {
numeric_underscore: false,
},
},
Int {
location: 40..41,
value: "5",
base: Decimal {
numeric_underscore: false,
},
},
],
guard: None,
then: Sequence {
location: 51..78,
expressions: [
Assignment {
location: 51..66,
value: Int {
location: 65..66,
value: "5",
base: Decimal {
numeric_underscore: false,
},
},
pattern: Var {
location: 55..62,
name: "amazing",
},
kind: Let,
annotation: None,
},
Var {
location: 71..78,
name: "amazing",
},
],
},
},
UntypedClause {
location: 85..91,
patterns: [
Int {
location: 85..86,
value: "3",
base: Decimal {
numeric_underscore: false,
},
},
],
guard: None,
then: Int {
location: 90..91,
value: "9",
base: Decimal {
numeric_underscore: false,
},
},
},
UntypedClause {
location: 94..100,
patterns: [
Discard {
name: "_",
location: 94..95,
},
],
guard: None,
then: Int {
location: 99..100,
value: "4",
base: Decimal {
numeric_underscore: false,
},
},
},
],
}

View File

@ -1,12 +1,84 @@
use super::{
error::ParseError,
token::{Base, Token},
};
use crate::ast::Span;
use chumsky::prelude::*;
use num_bigint::BigInt;
use ordinal::Ordinal;
use super::{
error::ParseError,
extra::ModuleExtra,
token::{Base, Token},
};
use crate::ast::Span;
pub struct LexInfo {
pub tokens: Vec<(Token, Span)>,
pub extra: ModuleExtra,
}
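/// Lex a source string into a list of tokens plus the module's extra
/// information (comments, doc comments, and empty lines).
///
/// Comment and empty-line tokens are not kept in the token stream; they are
/// recorded in `ModuleExtra` instead. A `(` or `|` that directly follows a
/// newline is rewritten to `NewLineLeftParen` / `NewLinePipe` so that later
/// parsing can distinguish them from their inline counterparts.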
pub fn run(src: &str) -> Result<LexInfo, Vec<ParseError>> {
let len = src.as_bytes().len();
let tokens = lexer().parse(chumsky::Stream::from_iter(
Span::create(len, 1),
src.chars().scan(0, |i, c| {
let start = *i;
let offset = c.len_utf8();
*i = start + offset;
Some((c, Span::create(start, offset)))
}),
))?;
let mut extra = ModuleExtra::new();
let mut previous_is_newline = false;
let tokens = tokens
.into_iter()
.filter_map(|(token, ref span)| {
let current_is_newline = token == Token::NewLine || token == Token::EmptyLine;
let result = match token {
Token::ModuleComment => {
extra.module_comments.push(*span);
None
}
Token::DocComment => {
extra.doc_comments.push(*span);
None
}
Token::Comment => {
extra.comments.push(*span);
None
}
Token::EmptyLine => {
extra.empty_lines.push(span.start);
None
}
Token::LeftParen => {
if previous_is_newline {
Some((Token::NewLineLeftParen, *span))
} else {
Some((Token::LeftParen, *span))
}
}
Token::Pipe => {
if previous_is_newline {
Some((Token::NewLinePipe, *span))
} else {
Some((Token::Pipe, *span))
}
}
Token::NewLine => None,
_ => Some((token, *span)),
};
previous_is_newline = current_is_newline;
result
})
.collect::<Vec<(Token, Span)>>();
Ok(LexInfo { tokens, extra })
}
pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
let base10 = text::int(10).map(|value| Token::Int {
value,

View File

@ -0,0 +1,87 @@
use chumsky::prelude::*;
use crate::{
ast,
parser::{
error::{self, ParseError},
token::{Base, Token},
},
};
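/// Parse a bytearray literal in any of its three surface forms:
///
/// - an array of byte values, e.g. `#[0, 170, 255]`
/// - a hex-encoded string, e.g. `#"00aaff"`
/// - a plain utf-8 string, e.g. `"aiken"`
///
/// The form that was used is recorded as a `ByteArrayFormatPreference`
/// alongside the decoded bytes.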
pub fn parser<A>(
into: impl Fn(Vec<u8>, ast::ByteArrayFormatPreference, ast::Span) -> A,
) -> impl Parser<Token, A, Error = ParseError> {
choice((array_of_bytes(), hex_string(), utf8_string()))
.map_with_span(move |(preferred_format, bytes), span| into(bytes, preferred_format, span))
}
pub fn array_of_bytes(
) -> impl Parser<Token, (ast::ByteArrayFormatPreference, Vec<u8>), Error = ParseError> {
just(Token::Hash)
.ignore_then(
select! {Token::Int {value, base, ..} => (value, base)}
.validate(|(value, base), span, emit| {
let byte: u8 = match value.parse() {
Ok(b) => b,
Err(_) => {
emit(ParseError::expected_input_found(
span,
None,
Some(error::Pattern::Byte),
));
0
}
};
(byte, base)
})
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftSquare), just(Token::RightSquare)),
)
.validate(|bytes, span, emit| {
let base = bytes.iter().fold(Ok(None), |acc, (_, base)| match acc {
Ok(None) => Ok(Some(base)),
Ok(Some(previous_base)) if previous_base == base => Ok(Some(base)),
_ => Err(()),
});
let base = match base {
Err(()) => {
emit(ParseError::hybrid_notation_in_bytearray(span));
Base::Decimal {
numeric_underscore: false,
}
}
Ok(None) => Base::Decimal {
numeric_underscore: false,
},
Ok(Some(base)) => *base,
};
(bytes.into_iter().map(|(b, _)| b).collect::<Vec<u8>>(), base)
})
.map(|(bytes, base)| (ast::ByteArrayFormatPreference::ArrayOfBytes(base), bytes))
}
pub fn hex_string(
) -> impl Parser<Token, (ast::ByteArrayFormatPreference, Vec<u8>), Error = ParseError> {
just(Token::Hash)
.ignore_then(
select! {Token::ByteString {value} => value}.validate(|value, span, emit| {
match hex::decode(value) {
Ok(bytes) => bytes,
Err(_) => {
emit(ParseError::malformed_base16_string_literal(span));
vec![]
}
}
}),
)
.map(|token| (ast::ByteArrayFormatPreference::HexadecimalString, token))
}
pub fn utf8_string(
) -> impl Parser<Token, (ast::ByteArrayFormatPreference, Vec<u8>), Error = ParseError> {
select! {Token::ByteString {value} => value.into_bytes() }
.map(|token| (ast::ByteArrayFormatPreference::Utf8String, token))
}

View File

@ -0,0 +1,10 @@
use chumsky::prelude::*;
use crate::parser::{
error::ParseError,
token::{Base, Token},
};
pub fn parser() -> impl Parser<Token, (String, Base), Error = ParseError> {
select! { Token::Int {value, base} => (value, base)}
}

View File

@ -0,0 +1,3 @@
pub(crate) mod bytearray;
pub(crate) mod int;
pub(crate) mod string;

View File

@ -0,0 +1,7 @@
use chumsky::prelude::*;
use crate::parser::{error::ParseError, token::Token};
pub fn parser() -> impl Parser<Token, String, Error = ParseError> {
select! {Token::String {value} => value}
}

View File

@ -0,0 +1,199 @@
use chumsky::prelude::*;
use crate::ast;
use super::{
error::{self, ParseError},
token::Token,
};
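/// Parse a pattern: constructors (optionally module-qualified, with named or
/// positional arguments and an optional `..` spread), variables, discards,
/// integer literals, tuples, and lists with an optional tail, each optionally
/// followed by an `as` binding.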
pub fn parser() -> impl Parser<Token, ast::UntypedPattern, Error = ParseError> {
recursive(|expression| {
let record_constructor_pattern_arg_parser = choice((
select! {Token::Name {name} => name}
.then_ignore(just(Token::Colon))
.then(expression.clone())
.map_with_span(|(name, pattern), span| ast::CallArg {
location: span,
label: Some(name),
value: pattern,
}),
select! {Token::Name{name} => name}.map_with_span(|name, span| ast::CallArg {
location: span,
value: ast::UntypedPattern::Var {
name: name.clone(),
location: span,
},
label: Some(name),
}),
))
.separated_by(just(Token::Comma))
.allow_trailing()
.then(
just(Token::DotDot)
.then_ignore(just(Token::Comma).or_not())
.ignored()
.or_not(),
)
.delimited_by(just(Token::LeftBrace), just(Token::RightBrace));
let tuple_constructor_pattern_arg_parser = expression
.clone()
.map(|pattern| ast::CallArg {
location: pattern.location(),
value: pattern,
label: None,
})
.separated_by(just(Token::Comma))
.allow_trailing()
.then(
just(Token::DotDot)
.then_ignore(just(Token::Comma).or_not())
.ignored()
.or_not(),
)
.delimited_by(just(Token::LeftParen), just(Token::RightParen));
let constructor_pattern_args_parser = choice((
record_constructor_pattern_arg_parser.map(|a| (a, true)),
tuple_constructor_pattern_arg_parser.map(|a| (a, false)),
))
.or_not()
.map(|opt_args| {
opt_args
.map(|((a, b), c)| (a, b.is_some(), c))
.unwrap_or_else(|| (vec![], false, false))
});
let constructor_pattern_parser =
select! {Token::UpName { name } => name}.then(constructor_pattern_args_parser);
choice((
select! { Token::Name {name} => name }
.then(
just(Token::Dot)
.ignore_then(constructor_pattern_parser.clone())
.or_not(),
)
.map_with_span(|(name, opt_pattern), span| {
if let Some((c_name, (arguments, with_spread, is_record))) = opt_pattern {
ast::UntypedPattern::Constructor {
is_record,
location: span,
name: c_name,
arguments,
module: Some(name),
constructor: (),
with_spread,
tipo: (),
}
} else {
ast::UntypedPattern::Var {
location: span,
name,
}
}
}),
constructor_pattern_parser.map_with_span(
|(name, (arguments, with_spread, is_record)), span| {
ast::UntypedPattern::Constructor {
is_record,
location: span,
name,
arguments,
module: None,
constructor: (),
with_spread,
tipo: (),
}
},
),
select! {Token::DiscardName {name} => name}.map_with_span(|name, span| {
ast::UntypedPattern::Discard {
name,
location: span,
}
}),
select! {Token::Int {value, base} => (value, base)}.map_with_span(
|(value, base), span| ast::UntypedPattern::Int {
location: span,
value,
base,
},
),
expression
.clone()
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(
choice((just(Token::LeftParen), just(Token::NewLineLeftParen))),
just(Token::RightParen),
)
.map_with_span(|elems, span| ast::UntypedPattern::Tuple {
location: span,
elems,
}),
just(Token::LeftSquare)
.ignore_then(expression.clone().separated_by(just(Token::Comma)))
.then(choice((
just(Token::Comma).ignore_then(
just(Token::DotDot)
.ignore_then(expression.clone().or_not())
.or_not(),
),
just(Token::Comma).ignored().or_not().map(|_| None),
)))
.then_ignore(just(Token::RightSquare))
.validate(|(elements, tail), span: ast::Span, emit| {
let tail = match tail {
// There is a tail and it has a Pattern::Var or Pattern::Discard
Some(Some(
pat @ (ast::UntypedPattern::Var { .. }
| ast::UntypedPattern::Discard { .. }),
)) => Some(pat),
Some(Some(pat)) => {
emit(ParseError::expected_input_found(
pat.location(),
None,
Some(error::Pattern::Match),
));
Some(pat)
}
// There is a tail but it has no content, implicit discard
Some(None) => Some(ast::UntypedPattern::Discard {
location: ast::Span {
start: span.end - 1,
end: span.end,
},
name: "_".to_string(),
}),
// No tail specified
None => None,
};
ast::UntypedPattern::List {
location: span,
elements,
tail: tail.map(Box::new),
}
}),
))
.then(
just(Token::As)
.ignore_then(select! { Token::Name {name} => name})
.or_not(),
)
.map_with_span(|(pattern, opt_as), span| {
if let Some(name) = opt_as {
ast::UntypedPattern::Assign {
name,
location: span,
pattern: Box::new(pattern),
}
} else {
pattern
}
})
})
}

View File

@ -0,0 +1,19 @@
---
source: crates/aiken-lang/src/parser/annotation.rs
description: "Code:\n\naiken.Option<Int>"
---
Constructor {
location: 0..17,
module: Some(
"aiken",
),
name: "Option",
arguments: [
Constructor {
location: 13..16,
module: None,
name: "Int",
arguments: [],
},
],
}

View File

@ -0,0 +1,99 @@
use chumsky::prelude::*;
use super::{error::ParseError, token::Token};
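/// Parse an optional marker token, returning `true` when it is present.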
pub fn optional_flag(token: Token) -> impl Parser<Token, bool, Error = ParseError> {
just(token).ignored().or_not().map(|v| v.is_some())
}
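/// Parse a `type` declaration head: an upper-cased type name, optionally
/// followed by comma-separated type parameters between `<` and `>`.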
pub fn type_name_with_args() -> impl Parser<Token, (String, Option<Vec<String>>), Error = ParseError>
{
just(Token::Type).ignore_then(
select! {Token::UpName { name } => name}.then(
select! {Token::Name { name } => name}
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::Less), just(Token::Greater))
.or_not(),
),
)
}
#[macro_export]
macro_rules! assert_expr {
($code:expr) => {
use chumsky::Parser;
let $crate::parser::lexer::LexInfo { tokens, .. } = $crate::parser::lexer::run(indoc::indoc! { $code }).unwrap();
let stream = chumsky::Stream::from_iter($crate::ast::Span::create(tokens.len(), 1), tokens.into_iter());
let result = $crate::parser::expr::sequence().parse(stream).unwrap();
insta::with_settings!({
description => concat!("Code:\n\n", indoc::indoc! { $code }),
prepend_module_to_snapshot => false,
omit_expression => true
}, {
insta::assert_debug_snapshot!(result);
});
};
}
#[macro_export]
macro_rules! assert_annotation {
($code:expr) => {
use chumsky::Parser;
let $crate::parser::lexer::LexInfo { tokens, .. } = $crate::parser::lexer::run(indoc::indoc! { $code }).unwrap();
let stream = chumsky::Stream::from_iter($crate::ast::Span::create(tokens.len(), 1), tokens.into_iter());
let result = $crate::parser::annotation().parse(stream).unwrap();
insta::with_settings!({
description => concat!("Code:\n\n", indoc::indoc! { $code }),
prepend_module_to_snapshot => false,
omit_expression => true
}, {
insta::assert_debug_snapshot!(result);
});
};
}
#[macro_export]
macro_rules! assert_module {
($code:expr) => {
let (module, _) =
$crate::parser::module(indoc::indoc!{ $code }, $crate::ast::ModuleKind::Validator).expect("Failed to parse code");
insta::with_settings!({
description => concat!("Code:\n\n", indoc::indoc! { $code }),
prepend_module_to_snapshot => false,
omit_expression => true
}, {
insta::assert_debug_snapshot!(module);
});
};
}
#[macro_export]
macro_rules! assert_definition {
($code:expr) => {
use chumsky::Parser;
let $crate::parser::lexer::LexInfo { tokens, .. } = $crate::parser::lexer::run(indoc::indoc! { $code }).unwrap();
let stream = chumsky::Stream::from_iter($crate::ast::Span::create(tokens.len(), 1), tokens.into_iter());
let result = $crate::parser::definition().parse(stream).unwrap();
insta::with_settings!({
description => concat!("Code:\n\n", indoc::indoc! { $code }),
prepend_module_to_snapshot => false,
omit_expression => true
}, {
insta::assert_debug_snapshot!(result);
});
};
}

View File

@ -0,0 +1,23 @@
---
source: crates/aiken-lang/src/parser.rs
description: "Code:\n\nuse aiken\n\n// some comment\n// more comments"
---
Module {
name: "",
docs: [],
type_info: (),
definitions: [
Use(
Use {
as_name: None,
location: 0..9,
module: [
"aiken",
],
package: (),
unqualified: [],
},
),
],
kind: Validator,
}

View File

@ -0,0 +1,201 @@
---
source: crates/aiken-lang/src/parser.rs
description: "Code:\n\nfn foo_1() {\n let a = bar\n (40)\n}\n\nfn foo_2() {\n let a = bar\n {40}\n}\n\nfn foo_3() {\n let a = (40+2)\n}\n\nfn foo_4() {\n let a = bar(42)\n (a + 14) * 42\n}\n"
---
Module {
name: "",
docs: [],
type_info: (),
definitions: [
Fn(
Function {
arguments: [],
body: Sequence {
location: 15..32,
expressions: [
Assignment {
location: 15..26,
value: Var {
location: 23..26,
name: "bar",
},
pattern: Var {
location: 19..20,
name: "a",
},
kind: Let,
annotation: None,
},
Int {
location: 30..32,
value: "40",
base: Decimal {
numeric_underscore: false,
},
},
],
},
doc: None,
location: 0..10,
name: "foo_1",
public: false,
return_annotation: None,
return_type: (),
end_position: 34,
can_error: true,
},
),
Fn(
Function {
arguments: [],
body: Sequence {
location: 52..69,
expressions: [
Assignment {
location: 52..63,
value: Var {
location: 60..63,
name: "bar",
},
pattern: Var {
location: 56..57,
name: "a",
},
kind: Let,
annotation: None,
},
Int {
location: 67..69,
value: "40",
base: Decimal {
numeric_underscore: false,
},
},
],
},
doc: None,
location: 37..47,
name: "foo_2",
public: false,
return_annotation: None,
return_type: (),
end_position: 71,
can_error: true,
},
),
Fn(
Function {
arguments: [],
body: Assignment {
location: 89..103,
value: BinOp {
location: 98..102,
name: AddInt,
left: Int {
location: 98..100,
value: "40",
base: Decimal {
numeric_underscore: false,
},
},
right: Int {
location: 101..102,
value: "2",
base: Decimal {
numeric_underscore: false,
},
},
},
pattern: Var {
location: 93..94,
name: "a",
},
kind: Let,
annotation: None,
},
doc: None,
location: 74..84,
name: "foo_3",
public: false,
return_annotation: None,
return_type: (),
end_position: 104,
can_error: true,
},
),
Fn(
Function {
arguments: [],
body: Sequence {
location: 122..153,
expressions: [
Assignment {
location: 122..137,
value: Call {
arguments: [
CallArg {
label: None,
location: 134..136,
value: Int {
location: 134..136,
value: "42",
base: Decimal {
numeric_underscore: false,
},
},
},
],
fun: Var {
location: 130..133,
name: "bar",
},
location: 130..137,
},
pattern: Var {
location: 126..127,
name: "a",
},
kind: Let,
annotation: None,
},
BinOp {
location: 141..153,
name: MultInt,
left: BinOp {
location: 141..147,
name: AddInt,
left: Var {
location: 141..142,
name: "a",
},
right: Int {
location: 145..147,
value: "14",
base: Decimal {
numeric_underscore: false,
},
},
},
right: Int {
location: 151..153,
value: "42",
base: Decimal {
numeric_underscore: false,
},
},
},
],
},
doc: None,
location: 107..117,
name: "foo_4",
public: false,
return_annotation: None,
return_type: (),
end_position: 154,
can_error: true,
},
),
],
kind: Validator,
}

View File

@ -0,0 +1,52 @@
---
source: crates/aiken-lang/src/parser.rs
description: "Code:\n\nfn foo() {\n let x = \"★\"\n x\n}\n"
---
Module {
name: "",
docs: [],
type_info: (),
definitions: [
Fn(
Function {
arguments: [],
body: Sequence {
location: 13..30,
expressions: [
Assignment {
location: 13..26,
value: ByteArray {
location: 21..26,
bytes: [
226,
152,
133,
],
preferred_format: Utf8String,
},
pattern: Var {
location: 17..18,
name: "x",
},
kind: Let,
annotation: None,
},
Var {
location: 29..30,
name: "x",
},
],
},
doc: None,
location: 0..8,
name: "foo",
public: false,
return_annotation: None,
return_type: (),
end_position: 31,
can_error: true,
},
),
],
kind: Validator,
}

View File

@ -0,0 +1,50 @@
---
source: crates/aiken-lang/src/parser.rs
description: "Code:\n\nfn foo() {\n let x = \"*\"\n x\n}\n"
---
Module {
name: "",
docs: [],
type_info: (),
definitions: [
Fn(
Function {
arguments: [],
body: Sequence {
location: 13..28,
expressions: [
Assignment {
location: 13..24,
value: ByteArray {
location: 21..24,
bytes: [
42,
],
preferred_format: Utf8String,
},
pattern: Var {
location: 17..18,
name: "x",
},
kind: Let,
annotation: None,
},
Var {
location: 27..28,
name: "x",
},
],
},
doc: None,
location: 0..8,
name: "foo",
public: false,
return_annotation: None,
return_type: (),
end_position: 29,
can_error: true,
},
),
],
kind: Validator,
}

View File

@ -0,0 +1,24 @@
---
source: crates/aiken-lang/src/parser.rs
description: "Code:\n\nuse aiken/list\r\n"
---
Module {
name: "",
docs: [],
type_info: (),
definitions: [
Use(
Use {
as_name: None,
location: 0..14,
module: [
"aiken",
"list",
],
package: (),
unqualified: [],
},
),
],
kind: Validator,
}

View File

@ -1,4 +1,3 @@
mod check;
mod format;
mod lexer;
mod parser;

File diff suppressed because it is too large

View File

@ -168,7 +168,7 @@ fn qualify_type_name(module: &String, typ_name: &str) -> Document<'static> {
}
#[cfg(test)]
mod test {
mod tests {
use std::cell::RefCell;
use pretty_assertions::assert_eq;

View File

@ -140,7 +140,7 @@ impl From<&Config> for Preamble {
}
#[cfg(test)]
mod test {
mod tests {
use super::*;
use aiken_lang::builtins;
use schema::{Data, Declaration, Items, Schema};

View File

@ -1033,7 +1033,7 @@ Here's the types I followed and that led me to this problem:
}
#[cfg(test)]
pub mod test {
pub mod tests {
use super::*;
use proptest::prelude::*;
use serde_json::{self, json, Value};

View File

@ -177,7 +177,7 @@ impl Validator {
}
#[cfg(test)]
mod test {
mod tests {
use assert_json_diff::assert_json_eq;
use serde_json::{self, json};

View File

@ -1,126 +1,123 @@
#[cfg(test)]
mod test {
use flat_rs::filler::Filler;
use flat_rs::{decode, encode};
use proptest::prelude::*;
use flat_rs::filler::Filler;
use flat_rs::{decode, encode};
use proptest::prelude::*;
prop_compose! {
fn arb_big_vec()(size in 255..300, element in any::<u8>()) -> Vec<u8> {
(0..size).map(|_| element).collect()
}
}
#[test]
fn encode_bool() {
let bytes = encode(&true).unwrap();
assert_eq!(bytes, vec![0b10000001]);
let decoded: bool = decode(bytes.as_slice()).unwrap();
assert!(decoded);
let bytes = encode(&false).unwrap();
assert_eq!(bytes, vec![0b00000001]);
let decoded: bool = decode(bytes.as_slice()).unwrap();
assert!(!decoded);
}
#[test]
fn encode_u8() {
let bytes = encode(&3_u8).unwrap();
assert_eq!(bytes, vec![0b00000011, 0b00000001]);
let decoded: u8 = decode(bytes.as_slice()).unwrap();
assert_eq!(decoded, 3_u8);
}
proptest! {
#[test]
fn encode_isize(x: isize) {
let bytes = encode(&x).unwrap();
let decoded: isize = decode(&bytes).unwrap();
assert_eq!(decoded, x);
}
#[test]
fn encode_usize(x: usize) {
let bytes = encode(&x).unwrap();
let decoded: usize = decode(&bytes).unwrap();
assert_eq!(decoded, x);
}
#[test]
fn encode_char(c: char) {
let bytes = encode(&c).unwrap();
let decoded: char = decode(&bytes).unwrap();
assert_eq!(decoded, c);
}
#[test]
fn encode_string(str: String) {
let bytes = encode(&str).unwrap();
let decoded: String = decode(&bytes).unwrap();
assert_eq!(decoded, str);
}
#[test]
fn encode_vec_u8(xs: Vec<u8>) {
let bytes = encode(&xs).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_big_vec_u8(xs in arb_big_vec()) {
let bytes = encode(&xs).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_arr_u8(xs: Vec<u8>) {
let bytes = encode(&xs.as_slice()).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_big_arr_u8(xs in arb_big_vec()) {
let bytes = encode(&xs.as_slice()).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_boxed(c: char) {
let boxed = Box::new(c);
let bytes = encode(&boxed).unwrap();
let decoded: char = decode(&bytes).unwrap();
assert_eq!(decoded, c);
}
}
#[test]
fn encode_filler() {
let bytes = encode(&Filler::FillerEnd).unwrap();
assert_eq!(bytes, vec![0b0000001, 0b00000001]);
let bytes = encode(&Filler::FillerStart(Box::new(Filler::FillerEnd))).unwrap();
assert_eq!(bytes, vec![0b0000001, 0b00000001]);
let bytes = encode(&Filler::FillerStart(Box::new(Filler::FillerStart(
Box::new(Filler::FillerEnd),
))))
.unwrap();
assert_eq!(bytes, vec![0b0000001, 0b00000001]);
prop_compose! {
fn arb_big_vec()(size in 255..300, element in any::<u8>()) -> Vec<u8> {
(0..size).map(|_| element).collect()
}
}
#[test]
fn encode_bool() {
let bytes = encode(&true).unwrap();
assert_eq!(bytes, vec![0b10000001]);
let decoded: bool = decode(bytes.as_slice()).unwrap();
assert!(decoded);
let bytes = encode(&false).unwrap();
assert_eq!(bytes, vec![0b00000001]);
let decoded: bool = decode(bytes.as_slice()).unwrap();
assert!(!decoded);
}
#[test]
fn encode_u8() {
let bytes = encode(&3_u8).unwrap();
assert_eq!(bytes, vec![0b00000011, 0b00000001]);
let decoded: u8 = decode(bytes.as_slice()).unwrap();
assert_eq!(decoded, 3_u8);
}
proptest! {
#[test]
fn encode_isize(x: isize) {
let bytes = encode(&x).unwrap();
let decoded: isize = decode(&bytes).unwrap();
assert_eq!(decoded, x);
}
#[test]
fn encode_usize(x: usize) {
let bytes = encode(&x).unwrap();
let decoded: usize = decode(&bytes).unwrap();
assert_eq!(decoded, x);
}
#[test]
fn encode_char(c: char) {
let bytes = encode(&c).unwrap();
let decoded: char = decode(&bytes).unwrap();
assert_eq!(decoded, c);
}
#[test]
fn encode_string(str: String) {
let bytes = encode(&str).unwrap();
let decoded: String = decode(&bytes).unwrap();
assert_eq!(decoded, str);
}
#[test]
fn encode_vec_u8(xs: Vec<u8>) {
let bytes = encode(&xs).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_big_vec_u8(xs in arb_big_vec()) {
let bytes = encode(&xs).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_arr_u8(xs: Vec<u8>) {
let bytes = encode(&xs.as_slice()).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_big_arr_u8(xs in arb_big_vec()) {
let bytes = encode(&xs.as_slice()).unwrap();
let decoded: Vec<u8> = decode(&bytes).unwrap();
assert_eq!(decoded, xs);
}
#[test]
fn encode_boxed(c: char) {
let boxed = Box::new(c);
let bytes = encode(&boxed).unwrap();
let decoded: char = decode(&bytes).unwrap();
assert_eq!(decoded, c);
}
}
#[test]
fn encode_filler() {
let bytes = encode(&Filler::FillerEnd).unwrap();
assert_eq!(bytes, vec![0b0000001, 0b00000001]);
let bytes = encode(&Filler::FillerStart(Box::new(Filler::FillerEnd))).unwrap();
assert_eq!(bytes, vec![0b0000001, 0b00000001]);
let bytes = encode(&Filler::FillerStart(Box::new(Filler::FillerStart(
Box::new(Filler::FillerEnd),
))))
.unwrap();
assert_eq!(bytes, vec![0b0000001, 0b00000001]);
}

View File

@ -1,21 +1,18 @@
#[cfg(test)]
mod test {
use flat_rs::zigzag::{to_isize, to_usize};
use proptest::prelude::*;
use flat_rs::zigzag::{to_isize, to_usize};
use proptest::prelude::*;
proptest! {
#[test]
fn zigzag(i: isize) {
let u = to_usize(i);
let converted_i = to_isize(u);
assert_eq!(converted_i, i);
}
proptest! {
#[test]
fn zigzag(i: isize) {
let u = to_usize(i);
let converted_i = to_isize(u);
assert_eq!(converted_i, i);
}
#[test]
fn zagzig(u: usize) {
let i = to_isize(u);
let converted_u = to_usize(i);
assert_eq!(converted_u, u);
}
#[test]
fn zagzig(u: usize) {
let i = to_isize(u);
let converted_u = to_usize(i);
assert_eq!(converted_u, u);
}
}

View File

@ -808,7 +808,7 @@ pub fn decode_constant_tag(d: &mut Decoder) -> Result<u8, de::Error> {
}
#[cfg(test)]
mod test {
mod tests {
use super::{Constant, Program, Term};
use crate::{
ast::{DeBruijn, Name, Type},

Some files were not shown because too many files have changed in this diff