Merge pull request #391 from aiken-lang/string-and-bytearray-literals-iteration

String and bytearray literals iteration
This commit is contained in:
Matthias Benkort 2023-02-19 10:38:05 +01:00 committed by GitHub
commit 95fce14b75
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
29 changed files with 458 additions and 229 deletions

View File

@ -326,11 +326,21 @@ pub struct DefinitionLocation<'module> {
#[derive(Debug, Clone, PartialEq)]
pub enum Constant {
Int { location: Span, value: String },
Int {
location: Span,
value: String,
},
String { location: Span, value: String },
String {
location: Span,
value: String,
},
ByteArray { location: Span, bytes: Vec<u8> },
ByteArray {
location: Span,
bytes: Vec<u8>,
preferred_format: ByteArrayFormatPreference,
},
}
impl Constant {
@ -748,6 +758,13 @@ impl<A, B> Pattern<A, B> {
}
}
/// Records how a `ByteArray` literal was spelled in the source, so that the
/// formatter can print it back using the same notation the author chose.
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum ByteArrayFormatPreference {
    /// Written as `#"..."` — a base16-encoded (hexadecimal) string literal.
    HexadecimalString,
    /// Written as `#[...]` — an explicit comma-separated list of byte values.
    ArrayOfBytes,
    /// Written as `"..."` — a plain double-quoted literal whose UTF-8 bytes
    /// become the bytearray's contents.
    Utf8String,
}
#[derive(Debug, Clone, PartialEq, Eq, Copy)]
pub enum AssignmentKind {
Let,

View File

@ -4,9 +4,9 @@ use vec1::Vec1;
use crate::{
ast::{
Annotation, Arg, AssignmentKind, BinOp, CallArg, Clause, DefinitionLocation, IfBranch,
Pattern, RecordUpdateSpread, Span, TraceKind, TypedRecordUpdateArg, UnOp,
UntypedRecordUpdateArg,
Annotation, Arg, AssignmentKind, BinOp, ByteArrayFormatPreference, CallArg, Clause,
DefinitionLocation, IfBranch, Pattern, RecordUpdateSpread, Span, TraceKind,
TypedRecordUpdateArg, UnOp, UntypedRecordUpdateArg,
},
builtins::void,
tipo::{ModuleValueConstructor, PatternConstructor, Type, ValueConstructor},
@ -361,6 +361,7 @@ pub enum UntypedExpr {
ByteArray {
location: Span,
bytes: Vec<u8>,
preferred_format: ByteArrayFormatPreference,
},
PipeLine {

View File

@ -5,12 +5,12 @@ use vec1::Vec1;
use crate::{
ast::{
Annotation, Arg, ArgName, AssignmentKind, BinOp, CallArg, ClauseGuard, Constant, DataType,
Definition, Function, IfBranch, ModuleConstant, Pattern, RecordConstructor,
RecordConstructorArg, RecordUpdateSpread, Span, TraceKind, TypeAlias, TypedArg, UnOp,
UnqualifiedImport, UntypedArg, UntypedClause, UntypedClauseGuard, UntypedDefinition,
UntypedFunction, UntypedModule, UntypedPattern, UntypedRecordUpdateArg, Use, Validator,
CAPTURE_VARIABLE,
Annotation, Arg, ArgName, AssignmentKind, BinOp, ByteArrayFormatPreference, CallArg,
ClauseGuard, Constant, DataType, Definition, Function, IfBranch, ModuleConstant, Pattern,
RecordConstructor, RecordConstructorArg, RecordUpdateSpread, Span, TraceKind, TypeAlias,
TypedArg, UnOp, UnqualifiedImport, UntypedArg, UntypedClause, UntypedClauseGuard,
UntypedDefinition, UntypedFunction, UntypedModule, UntypedPattern, UntypedRecordUpdateArg,
Use, Validator, CAPTURE_VARIABLE,
},
docvec,
expr::{UntypedExpr, DEFAULT_ERROR_STR, DEFAULT_TODO_STR},
@ -326,7 +326,11 @@ impl<'comments> Formatter<'comments> {
fn const_expr<'a>(&mut self, value: &'a Constant) -> Document<'a> {
match value {
Constant::ByteArray { bytes, .. } => self.bytearray(bytes),
Constant::ByteArray {
bytes,
preferred_format,
..
} => self.bytearray(bytes, preferred_format),
Constant::Int { value, .. } => value.to_doc(),
Constant::String { value, .. } => self.string(value),
}
@ -635,18 +639,43 @@ impl<'comments> Formatter<'comments> {
}
}
pub fn bytearray<'a>(&mut self, bytes: &'a [u8]) -> Document<'a> {
"#".to_doc()
.append("\"")
.append(Document::String(hex::encode(bytes)))
.append("\"")
/// Render a bytearray literal as a document, preserving the notation it was
/// originally written in (`preferred_format`): hex string, byte list, or
/// UTF-8 string.
pub fn bytearray<'a>(
    &mut self,
    bytes: &'a [u8],
    preferred_format: &ByteArrayFormatPreference,
) -> Document<'a> {
    match preferred_format {
        // `#"6f6f"` — re-encode the raw bytes back to base16.
        ByteArrayFormatPreference::HexadecimalString => "#"
            .to_doc()
            .append("\"")
            .append(Document::String(hex::encode(bytes)))
            .append("\""),
        // `#[111, 111]` — one decimal entry per byte; flex_break lets short
        // lists stay on one line while long ones wrap and indent.
        ByteArrayFormatPreference::ArrayOfBytes => "#"
            .to_doc()
            .append(
                flex_break("[", "[")
                    .append(join(bytes.iter().map(|b| b.to_doc()), break_(",", ", ")))
                    .nest(INDENT)
                    .append(break_(",", ""))
                    .append("]"),
            )
            .group(),
        // `"foo"` — print the bytes back as quoted text.
        // NOTE(review): unwrap assumes the bytes are valid UTF-8, presumably
        // because this variant only arises from a quoted string literal in
        // the parser — confirm no other construction path exists.
        ByteArrayFormatPreference::Utf8String => nil()
            .append("\"")
            .append(Document::String(String::from_utf8(bytes.to_vec()).unwrap()))
            .append("\""),
    }
}
pub fn expr<'a>(&mut self, expr: &'a UntypedExpr) -> Document<'a> {
let comments = self.pop_comments(expr.start_byte_index());
let document = match expr {
UntypedExpr::ByteArray { bytes, .. } => self.bytearray(bytes),
UntypedExpr::ByteArray {
bytes,
preferred_format,
..
} => self.bytearray(bytes, preferred_format),
UntypedExpr::If {
branches,
@ -741,7 +770,7 @@ impl<'comments> Formatter<'comments> {
}
fn string<'a>(&self, string: &'a String) -> Document<'a> {
let doc = string.to_doc().surround("\"", "\"");
let doc = "@".to_doc().append(string.to_doc().surround("\"", "\""));
if string.contains('\n') {
doc.force_break()
} else {

View File

@ -7,7 +7,10 @@ pub mod lexer;
pub mod token;
use crate::{
ast::{self, BinOp, Span, TraceKind, UnOp, UntypedDefinition, CAPTURE_VARIABLE},
ast::{
self, BinOp, ByteArrayFormatPreference, Span, TraceKind, UnOp, UntypedDefinition,
CAPTURE_VARIABLE,
},
expr,
};
@ -402,9 +405,12 @@ fn constant_value_parser() -> impl Parser<Token, ast::Constant, Error = ParseErr
});
let constant_bytearray_parser =
bytearray_parser().map_with_span(|bytes, span| ast::Constant::ByteArray {
location: span,
bytes,
bytearray_parser().map_with_span(|(preferred_format, bytes), span| {
ast::Constant::ByteArray {
location: span,
bytes,
preferred_format,
}
});
choice((
@ -414,42 +420,54 @@ fn constant_value_parser() -> impl Parser<Token, ast::Constant, Error = ParseErr
))
}
pub fn bytearray_parser() -> impl Parser<Token, Vec<u8>, Error = ParseError> {
let bytearray_list_parser = just(Token::Hash).ignore_then(
select! {Token::Int {value} => value}
.validate(|value, span, emit| {
let byte: u8 = match value.parse() {
Ok(b) => b,
Err(_) => {
emit(ParseError::expected_input_found(
span,
None,
Some(error::Pattern::Byte),
));
pub fn bytearray_parser(
) -> impl Parser<Token, (ByteArrayFormatPreference, Vec<u8>), Error = ParseError> {
let bytearray_list_parser = just(Token::Hash)
.ignore_then(
select! {Token::Int {value} => value}
.validate(|value, span, emit| {
let byte: u8 = match value.parse() {
Ok(b) => b,
Err(_) => {
emit(ParseError::expected_input_found(
span,
None,
Some(error::Pattern::Byte),
));
0
}
};
0
}
};
byte
})
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftSquare), just(Token::RightSquare)),
);
byte
})
.separated_by(just(Token::Comma))
.allow_trailing()
.delimited_by(just(Token::LeftSquare), just(Token::RightSquare)),
)
.map(|token| (ByteArrayFormatPreference::ArrayOfBytes, token));
let bytearray_hexstring_parser =
just(Token::Hash).ignore_then(select! {Token::String {value} => value}.validate(
|value, span, emit| match hex::decode(value) {
Ok(bytes) => bytes,
Err(_) => {
emit(ParseError::malformed_base16_string_literal(span));
vec![]
}
},
));
just(Token::Hash)
.ignore_then(select! {Token::ByteString {value} => value}.validate(
|value, span, emit| match hex::decode(value) {
Ok(bytes) => bytes,
Err(_) => {
emit(ParseError::malformed_base16_string_literal(span));
vec![]
}
},
))
.map(|token| (ByteArrayFormatPreference::HexadecimalString, token));
choice((bytearray_list_parser, bytearray_hexstring_parser))
let bytearray_utf8_parser = select! {Token::ByteString {value} => value.into_bytes() }
.map(|token| (ByteArrayFormatPreference::Utf8String, token));
choice((
bytearray_list_parser,
bytearray_hexstring_parser,
bytearray_utf8_parser,
))
}
pub fn fn_param_parser() -> impl Parser<Token, ast::UntypedArg, Error = ParseError> {
@ -515,6 +533,25 @@ pub fn anon_fn_param_parser() -> impl Parser<Token, ast::UntypedArg, Error = Par
})
}
// Interpret bytearray string literals written as utf-8 strings, as strings.
//
// This is mostly convenient so that todo & error work with either @"..." or
// plain "...". In this particular context there's no ambiguity about the
// right-hand side (a trace/todo/error message is always a String), so we can
// provide this syntactic sugar. Any other expression passes through untouched.
fn flexible_string_literal(expr: expr::UntypedExpr) -> expr::UntypedExpr {
    match expr {
        expr::UntypedExpr::ByteArray {
            preferred_format: ByteArrayFormatPreference::Utf8String,
            bytes,
            location,
        } => expr::UntypedExpr::String {
            location,
            // unwrap: a Utf8String-formatted ByteArray is lexed from a quoted
            // string literal, so its bytes should be valid UTF-8 — TODO confirm
            value: String::from_utf8(bytes).unwrap(),
        },
        _ => expr,
    }
}
pub fn expr_seq_parser() -> impl Parser<Token, expr::UntypedExpr, Error = ParseError> {
recursive(|r| {
choice((
@ -525,14 +562,18 @@ pub fn expr_seq_parser() -> impl Parser<Token, expr::UntypedExpr, Error = ParseE
kind: TraceKind::Trace,
location: span,
then: Box::new(then_),
text: Box::new(text),
text: Box::new(flexible_string_literal(text)),
}),
just(Token::ErrorTerm)
.ignore_then(expr_parser(r.clone()).or_not())
.map_with_span(|reason, span| expr::UntypedExpr::error(span, reason)),
.map_with_span(|reason, span| {
expr::UntypedExpr::error(span, reason.map(flexible_string_literal))
}),
just(Token::Todo)
.ignore_then(expr_parser(r.clone()).or_not())
.map_with_span(|reason, span| expr::UntypedExpr::todo(span, reason)),
.map_with_span(|reason, span| {
expr::UntypedExpr::todo(span, reason.map(flexible_string_literal))
}),
expr_parser(r.clone())
.then(r.repeated())
.foldl(|current, next| current.append_in_sequence(next)),
@ -816,11 +857,13 @@ pub fn expr_parser(
elems,
});
let bytearray =
bytearray_parser().map_with_span(|bytes, span| expr::UntypedExpr::ByteArray {
let bytearray = bytearray_parser().map_with_span(|(preferred_format, bytes), span| {
expr::UntypedExpr::ByteArray {
location: span,
bytes,
});
preferred_format,
}
});
let list_parser = just(Token::LeftSquare)
.ignore_then(r.clone().separated_by(just(Token::Comma)))
@ -904,14 +947,18 @@ pub fn expr_parser(
.then_ignore(one_of(Token::RArrow).not().rewind())
.or_not(),
)
.map_with_span(|reason, span| expr::UntypedExpr::todo(span, reason)),
.map_with_span(|reason, span| {
expr::UntypedExpr::todo(span, reason.map(flexible_string_literal))
}),
just(Token::ErrorTerm)
.ignore_then(
r.clone()
.then_ignore(just(Token::RArrow).not().rewind())
.or_not(),
)
.map_with_span(|reason, span| expr::UntypedExpr::error(span, reason)),
.map_with_span(|reason, span| {
expr::UntypedExpr::error(span, reason.map(flexible_string_literal))
}),
)))
.map_with_span(
|(((patterns, alternative_patterns_opt), guard), then), span| ast::UntypedClause {

View File

@ -102,15 +102,18 @@ impl<T: Into<Pattern>> chumsky::Error<T> for ParseError {
pub enum ErrorKind {
#[error("I arrived at the end of the file unexpectedly.")]
UnexpectedEnd,
#[error("{0}")]
#[diagnostic(help("{}", .0.help().unwrap_or_else(|| Box::new(""))))]
Unexpected(Pattern),
#[error("I discovered an invalid tuple index.")]
#[diagnostic()]
InvalidTupleIndex {
#[help]
hint: Option<String>,
},
#[error("I tripped over a malformed base16-encoded string literal.")]
#[diagnostic(help("{}", formatdoc! {
r#"You can declare literal bytearrays from base16-encoded (a.k.a. hexadecimal) string literals.
@ -123,6 +126,7 @@ pub enum ErrorKind {
"#
, "pub const".bright_blue(), "=".yellow(), "\"f4c9f9c4252d86702c2f4c2e49e6648c7cffe3c8f2b6b7d779788f50\"".bright_purple()}))]
MalformedBase16StringLiteral,
#[error("I failed to understand a when clause guard.")]
#[diagnostic(url("https://aiken-lang.org/language-tour/control-flow#checking-equality-and-ordering-in-patterns"))]
#[diagnostic(help("{}", formatdoc! {

View File

@ -77,13 +77,21 @@ pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
.or(just('t').to('\t')),
);
let string = just('"')
let string = just('@')
.ignore_then(just('"'))
.ignore_then(filter(|c| *c != '\\' && *c != '"').or(escape).repeated())
.then_ignore(just('"'))
.collect::<String>()
.map(|value| Token::String { value })
.labelled("string");
let bytestring = just('"')
.ignore_then(filter(|c| *c != '\\' && *c != '"').or(escape).repeated())
.then_ignore(just('"'))
.collect::<String>()
.map(|value| Token::ByteString { value })
.labelled("bytestring");
let keyword = text::ident().map(|s: String| match s.as_str() {
"trace" => Token::Trace,
"error" => Token::ErrorTerm,
@ -158,16 +166,18 @@ pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
comment_parser(Token::ModuleComment),
comment_parser(Token::DocComment),
comment_parser(Token::Comment),
choice((ordinal, keyword, int, op, newlines, grouping, string))
.or(any().map(Token::Error).validate(|t, span, emit| {
emit(ParseError::expected_input_found(
span,
None,
Some(t.clone()),
));
t
}))
.map_with_span(|token, span| (token, span)),
choice((
ordinal, keyword, int, op, newlines, grouping, bytestring, string,
))
.or(any().map(Token::Error).validate(|t, span, emit| {
emit(ParseError::expected_input_found(
span,
None,
Some(t.clone()),
));
t
}))
.map_with_span(|token, span| (token, span)),
))
.padded_by(one_of(" \t").ignored().repeated())
.recover_with(skip_then_retry_until([]))

View File

@ -8,6 +8,7 @@ pub enum Token {
UpName { name: String },
DiscardName { name: String },
Int { value: String },
ByteString { value: String },
String { value: String },
// Groupings
NewLineLeftParen, // ↳(
@ -97,6 +98,7 @@ impl fmt::Display for Token {
Token::DiscardName { name } => name,
Token::Int { value } => value,
Token::String { value } => value,
Token::ByteString { value } => value,
Token::NewLineLeftParen => "↳(",
Token::LeftParen => "(",
Token::RightParen => ")",

View File

@ -235,12 +235,12 @@ fn list_pattern_6() {
fn trace_strings() {
let source_code = r#"
fn bar() {
"BAR"
@"BAR"
}
test foo() {
let msg1 = "FOO"
trace("INLINE")
let msg1 = @"FOO"
trace(@"INLINE")
trace(msg1)
trace(bar())
True
@ -305,3 +305,17 @@ fn trace_if_false_ko() {
Err((_, Error::CouldNotUnify { .. }))
))
}
#[test]
fn utf8_hex_literal_warning() {
let source_code = r#"
pub const policy_id = "f43a62fdc3965df486de8a0d32fe800963589c41b38946602a0dc535"
"#;
let (warnings, _) = check(parse(source_code)).unwrap();
assert!(matches!(
warnings[0],
Warning::Utf8ByteArrayIsValidHexString { .. }
))
}

View File

@ -311,22 +311,6 @@ fn test_block_logical_expr() {
assert_fmt(src, expected);
}
#[test]
fn test_format_bytearray_literals() {
let src = indoc! {r#"
const foo = #"ff00"
const bar = #[0, 255]
"#};
let expected = indoc! { r#"
const foo = #"ff00"
const bar = #"00ff"
"#};
assert_fmt(src, expected);
}
#[test]
fn test_nested_function_calls() {
let src = indoc! {r#"
@ -360,7 +344,7 @@ fn test_nested_function_calls() {
),
when output.datum is {
InlineDatum(_) -> True
_ -> error "expected inline datum"
_ -> error @"expected inline datum"
},
]
|> list.and
@ -398,7 +382,33 @@ fn format_trace_todo_error() {
}
"#};
assert_fmt(src, src);
let out = indoc! {r#"
fn foo_1() {
todo
}
fn foo_2() {
todo @"my custom message"
}
fn foo_3() {
when x is {
Foo -> True
_ -> error
}
}
fn foo_4() {
if 14 == 42 {
error @"I don't think so"
} else {
trace @"been there"
True
}
}
"#};
assert_fmt(src, out);
}
#[test]
@ -503,3 +513,37 @@ fn test_newline_module_comments() {
assert_fmt(src, out);
}
#[test]
fn test_bytearray_literals() {
let src = indoc! {r#"
const foo_const_array = #[102, 111, 111]
const foo_const_hex = #"666f6f"
const foo_const_utf8 = "foo"
fn foo() {
let foo_const_array = #[102, 111, 111]
let foo_const_hex = #"666f6f"
let foo_const_utf8 = "foo"
}
"#};
assert_fmt(src, src);
}
#[test]
fn test_string_literal() {
let src = indoc! {r#"
const foo_const: String = @"foo"
fn foo() {
let foo_var: String = @"foo"
}
"#};
assert_fmt(src, src);
}

View File

@ -1343,7 +1343,7 @@ fn call() {
#[test]
fn record_update() {
let code = indoc! {r#"
fn update_name(user: User, name: String) -> User {
fn update_name(user: User, name: ByteArray) -> User {
User { ..user, name: "Aiken", age }
}
"#};
@ -1373,60 +1373,61 @@ fn record_update() {
name: "name".to_string(),
location: Span::new((), 27..31),
},
location: Span::new((), 27..39),
location: Span::new((), 27..42),
annotation: Some(ast::Annotation::Constructor {
location: Span::new((), 33..39),
location: Span::new((), 33..42),
module: None,
name: "String".to_string(),
name: "ByteArray".to_string(),
arguments: vec![],
}),
tipo: (),
},
],
body: expr::UntypedExpr::RecordUpdate {
location: Span::new((), 53..88),
location: Span::new((), 56..91),
constructor: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 53..57),
location: Span::new((), 56..60),
name: "User".to_string(),
}),
spread: ast::RecordUpdateSpread {
base: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 62..66),
location: Span::new((), 65..69),
name: "user".to_string(),
}),
location: Span::new((), 60..66),
location: Span::new((), 63..69),
},
arguments: vec![
ast::UntypedRecordUpdateArg {
label: "name".to_string(),
location: Span::new((), 68..81),
value: expr::UntypedExpr::String {
location: Span::new((), 74..81),
value: "Aiken".to_string(),
location: Span::new((), 71..84),
value: expr::UntypedExpr::ByteArray {
location: Span::new((), 77..84),
bytes: String::from("Aiken").into_bytes(),
preferred_format: ast::ByteArrayFormatPreference::Utf8String,
},
},
ast::UntypedRecordUpdateArg {
label: "age".to_string(),
location: Span::new((), 83..86),
location: Span::new((), 86..89),
value: expr::UntypedExpr::Var {
location: Span::new((), 83..86),
location: Span::new((), 86..89),
name: "age".to_string(),
},
},
],
},
doc: None,
location: Span::new((), 0..48),
location: Span::new((), 0..51),
name: "update_name".to_string(),
public: false,
return_annotation: Some(ast::Annotation::Constructor {
location: Span::new((), 44..48),
location: Span::new((), 47..51),
module: None,
name: "User".to_string(),
arguments: vec![],
}),
return_type: (),
end_position: 89,
end_position: 92,
})],
)
}
@ -1448,9 +1449,10 @@ fn record_create_labeled() {
ast::CallArg {
label: Some("name".to_string()),
location: Span::new((), 23..36),
value: expr::UntypedExpr::String {
value: expr::UntypedExpr::ByteArray {
location: Span::new((), 29..36),
value: "Aiken".to_string(),
bytes: String::from("Aiken").into_bytes(),
preferred_format: ast::ByteArrayFormatPreference::Utf8String,
},
},
ast::CallArg {
@ -1504,9 +1506,10 @@ fn record_create_labeled_with_field_access() {
ast::CallArg {
label: Some("name".to_string()),
location: Span::new((), 35..48),
value: expr::UntypedExpr::String {
value: expr::UntypedExpr::ByteArray {
location: Span::new((), 41..48),
value: "Aiken".to_string(),
bytes: String::from("Aiken").into_bytes(),
preferred_format: ast::ByteArrayFormatPreference::Utf8String,
},
},
ast::CallArg {
@ -1785,6 +1788,7 @@ fn plain_bytearray_literals() {
value: Box::new(Constant::ByteArray {
location: Span::new((), 25..39),
bytes: vec![0, 170, 255],
preferred_format: ast::ByteArrayFormatPreference::ArrayOfBytes,
}),
tipo: (),
})],
@ -1813,6 +1817,7 @@ fn base16_bytearray_literals() {
value: Box::new(Constant::ByteArray {
location: Span::new((), 25..34),
bytes: vec![0, 170, 255],
preferred_format: ast::ByteArrayFormatPreference::HexadecimalString,
}),
tipo: (),
}),
@ -1828,6 +1833,7 @@ fn base16_bytearray_literals() {
right: Box::new(expr::UntypedExpr::ByteArray {
location: Span::new((), 71..80),
bytes: vec![0, 170, 255],
preferred_format: ast::ByteArrayFormatPreference::HexadecimalString,
}),
},
doc: None,
@ -2489,9 +2495,10 @@ fn clause_guards() {
)),
}),
}),
right: Box::new(ast::ClauseGuard::Constant(ast::Constant::String {
right: Box::new(ast::ClauseGuard::Constant(ast::Constant::ByteArray {
location: Span::new((), 178..183),
value: "str".to_string(),
bytes: String::from("str").into_bytes(),
preferred_format: ast::ByteArrayFormatPreference::Utf8String,
})),
}),
then: expr::UntypedExpr::Var {
@ -2669,10 +2676,10 @@ fn scope_logical_expression() {
fn trace_expressions() {
let code = indoc! {r#"
fn foo() {
let msg1 = "FOO"
trace "INLINE"
let msg1 = @"FOO"
trace @"INLINE"
trace msg1
trace string.concat(msg1, "BAR")
trace string.concat(msg1, @"BAR")
trace ( 14 + 42 * 1337 )
Void
}
@ -2682,12 +2689,12 @@ fn trace_expressions() {
vec![ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::Sequence {
location: Span::new((), 13..128),
location: Span::new((), 13..131),
expressions: vec![
expr::UntypedExpr::Assignment {
location: Span::new((), 13..29),
location: Span::new((), 13..30),
value: Box::new(expr::UntypedExpr::String {
location: Span::new((), 24..29),
location: Span::new((), 24..30),
value: "FOO".to_string(),
}),
pattern: ast::Pattern::Var {
@ -2699,36 +2706,36 @@ fn trace_expressions() {
},
expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 32..128),
location: Span::new((), 33..131),
then: Box::new(expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 49..128),
location: Span::new((), 51..131),
then: Box::new(expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 62..128),
location: Span::new((), 64..131),
then: Box::new(expr::UntypedExpr::Trace {
kind: ast::TraceKind::Trace,
location: Span::new((), 97..128),
location: Span::new((), 100..131),
then: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 124..128),
location: Span::new((), 127..131),
name: "Void".to_string(),
}),
text: Box::new(expr::UntypedExpr::BinOp {
location: Span::new((), 105..119),
location: Span::new((), 108..122),
name: ast::BinOp::AddInt,
left: Box::new(expr::UntypedExpr::Int {
location: Span::new((), 105..107),
location: Span::new((), 108..110),
value: "14".to_string(),
}),
right: Box::new(expr::UntypedExpr::BinOp {
location: Span::new((), 110..119),
location: Span::new((), 113..122),
name: ast::BinOp::MultInt,
left: Box::new(expr::UntypedExpr::Int {
location: Span::new((), 110..112),
location: Span::new((), 113..115),
value: "42".to_string(),
}),
right: Box::new(expr::UntypedExpr::Int {
location: Span::new((), 115..119),
location: Span::new((), 118..122),
value: "1337".to_string(),
}),
}),
@ -2738,39 +2745,39 @@ fn trace_expressions() {
arguments: vec![
ast::CallArg {
label: None,
location: Span::new((), 82..86),
location: Span::new((), 84..88),
value: expr::UntypedExpr::Var {
location: Span::new((), 82..86),
location: Span::new((), 84..88),
name: "msg1".to_string(),
},
},
ast::CallArg {
label: None,
location: Span::new((), 88..93),
location: Span::new((), 90..96),
value: expr::UntypedExpr::String {
location: Span::new((), 88..93),
location: Span::new((), 90..96),
value: "BAR".to_string(),
},
},
],
fun: Box::new(expr::UntypedExpr::FieldAccess {
location: Span::new((), 68..81),
location: Span::new((), 70..83),
label: "concat".to_string(),
container: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 68..74),
location: Span::new((), 70..76),
name: "string".to_string(),
}),
}),
location: Span::new((), 68..94),
location: Span::new((), 70..97),
}),
}),
text: Box::new(expr::UntypedExpr::Var {
location: Span::new((), 55..59),
location: Span::new((), 57..61),
name: "msg1".to_string(),
}),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 38..46),
location: Span::new((), 39..48),
value: "INLINE".to_string(),
}),
},
@ -2782,7 +2789,7 @@ fn trace_expressions() {
public: false,
return_annotation: None,
return_type: (),
end_position: 129,
end_position: 132,
})],
)
}
@ -2791,7 +2798,7 @@ fn trace_expressions() {
fn parse_keyword_error() {
let code = indoc! {r#"
fn foo() {
error "not implemented"
error @"not implemented"
}
fn bar() {
@ -2808,12 +2815,12 @@ fn parse_keyword_error() {
arguments: vec![],
body: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Error,
location: Span::new((), 13..36),
location: Span::new((), 13..37),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 13..36),
location: Span::new((), 13..37),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 19..36),
location: Span::new((), 19..37),
value: "not implemented".to_string(),
}),
},
@ -2823,22 +2830,22 @@ fn parse_keyword_error() {
public: false,
return_annotation: None,
return_type: (),
end_position: 37,
end_position: 38,
}),
ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::When {
location: Span::new((), 53..109),
location: Span::new((), 54..110),
subjects: vec![expr::UntypedExpr::Var {
location: Span::new((), 58..59),
location: Span::new((), 59..60),
name: "x".to_string(),
}],
clauses: vec![
ast::Clause {
location: Span::new((), 71..88),
location: Span::new((), 72..89),
pattern: vec![ast::Pattern::Constructor {
is_record: false,
location: Span::new((), 71..80),
location: Span::new((), 72..81),
name: "Something".to_string(),
arguments: vec![],
module: None,
@ -2849,26 +2856,26 @@ fn parse_keyword_error() {
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Var {
location: Span::new((), 84..88),
location: Span::new((), 85..89),
name: "Void".to_string(),
},
},
ast::Clause {
location: Span::new((), 95..105),
location: Span::new((), 96..106),
pattern: vec![ast::Pattern::Discard {
name: "_".to_string(),
location: Span::new((), 95..96),
location: Span::new((), 96..97),
}],
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Error,
location: Span::new((), 100..105),
location: Span::new((), 101..106),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 100..105),
location: Span::new((), 101..106),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 100..105),
location: Span::new((), 101..106),
value: "aiken::error".to_string(),
}),
},
@ -2876,12 +2883,12 @@ fn parse_keyword_error() {
],
},
doc: None,
location: Span::new((), 40..48),
location: Span::new((), 41..49),
name: "bar".to_string(),
public: false,
return_annotation: None,
return_type: (),
end_position: 110,
end_position: 111,
}),
],
)
@ -2891,7 +2898,7 @@ fn parse_keyword_error() {
fn parse_keyword_todo() {
let code = indoc! {r#"
fn foo() {
todo "not implemented"
todo @"not implemented"
}
fn bar() {
@ -2909,12 +2916,12 @@ fn parse_keyword_todo() {
arguments: vec![],
body: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Todo,
location: Span::new((), 13..35),
location: Span::new((), 13..36),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 13..35),
location: Span::new((), 13..36),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 18..35),
location: Span::new((), 18..36),
value: "not implemented".to_string(),
}),
},
@ -2924,22 +2931,22 @@ fn parse_keyword_todo() {
public: false,
return_annotation: None,
return_type: (),
end_position: 36,
end_position: 37,
}),
ast::Definition::Fn(Function {
arguments: vec![],
body: expr::UntypedExpr::When {
location: Span::new((), 52..120),
location: Span::new((), 53..121),
subjects: vec![expr::UntypedExpr::Var {
location: Span::new((), 57..58),
location: Span::new((), 58..59),
name: "x".to_string(),
}],
clauses: vec![
ast::Clause {
location: Span::new((), 70..81),
location: Span::new((), 71..82),
pattern: vec![ast::Pattern::Constructor {
is_record: false,
location: Span::new((), 70..73),
location: Span::new((), 71..74),
name: "Foo".to_string(),
arguments: vec![],
module: None,
@ -2951,21 +2958,21 @@ fn parse_keyword_todo() {
guard: None,
then: expr::UntypedExpr::Trace {
kind: ast::TraceKind::Todo,
location: Span::new((), 77..81),
location: Span::new((), 78..82),
then: Box::new(expr::UntypedExpr::ErrorTerm {
location: Span::new((), 77..81),
location: Span::new((), 78..82),
}),
text: Box::new(expr::UntypedExpr::String {
location: Span::new((), 77..81),
location: Span::new((), 78..82),
value: "aiken::todo".to_string(),
}),
},
},
ast::Clause {
location: Span::new((), 88..99),
location: Span::new((), 89..100),
pattern: vec![ast::Pattern::Constructor {
is_record: false,
location: Span::new((), 88..91),
location: Span::new((), 89..92),
name: "Bar".to_string(),
arguments: vec![],
module: None,
@ -2976,32 +2983,32 @@ fn parse_keyword_todo() {
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Var {
location: Span::new((), 95..99),
location: Span::new((), 96..100),
name: "True".to_string(),
},
},
ast::Clause {
location: Span::new((), 106..116),
location: Span::new((), 107..117),
pattern: vec![ast::Pattern::Discard {
name: "_".to_string(),
location: Span::new((), 106..107),
location: Span::new((), 107..108),
}],
alternative_patterns: vec![],
guard: None,
then: expr::UntypedExpr::Var {
location: Span::new((), 111..116),
location: Span::new((), 112..117),
name: "False".to_string(),
},
},
],
},
doc: None,
location: Span::new((), 39..47),
location: Span::new((), 40..48),
name: "bar".to_string(),
public: false,
return_annotation: None,
return_type: (),
end_position: 121,
end_position: 122,
}),
],
)

View File

@ -1244,6 +1244,31 @@ pub enum Warning {
#[label("unused")]
location: Span,
},
#[error(
"I noticed a suspicious {type_ByteArray} UTF-8 literal which resembles a hash digest.",
type_ByteArray = "ByteArray".bold().bright_blue()
)]
#[diagnostic(help("{}", formatdoc! {
r#"When you specify a {type_ByteArray} literal using plain double-quotes, it's interpreted as an array of UTF-8 bytes. For example, the literal {literal_foo} is interpreted as the byte sequence {foo_bytes}.
However here, you have specified a literal that resembles a hash digest encoded as an hexadecimal string. This is a common case, but you probably want to capture the raw bytes represented by this sequence, and not the hexadecimal sequence. Fear not! Aiken provides a convenient syntax for that: just prefix the literal with {symbol_hash}. This will decode the hexadecimal string for you and capture the non-encoded bytes as a {type_ByteArray}.
{symbol_hash}{value}
"#,
type_ByteArray = "ByteArray".bold().bright_blue(),
literal_foo = "\"foo\"".purple(),
foo_bytes = "#[102, 111, 111]".purple(),
value = "\"{value}\"".purple(),
symbol_hash = "#".purple(),
}))]
#[diagnostic(code("syntax::bytearray_literal_is_hex_string"))]
#[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#bytearray"))]
Utf8ByteArrayIsValidHexString {
#[label("missing '#' to decode hex string")]
location: Span,
value: String,
},
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]

View File

@ -4,11 +4,11 @@ use vec1::Vec1;
use crate::{
ast::{
Annotation, Arg, ArgName, AssignmentKind, BinOp, CallArg, Clause, ClauseGuard, Constant,
IfBranch, RecordUpdateSpread, Span, TraceKind, Tracing, TypedArg, TypedCallArg,
TypedClause, TypedClauseGuard, TypedIfBranch, TypedMultiPattern, TypedRecordUpdateArg,
UnOp, UntypedArg, UntypedClause, UntypedClauseGuard, UntypedIfBranch, UntypedMultiPattern,
UntypedPattern, UntypedRecordUpdateArg,
Annotation, Arg, ArgName, AssignmentKind, BinOp, ByteArrayFormatPreference, CallArg,
Clause, ClauseGuard, Constant, IfBranch, RecordUpdateSpread, Span, TraceKind, Tracing,
TypedArg, TypedCallArg, TypedClause, TypedClauseGuard, TypedIfBranch, TypedMultiPattern,
TypedRecordUpdateArg, UnOp, UntypedArg, UntypedClause, UntypedClauseGuard, UntypedIfBranch,
UntypedMultiPattern, UntypedPattern, UntypedRecordUpdateArg,
},
builtins::{bool, byte_array, function, int, list, string, tuple},
expr::{TypedExpr, UntypedExpr},
@ -350,9 +350,11 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
..
} => self.infer_tuple_index(*tuple, index, location),
UntypedExpr::ByteArray { location, bytes } => {
Ok(self.infer_byte_array(bytes, location))
}
UntypedExpr::ByteArray {
bytes,
preferred_format,
location,
} => self.infer_bytearray(bytes, preferred_format, location),
UntypedExpr::RecordUpdate {
location,
@ -373,12 +375,27 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
}
}
fn infer_byte_array(&mut self, bytes: Vec<u8>, location: Span) -> TypedExpr {
TypedExpr::ByteArray {
/// Type-check a bytearray literal: always succeeds with type `ByteArray`,
/// but emits a warning when a plain UTF-8 literal looks like it was meant
/// to be a base16-encoded hash digest (i.e. the author probably forgot the
/// leading `#`).
fn infer_bytearray(
    &mut self,
    bytes: Vec<u8>,
    preferred_format: ByteArrayFormatPreference,
    location: Span,
) -> Result<TypedExpr, Error> {
    if let ByteArrayFormatPreference::Utf8String = preferred_format {
        // unwrap: Utf8String bytes originate from a quoted string literal,
        // presumably guaranteed valid UTF-8 by the lexer — confirm.
        let value = String::from_utf8(bytes.clone()).unwrap();
        let is_hex_string = hex::decode(&value).is_ok();
        // 56 characters of valid hex resembles a 28-byte digest (e.g. a
        // policy id); warn that the author likely wanted `#"..."` instead.
        // NOTE(review): 56 here is a length in bytes of the literal text,
        // not of the decoded value — threshold choice assumed, confirm.
        if bytes.len() >= 56 && is_hex_string {
            self.environment
                .warnings
                .push(Warning::Utf8ByteArrayIsValidHexString { location, value });
        }
    }

    // Regardless of spelling, the literal's bytes are kept as-is and the
    // expression is typed as ByteArray.
    Ok(TypedExpr::ByteArray {
        location,
        bytes,
        tipo: byte_array(),
    })
}
fn infer_trace_if_false(
@ -1353,7 +1370,18 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
location, value, ..
} => Ok(Constant::String { location, value }),
Constant::ByteArray { location, bytes } => Ok(Constant::ByteArray { location, bytes }),
Constant::ByteArray {
location,
bytes,
preferred_format,
} => {
let _ = self.infer_bytearray(bytes.clone(), preferred_format, location)?;
Ok(Constant::ByteArray {
location,
bytes,
preferred_format,
})
}
}?;
// Check type annotation is accurate.

View File

@ -10,10 +10,10 @@ fn concat(left: String, right: String) -> String {
fn is_negative(i: Int) -> Bool {
if i < 0 {
trace "is negative"
trace @"is negative"
True
} else {
trace concat("is", concat(" ", "non-negative"))
trace concat(@"is", concat(@" ", @"non-negative"))
False
}
}

View File

@ -3,11 +3,11 @@
[[requirements]]
name = "aiken-lang/stdlib"
version = "3b47c89006e7580c2213370d7426ed2a38d2836e"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "3b47c89006e7580c2213370d7426ed2a38d2836e"
version = "main"
requirements = []
source = "github"

View File

@ -1,6 +1,7 @@
name = "aiken-lang/acceptance_test_036"
version = "0.0.0"
dependencies = [
{ name = "aiken-lang/stdlib", version = "3b47c89006e7580c2213370d7426ed2a38d2836e", source = "github" },
]
[[dependencies]]
name = 'aiken-lang/stdlib'
version = 'main'
source = 'github'

View File

@ -3,11 +3,11 @@
[[requirements]]
name = "aiken-lang/stdlib"
version = "1cedbe85b7c7e9c4036d63d45cad4ced27b0d50b"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "1cedbe85b7c7e9c4036d63d45cad4ced27b0d50b"
version = "main"
requirements = []
source = "github"

View File

@ -1,6 +1,7 @@
name = "aiken-lang/acceptance_test_054"
version = "0.0.0"
dependencies = [
{ name = "aiken-lang/stdlib", version = "1cedbe85b7c7e9c4036d63d45cad4ced27b0d50b", source = "github" },
]
[[dependencies]]
name = 'aiken-lang/stdlib'
version = 'main'
source = 'github'

View File

@ -3,11 +3,11 @@
[[requirements]]
name = "aiken-lang/stdlib"
version = "3b47c89006e7580c2213370d7426ed2a38d2836e"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "3b47c89006e7580c2213370d7426ed2a38d2836e"
version = "main"
requirements = []
source = "github"

View File

@ -1,6 +1,7 @@
name = "aiken-lang/acceptance_test_055"
version = "0.0.0"
dependencies = [
{ name = "aiken-lang/stdlib", version="3b47c89006e7580c2213370d7426ed2a38d2836e", source = "github" },
]
[[dependencies]]
name = 'aiken-lang/stdlib'
version = 'main'
source = 'github'

View File

@ -1,2 +0,0 @@
let foo = Module { name: "tests", docs: [], type_info: (), definitions: [Fn(Function { arguments: [Arg { arg_name: Named { name: "signatories", label: "signatories", location: 18..29 }, location: 18..29, annotation: None, tipo: () }], body: When { location: 35..154, subjects: [Var { location: 40..51, name: "signatories" }], clauses: [Clause { location: 61..115, pattern: [List { location: 61..63, elements: [], tail: None }], alternative_patterns: [], guard: None, then: Trace { kind: Trace, location: 75..109, then: Var { location: 104..109, name: "False" }, text: String { location: 81..97, value: "no signatories" } } }, Clause { location: 120..150, pattern: [List { location: 120..129, elements: [Var { location: 121..124, name: "sig" }], tail: Some(Discard { name: "_", location: 128..129 }) }], alternative_patterns: [], guard: None, then: TraceIfFalse { location: 133..150, value: BinOp { location: 134..148, name: Eq, left: Var { location: 134..137, name: "sig" }, right: String { location: 141..148, value: "#ffff" } } } }] }, doc: None, location: 0..30, name: "must_be_signed", public: false, return_annotation: None, return_type: (), end_position: 155 }), Fn(Function { arguments: [], body: Var { location: 182..186, name: "True" }, doc: None, location: 158..177, name: "must_say_hello", public: false, return_annotation: None, return_type: (), end_position: 187 }), Test(Function { arguments: [], body: BinOp { location: 205..260, name: Or, left: BinOp { location: 205..252, name: And, left: TraceIfFalse { location: 205..231, value: Call { arguments: [CallArg { label: None, location: 220..229, value: List { location: 220..229, elements: [String { location: 221..228, value: "#f000" }], tail: None } }], fun: Var { location: 205..219, name: "must_be_signed" }, location: 205..230 } }, right: TraceIfFalse { location: 235..252, value: Call { arguments: [], fun: Var { location: 235..249, name: "must_say_hello" }, location: 235..251 } } }, right: Var { location: 256..260, name: 
"True" } }, doc: None, location: 190..200, name: "foo", public: false, return_annotation: None, return_type: (), end_position: 261 })], kind: Lib };

View File

@ -1,4 +1,4 @@
name = 'aiken-lang/acceptance_test_061'
name = 'aiken-lang/acceptance_test_063'
version = '0.0.0'
description = ''

View File

@ -1,7 +1,7 @@
fn must_be_signed(signatories) {
when signatories is {
[] -> {
trace "no signatories"
trace @"no signatories"
False
}
[sig, ..] -> (sig == "#ffff")?

View File

@ -28,7 +28,7 @@ fn assert_purpose(purpose) {
ref.transaction_id == TransactionId(
#"0000000000000000000000000000000000000000000000000000000000000000",
) && ref.output_index == 0
_ -> error "script purpose isn't 'Spend'"
_ -> error @"script purpose isn't 'Spend'"
}
}
@ -49,6 +49,6 @@ fn assert_outputs(transaction) {
output.reference_script == None,
]
|> list.and
_ -> error "unexpected number of outputs"
_ -> error @"unexpected number of outputs"
}
}

View File

@ -40,7 +40,7 @@ fn assert_outputs(outputs) {
when outputs is {
[output_1, output_2, ..] ->
assert_first_output(output_1) && assert_second_output(output_2)
_ -> error "expected transaction to have (at least) 2 outputs"
_ -> error @"expected transaction to have (at least) 2 outputs"
}
}
@ -67,7 +67,7 @@ fn assert_second_output(output) {
),
when output.datum is {
InlineDatum(_) -> True
_ -> error "expected inline datum"
_ -> error @"expected inline datum"
},
]
|> list.and

View File

@ -29,7 +29,7 @@ fn assert_mint(purpose, transaction) {
let tokens = value.tokens(transaction.mint, policy_id)
when dict.get(tokens, #"666f6f") is {
None -> error "token not found"
None -> error @"token not found"
Some(quantity) -> quantity == 1337
}
}

View File

@ -23,11 +23,11 @@ validator spend {
[
when dict.get(ctx.transaction.withdrawals, alice) is {
None -> error "alice's withdrawal not found"
None -> error @"alice's withdrawal not found"
Some(value) -> value == 42
},
when dict.get(ctx.transaction.withdrawals, bob) is {
None -> error "bob's withdrawal not found"
None -> error @"bob's withdrawal not found"
Some(value) -> value == 14
},
dict.keys(ctx.transaction.withdrawals) == [alice, bob],

View File

@ -3,11 +3,11 @@
[[requirements]]
name = "aiken-lang/stdlib"
version = "43d8e740ffdf5febc59e51b7f0d5f8506115340c"
version = "main"
source = "github"
[[packages]]
name = "aiken-lang/stdlib"
version = "43d8e740ffdf5febc59e51b7f0d5f8506115340c"
version = "main"
requirements = []
source = "github"

View File

@ -2,6 +2,8 @@ name = "aiken-lang/hello_world"
version = "1.0.0"
licences = ["Apache-2.0"]
description = "Aiken contracts for project 'aiken-lang/hello_world'"
dependencies = [
{ name = "aiken-lang/stdlib", version = "43d8e740ffdf5febc59e51b7f0d5f8506115340c", source = "github" },
]
[[dependencies]]
name = "aiken-lang/stdlib"
version = "main"
source = "github"

View File

@ -1,6 +1,5 @@
use aiken/hash.{Blake2b_224, Hash}
use aiken/list
use aiken/string
use aiken/transaction.{ScriptContext}
use aiken/transaction/credential.{VerificationKey}
@ -14,7 +13,7 @@ type Redeemer {
validator spend {
fn(datum: Datum, redeemer: Redeemer, context: ScriptContext) -> Bool {
let must_say_hello = string.from_bytearray(redeemer.msg) == "Hello, World!"
let must_say_hello = redeemer.msg == "Hello, World!"
let must_be_signed =
list.has(context.transaction.extra_signatories, datum.owner)
@ -22,4 +21,3 @@ validator spend {
must_say_hello && must_be_signed
}
}