fix: properly capture empty lines
parent d9d1310c6d
commit 53bc9aa56f
@@ -51,6 +51,7 @@ pub fn module(
 
             false
         }
+        Token::NewLine => false,
         _ => true,
     });
 
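The hunk above drops the new NewLine tokens from the stream that `module` hands to the parser, the same way comment tokens are recorded and hidden before parsing. A minimal sketch of that filter-with-side-effect pattern in plain Rust, with illustrative stand-ins (this `Token`, the spans, and `empty_lines` are not the crate's actual types):

// Illustrative sketch only, not the crate's real `module` function.
#[derive(Debug)]
enum Token {
    EmptyLine,
    NewLine,
    Name,
}

fn main() {
    let tokens = vec![
        (Token::Name, 0..3),
        (Token::NewLine, 3..4),
        (Token::EmptyLine, 4..6),
        (Token::Name, 6..9),
    ];

    let mut empty_lines = Vec::new();

    let for_parser: Vec<_> = tokens
        .into_iter()
        .filter(|(token, span)| match token {
            // Remember where the blank line started, then hide the token.
            Token::EmptyLine => {
                empty_lines.push(span.start);
                false
            }
            // Plain newlines carry no information the grammar needs.
            Token::NewLine => false,
            _ => true,
        })
        .collect();

    assert_eq!(empty_lines, vec![4]);
    assert_eq!(for_parser.len(), 2);
}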
@@ -42,6 +42,7 @@ pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
         just("&&").to(Token::AmperAmper),
         just('#').to(Token::Hash),
         just("\n\n").to(Token::EmptyLine),
+        just("\n").to(Token::NewLine),
     ));
 
     let grouping = choice((
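In the hunk above, the single-newline rule lands directly after the `"\n\n"` rule inside the same `choice`. Order matters: `choice` tries alternatives in sequence, so the two-newline pattern must come first or a blank line would lex as two NewLine tokens rather than one EmptyLine. A self-contained sketch of that ordering, using the same chumsky combinators the lexer uses (`Tok` and `toy_lexer` are made-up names for illustration):

// Sketch assuming a chumsky 0.8/0.9-style API; `Tok` and `toy_lexer` are hypothetical.
use chumsky::prelude::*;

#[derive(Clone, Debug, PartialEq)]
enum Tok {
    EmptyLine,
    NewLine,
    Word,
}

fn toy_lexer() -> impl Parser<char, Vec<Tok>, Error = Simple<char>> {
    choice((
        just("\n\n").to(Tok::EmptyLine), // longest match first
        just("\n").to(Tok::NewLine),
        filter(|c: &char| c.is_alphanumeric())
            .repeated()
            .at_least(1)
            .to(Tok::Word),
    ))
    .padded_by(one_of(" \t").ignored().repeated())
    .repeated()
    .then_ignore(end())
}

fn main() {
    // One blank line between two words: expect Word, EmptyLine, Word.
    assert_eq!(
        toy_lexer().parse("a\n\nb").unwrap(),
        vec![Tok::Word, Tok::EmptyLine, Tok::Word]
    );
}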
@@ -127,17 +128,10 @@ pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
             .map_with_span(|token, span| (token, span)),
     );
 
-    let comments_with_trailing_newline = just("//").ignore_then(
-        take_until(text::newline())
-            .then_ignore(text::newline().rewind())
-            .to(Token::Comment)
-            .map_with_span(|token, span| (token, span)),
-    );
-
     choice((
         module_comments,
         doc_comments,
-        choice((comments_with_trailing_newline, comments)),
+        comments,
         choice((keyword, int, op, grouping, string))
             .or(any().map(Token::Error).validate(|t, span, emit| {
                 emit(ParseError::expected_input_found(
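A hedged note on the hunk above: once line breaks are tokens of their own, a comment rule only needs to stop at the newline and leave it unconsumed, which `rewind()` already provides, so the dedicated trailing-newline variant appears redundant. The sketch below shows `rewind()` leaving the newline in the input for a later rule; the `comment` helper is illustrative and not the crate's actual `comments` parser:

// Hedged sketch, chumsky 0.8/0.9-style API; `comment` is a hypothetical helper.
use chumsky::prelude::*;

fn comment() -> impl Parser<char, String, Error = Simple<char>> {
    just("//")
        // Collect everything up to the newline; `rewind()` matches the
        // newline without consuming it, so it stays in the input.
        .ignore_then(take_until(text::newline().rewind()))
        .map(|(body, _)| body.into_iter().collect())
}

fn main() {
    // The '\n' left behind by the comment rule can still be matched
    // explicitly afterwards, e.g. by a NewLine rule in the real lexer.
    let comment_then_newline = comment().then_ignore(just('\n')).then_ignore(end());

    assert_eq!(comment_then_newline.parse("// hello\n").unwrap(), " hello");
}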
@@ -149,9 +143,9 @@ pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
             }))
             .map_with_span(|token, span| (token, span)),
     ))
-    .padded()
+    .padded_by(one_of(" \t").ignored().repeated())
     .recover_with(skip_then_retry_until([]))
     .repeated()
-    .padded()
+    .padded_by(one_of(" \t").ignored().repeated())
     .then_ignore(end())
 }
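The `.padded()` to `.padded_by(one_of(" \t").ignored().repeated())` change above is the core of the fix on the lexer side: chumsky's `padded()` skips any whitespace, newlines included, so the characters the new NewLine/EmptyLine rules need would be consumed as padding before those rules ever saw them. Restricting padding to spaces and tabs keeps line breaks in the stream. A small sketch of the difference, assuming a chumsky 0.8/0.9-style API and an illustrative `word` helper:

// Sketch only; `word` is a hypothetical helper, not part of the crate.
use chumsky::prelude::*;

fn word() -> impl Parser<char, String, Error = Simple<char>> {
    filter(|c: &char| c.is_ascii_alphabetic())
        .repeated()
        .at_least(1)
        .collect()
}

fn main() {
    // `.padded()` eats the newline as ordinary padding, so this parse succeeds
    // and the line break is lost.
    let eats_newlines = word().padded().repeated().then_ignore(end());
    assert!(eats_newlines.parse("a \n b").is_ok());

    // Horizontal-only padding stops at the '\n': this parser fails there,
    // which in the real lexer is what lets a dedicated newline rule claim it.
    let keeps_newlines = word()
        .padded_by(one_of(" \t").ignored().repeated())
        .repeated()
        .then_ignore(end());
    assert!(keeps_newlines.parse("a \n b").is_err());
}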
@@ -55,6 +55,7 @@ pub enum Token {
     DocComment,
     ModuleComment,
     EmptyLine,
+    NewLine,
     // Keywords (alphabetically):
     As,
     Assert,
@@ -130,6 +131,7 @@ impl fmt::Display for Token {
             Token::DocComment => "///",
             Token::ModuleComment => "////",
             Token::EmptyLine => "EMPTYLINE",
+            Token::NewLine => "NEWLINE",
             Token::As => "as",
             Token::Assert => "assert",
             Token::Check => "check",
@@ -1,10 +1,17 @@
 use sample
 
+// Stuff
+
+/// Spend validator
 pub fn spend(datum: sample.Datum, rdmr: sample.Redeemer, _ctx: Nil) -> Bool {
   let x = #(datum, rdmr, #[244])
+
   let y = [#(#[222], #[222]), #(#[233], #[52])]
+
   let [z, f, ..g] = y
+
   let #(a, b, _) = x
+
   z == #(#[222], #[222])
 }
 