chore: make folder names match crate name

Author: rvcas
Committed by: Lucas
Date: 2022-12-21 17:42:53 -05:00
Parent: 5694cac1a5
Commit: 42204d2d71
93 changed files with 7 additions and 7 deletions


@@ -0,0 +1,135 @@
use std::collections::HashSet;
use miette::Diagnostic;
use crate::{ast::Span, parser::token::Token};
#[derive(Debug, Diagnostic, thiserror::Error)]
#[error("{kind}\n")]
pub struct ParseError {
pub kind: ErrorKind,
#[label]
pub span: Span,
#[allow(dead_code)]
while_parsing: Option<(Span, &'static str)>,
expected: HashSet<Pattern>,
label: Option<&'static str>,
}
impl ParseError {
pub fn merge(mut self, other: Self) -> Self {
// Union the expected patterns from both errors
for expected in other.expected.into_iter() {
self.expected.insert(expected);
}
self
}
}
impl PartialEq for ParseError {
fn eq(&self, other: &Self) -> bool {
self.kind == other.kind && self.span == other.span && self.label == other.label
}
}
impl<T: Into<Pattern>> chumsky::Error<T> for ParseError {
type Span = Span;
type Label = &'static str;
fn expected_input_found<Iter: IntoIterator<Item = Option<T>>>(
span: Self::Span,
expected: Iter,
found: Option<T>,
) -> Self {
Self {
kind: found
.map(Into::into)
.map(ErrorKind::Unexpected)
.unwrap_or(ErrorKind::UnexpectedEnd),
span,
while_parsing: None,
expected: expected
.into_iter()
.map(|x| x.map(Into::into).unwrap_or(Pattern::End))
.collect(),
label: None,
}
}
fn with_label(mut self, label: Self::Label) -> Self {
self.label.get_or_insert(label);
self
}
fn merge(self, other: Self) -> Self {
ParseError::merge(self, other)
}
}
#[derive(Debug, PartialEq, Eq, Diagnostic, thiserror::Error)]
pub enum ErrorKind {
#[error("unexpected end")]
UnexpectedEnd,
#[error("{0}")]
#[diagnostic(help("{}", .0.help().unwrap_or_else(|| Box::new(""))))]
Unexpected(Pattern),
#[error("unclosed {start}")]
Unclosed {
start: Pattern,
#[label]
before_span: Span,
before: Option<Pattern>,
},
#[error("no end branch")]
NoEndBranch,
}
#[derive(Debug, PartialEq, Eq, Hash, Diagnostic, thiserror::Error)]
pub enum Pattern {
#[error("Unexpected {0:?}")]
#[diagnostic(help("Try removing it"))]
Char(char),
#[error("Unexpected {0}")]
#[diagnostic(help("Try removing it"))]
Token(Token),
#[error("Unexpected literal")]
#[diagnostic(help("Try removing it"))]
Literal,
#[error("Unexpected type name")]
#[diagnostic(help("Try removing it"))]
TypeIdent,
#[error("Unexpected indentifier")]
#[diagnostic(help("Try removing it"))]
TermIdent,
#[error("Unexpected end of input")]
End,
#[error("Malformed list spread pattern")]
#[diagnostic(help("List spread in matches can\nuse have a discard or var"))]
Match,
#[error("Malformed byte literal")]
#[diagnostic(help("Bytes must be between 0-255"))]
Byte,
#[error("Unexpected pattern")]
#[diagnostic(help(
"If no label is provided then only variables\nmatching a field name are allowed"
))]
RecordPunning,
#[error("Unexpected label")]
#[diagnostic(help("You can only use labels with curly braces"))]
Label,
#[error("Unexpected hole")]
#[diagnostic(help("You can only use capture syntax with functions not constructors"))]
Discard,
}
impl From<char> for Pattern {
fn from(c: char) -> Self {
Self::Char(c)
}
}
impl From<Token> for Pattern {
fn from(tok: Token) -> Self {
Self::Token(tok)
}
}
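
As a rough illustration of how this type plugs into chumsky's `Error` trait (not from the diff; the helper and its arguments are made up, while `Span`, `Token`, and `Pattern` come from this crate):

use chumsky::Error;

// Two failing alternatives at the same position produce two errors; merging
// them keeps one kind/span and unions the `expected` pattern sets, so the
// final diagnostic can list every token that would have been accepted.
fn merged_example(span_a: Span, span_b: Span) -> ParseError {
    let want_comma =
        ParseError::expected_input_found(span_a, vec![Some(Token::Comma)], Some(Token::Dot));
    let want_paren =
        ParseError::expected_input_found(span_b, vec![Some(Token::RightParen)], Some(Token::Dot));
    want_comma.merge(want_paren)
}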


@@ -0,0 +1,52 @@
use crate::ast::Span;
use std::iter::Peekable;
#[derive(Debug, PartialEq, Eq, Default, Clone)]
pub struct ModuleExtra {
pub module_comments: Vec<Span>,
pub doc_comments: Vec<Span>,
pub comments: Vec<Span>,
pub empty_lines: Vec<usize>,
}
impl ModuleExtra {
pub fn new() -> Self {
Default::default()
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct Comment<'a> {
pub start: usize,
pub content: &'a str,
}
impl<'a> From<(&Span, &'a str)> for Comment<'a> {
fn from(src: (&Span, &'a str)) -> Comment<'a> {
let start = src.0.start;
let end = src.0.end;
Comment {
start,
content: src.1.get(start..end).expect("From span to comment"),
}
}
}
pub fn comments_before<'a>(
comment_spans: &mut Peekable<impl Iterator<Item = &'a Span>>,
byte: usize,
src: &'a str,
) -> Vec<&'a str> {
let mut comments = vec![];
while let Some(Span { start, .. }) = comment_spans.peek() {
if start <= &byte {
let comment = comment_spans
.next()
.expect("Comment before accessing next span");
comments.push(Comment::from((comment, src)).content)
} else {
break;
}
}
comments
}
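
`comments_before` drains from the peekable iterator every comment whose span starts at or before the given byte offset and returns their text, which lets a caller attach those comments to whatever node begins at that offset. A small sketch of the call shape (illustrative; `doc_comments_for` is a made-up helper assumed to live next to these definitions):

// Collect the doc comments sitting before a definition that starts at byte
// offset `def_start`, given the collected ModuleExtra and the source text.
fn doc_comments_for<'a>(extra: &'a ModuleExtra, src: &'a str, def_start: usize) -> Vec<&'a str> {
    let mut spans = extra.doc_comments.iter().peekable();
    comments_before(&mut spans, def_start, src)
}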


@@ -0,0 +1,151 @@
use chumsky::prelude::*;
use crate::ast::Span;
use super::{error::ParseError, token::Token};
pub fn lexer() -> impl Parser<char, Vec<(Token, Span)>, Error = ParseError> {
let int = choice((
text::int(10),
text::int(16),
just("-")
.ignore_then(text::int(10))
.map(|value: String| format!("-{}", &value)),
just("-")
.ignore_then(text::int(16))
.map(|value: String| format!("-{}", &value)),
))
.map(|value| Token::Int { value });
let op = choice((
just("==").to(Token::EqualEqual),
just('=').to(Token::Equal),
just("..").to(Token::DotDot),
just('.').to(Token::Dot),
just("!=").to(Token::NotEqual),
just('!').to(Token::Bang),
just("<=").to(Token::LessEqual),
just('<').to(Token::Less),
just(">=").to(Token::GreaterEqual),
just('>').to(Token::Greater),
just('+').to(Token::Plus),
just("->").to(Token::RArrow),
just('-').to(Token::Minus),
just('*').to(Token::Star),
just('/').to(Token::Slash),
just('%').to(Token::Percent),
just("|>").to(Token::Pipe),
just(',').to(Token::Comma),
just(':').to(Token::Colon),
just("||").to(Token::VbarVbar),
just('|').to(Token::Vbar),
just("&&").to(Token::AmperAmper),
just('#').to(Token::Hash),
just("\n\n").to(Token::EmptyLine),
just("\n").to(Token::NewLine),
));
let grouping = choice((
just('(').to(Token::LeftParen),
just(')').to(Token::RightParen),
just('[').to(Token::LeftSquare),
just(']').to(Token::RightSquare),
just('{').to(Token::LeftBrace),
just('}').to(Token::RightBrace),
));
let escape = just('\\').ignore_then(
just('\\')
.or(just('/'))
.or(just('"'))
.or(just('b').to('\x08'))
.or(just('f').to('\x0C'))
.or(just('n').to('\n'))
.or(just('r').to('\r'))
.or(just('t').to('\t')),
);
let string = just('"')
.ignore_then(filter(|c| *c != '\\' && *c != '"').or(escape).repeated())
.then_ignore(just('"'))
.collect::<String>()
.map(|value| Token::String { value })
.labelled("string");
let keyword = text::ident().map(|s: String| match s.as_str() {
"trace" => Token::Trace,
"as" => Token::As,
"assert" => Token::Assert,
"check" => Token::Assert,
"const" => Token::Const,
"fn" => Token::Fn,
"test" => Token::Test,
"if" => Token::If,
"else" => Token::Else,
"is" => Token::Is,
"let" => Token::Let,
"opaque" => Token::Opaque,
"pub" => Token::Pub,
"use" => Token::Use,
"todo" => Token::Todo,
"type" => Token::Type,
"when" => Token::When,
_ => {
if s.chars().next().map_or(false, |c| c.is_uppercase()) {
Token::UpName {
// TODO: do not allow _ in upname
name: s,
}
} else if s.starts_with('_') {
Token::DiscardName {
// TODO: do not allow uppercase letters in discard name
name: s,
}
} else {
Token::Name {
// TODO: do not allow uppercase letters in name
name: s,
}
}
}
});
let module_comments = just("////").ignore_then(
take_until(text::newline().rewind())
.to(Token::ModuleComment)
.map_with_span(|token, span| (token, span)),
);
let doc_comments = just("///").ignore_then(
take_until(text::newline().rewind())
.to(Token::DocComment)
.map_with_span(|token, span| (token, span)),
);
let comments = just("//").ignore_then(
take_until(text::newline().rewind())
.to(Token::Comment)
.map_with_span(|token, span| (token, span)),
);
choice((
module_comments,
doc_comments,
comments,
choice((keyword, int, op, grouping, string))
.or(any().map(Token::Error).validate(|t, span, emit| {
emit(ParseError::expected_input_found(
span,
None,
Some(t.clone()),
));
t
}))
.map_with_span(|token, span| (token, span)),
))
.padded_by(one_of(" \t").ignored().repeated())
.recover_with(skip_then_retry_until([]))
.repeated()
.padded_by(one_of(" \t").ignored().repeated())
.then_ignore(end())
}
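
The lexer is an ordinary chumsky parser over `char`, so driving it means building a `Stream` whose spans use this crate's `Span`. A sketch of such a driver (illustrative; it assumes `Span`'s `chumsky::Span` impl uses `usize` offsets and a `()` context, and `run_lexer` is a made-up name):

use chumsky::Stream;

pub fn run_lexer(src: &str) -> Result<Vec<(Token, Span)>, Vec<ParseError>> {
    let len = src.chars().count();
    // Pair every character with a one-character span; the first argument is
    // the span reported for unexpected end-of-input.
    let stream = Stream::from_iter(
        <Span as chumsky::Span>::new((), len..len),
        src.chars()
            .enumerate()
            .map(|(i, c)| (c, <Span as chumsky::Span>::new((), i..i + 1))),
    );
    lexer().parse(stream)
}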


@@ -0,0 +1,154 @@
use std::fmt;
#[derive(Clone, Debug, PartialEq, Hash, Eq)]
pub enum Token {
Error(char),
Name { name: String },
UpName { name: String },
DiscardName { name: String },
Int { value: String },
String { value: String },
// Groupings
LeftParen, // (
RightParen, // )
LeftSquare, // [
RightSquare, // ]
LeftBrace, // {
RightBrace, // }
// Int Operators
Plus,
Minus,
Star,
Slash,
Less,
Greater,
LessEqual,
GreaterEqual,
Percent,
// ByteString Operators
PlusDot, // '+.'
MinusDot, // '-.'
StarDot, // '*.'
SlashDot, // '/.'
LessDot, // '<.'
GreaterDot, // '>.'
LessEqualDot, // '<=.'
GreaterEqualDot, // '>=.'
// Other Punctuation
Colon,
Comma,
Hash, // '#'
Bang, // '!'
Equal,
EqualEqual, // '=='
NotEqual, // '!='
Vbar, // '|'
VbarVbar, // '||'
AmperAmper, // '&&'
Pipe, // '|>'
Dot, // '.'
RArrow, // '->'
DotDot, // '..'
EndOfFile,
// Docs/Extra
Comment,
DocComment,
ModuleComment,
EmptyLine,
NewLine,
// Keywords (alphabetically):
As,
Assert,
Check,
Const,
Fn,
If,
Else,
Is,
Let,
Opaque,
Pub,
Use,
Test,
Todo,
Type,
When,
Trace,
}
impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s = match self {
Token::Error(c) => {
write!(f, "\"{}\"", c)?;
return Ok(());
}
Token::Name { name } => name,
Token::UpName { name } => name,
Token::DiscardName { name } => name,
Token::Int { value } => value,
Token::String { value } => value,
Token::LeftParen => "(",
Token::RightParen => ")",
Token::LeftSquare => "[",
Token::RightSquare => "]",
Token::LeftBrace => "{",
Token::RightBrace => "}",
Token::Plus => "+",
Token::Minus => "-",
Token::Star => "*",
Token::Slash => "/",
Token::Less => "<",
Token::Greater => ">",
Token::LessEqual => "<=",
Token::GreaterEqual => ">=",
Token::Percent => "%",
Token::PlusDot => "+.",
Token::MinusDot => "-.",
Token::StarDot => "*.",
Token::SlashDot => "/.",
Token::LessDot => "<.",
Token::GreaterDot => ">.",
Token::LessEqualDot => "<=.",
Token::GreaterEqualDot => ">=.",
Token::Colon => ":",
Token::Comma => ",",
Token::Hash => "#",
Token::Bang => "!",
Token::Equal => "=",
Token::EqualEqual => "==",
Token::NotEqual => "!=",
Token::Vbar => "|",
Token::VbarVbar => "||",
Token::AmperAmper => "&&",
Token::Pipe => "|>",
Token::Dot => ".",
Token::RArrow => "->",
Token::DotDot => "..",
Token::EndOfFile => "EOF",
Token::Comment => "//",
Token::DocComment => "///",
Token::ModuleComment => "////",
Token::EmptyLine => "EMPTYLINE",
Token::NewLine => "NEWLINE",
Token::As => "as",
Token::Assert => "assert",
Token::Check => "check",
Token::When => "when",
Token::Is => "is",
Token::Const => "const",
Token::Fn => "fn",
Token::If => "if",
Token::Else => "else",
Token::Use => "import",
Token::Let => "let",
Token::Opaque => "opaque",
Token::Pub => "pub",
Token::Todo => "todo",
Token::Trace => "trace",
Token::Type => "type",
Token::Test => "test",
};
write!(f, "\"{}\"", s)
}
}
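
The `Display` impl wraps every token in double quotes, so an error built from `Pattern::Token` renders as, for example, `Unexpected "=="`. A quick illustrative test sketch, assuming it sits in the same module as the enum:

#[cfg(test)]
mod display_tests {
    use super::Token;

    #[test]
    fn tokens_render_quoted() {
        // Fixed tokens print their lexeme, dynamic ones print their payload,
        // both wrapped in quotes by the Display impl above.
        assert_eq!(Token::EqualEqual.to_string(), "\"==\"");
        assert_eq!(Token::Name { name: "add".to_string() }.to_string(), "\"add\"");
    }
}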