Merge pull request #212 from aiken-lang/better-type-check-errors

Better type check errors

commit f9ac12c3da
@@ -80,6 +80,7 @@ dependencies = [
  "itertools",
  "miette",
  "ordinal",
+ "owo-colors",
  "pretty_assertions",
  "strum",
  "thiserror",
@@ -13,9 +13,11 @@ authors = ["Lucas Rosa <x@rvcas.dev>", "Kasey White <kwhitemsg@gmail.com>"]
 [dependencies]
 chumsky = "0.8.0"
 indexmap = "1.9.1"
+indoc = "1.0.7"
 itertools = "0.10.5"
 miette = "5.2.0"
 ordinal = "0.3.2"
+owo-colors = "3.5.0"
 strum = "0.24.1"
 thiserror = "1.0.37"
 uplc = { path = '../uplc', version = "0.0.25" }
@@ -488,7 +488,7 @@ impl<'comments> Formatter<'comments> {
             .group()
     }

-    fn type_arguments<'a>(&mut self, args: &'a [Annotation]) -> Document<'a> {
+    pub fn type_arguments<'a>(&mut self, args: &'a [Annotation]) -> Document<'a> {
         wrap_generics(args.iter().map(|t| self.annotation(t)))
     }

@@ -796,7 +796,7 @@ impl<'comments> Formatter<'comments> {
         }
     }

-    fn pattern_constructor<'a>(
+    pub fn pattern_constructor<'a>(
         &mut self,
         name: &'a str,
         args: &'a [CallArg<UntypedPattern>],

@@ -1478,7 +1478,7 @@ impl<'comments> Formatter<'comments> {
         list(elements_document, elements.len(), tail)
     }

-    fn pattern<'a>(&mut self, pattern: &'a UntypedPattern) -> Document<'a> {
+    pub fn pattern<'a>(&mut self, pattern: &'a UntypedPattern) -> Document<'a> {
         let comments = self.pop_comments(pattern.location().start);
         let doc = match pattern {
             Pattern::Int { value, .. } => value.to_doc(),
@@ -0,0 +1,42 @@
+use std::cmp;
+
+/// Calculate Levenshtein distance for two UTF-8 encoded strings.
+///
+/// Returns a minimum number of edits to transform from source to target string.
+///
+/// Levenshtein distance accepts three edit operations: insertion, deletion,
+/// and substitution.
+///
+/// References:
+///
+/// - [Levenshtein distance in Cargo][1]
+/// - [Ilia Schelokov: Optimizing loop heavy Rust code][2]
+///
+/// [1]: https://github.com/rust-lang/cargo/blob/7d7fe6797ad07f313706380d251796702272b150/src/cargo/util/lev_distance.rs
+/// [2]: https://thaumant.me/optimizing-loop-heavy-rust/
+pub fn distance(source: &str, target: &str) -> usize {
+    if source.is_empty() {
+        return target.len();
+    }
+    if target.is_empty() {
+        return source.len();
+    }
+
+    let mut distances = (0..=target.chars().count()).collect::<Vec<_>>();
+
+    for (i, ch1) in source.chars().enumerate() {
+        let mut sub = i;
+        distances[0] = sub + 1;
+        for (j, ch2) in target.chars().enumerate() {
+            let dist = cmp::min(
+                cmp::min(distances[j], distances[j + 1]) + 1,
+                sub + (ch1 != ch2) as usize,
+            );
+
+            sub = distances[j + 1];
+            distances[j + 1] = dist;
+        }
+    }
+
+    *distances.last().unwrap()
+}
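A brief usage sketch of the module above: the hunks below keep collecting candidate names (for instance `variables: self.local_value_names()` on `Error::UnknownVariable`), which an error renderer can rank with this distance for "did you mean" style hints. The `did_you_mean` helper and its threshold are illustrative only, not part of this diff:

    use crate::levenshtein;

    /// Hypothetical helper (not part of this change): pick the known name
    /// closest to a misspelled one, skipping candidates that are too far away.
    pub fn did_you_mean<'a>(unknown: &str, candidates: &'a [String]) -> Option<&'a str> {
        candidates
            .iter()
            .map(|candidate| (levenshtein::distance(unknown, candidate), candidate))
            .filter(|(distance, _)| *distance <= 3)
            .min_by_key(|(distance, _)| *distance)
            .map(|(_, candidate)| candidate.as_str())
    }

    // did_you_mean("lenght", &["length".into(), "filter".into()]) == Some("length")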
@@ -9,6 +9,7 @@ pub mod builder;
 pub mod builtins;
 pub mod expr;
 pub mod format;
+pub mod levenshtein;
 pub mod parser;
 pub mod pretty;
 pub mod tipo;
@@ -80,21 +80,12 @@ impl<T: Into<Pattern>> chumsky::Error<T> for ParseError {

 #[derive(Debug, PartialEq, Eq, Diagnostic, thiserror::Error)]
 pub enum ErrorKind {
-    #[error("Unexpected end")]
+    #[error("I arrived at the end of the file unexpectedly.")]
     UnexpectedEnd,
     #[error("{0}")]
     #[diagnostic(help("{}", .0.help().unwrap_or_else(|| Box::new(""))))]
     Unexpected(Pattern),
-    #[error("Unclosed {start}")]
-    Unclosed {
-        start: Pattern,
-        #[label]
-        before_span: Span,
-        before: Option<Pattern>,
-    },
-    #[error("No end branch")]
-    NoEndBranch,
-    #[error("Invalid tuple index")]
+    #[error("I discovered an invalid tuple index.")]
     #[diagnostic()]
     InvalidTupleIndex {
         #[help]

@@ -104,39 +95,39 @@ pub enum ErrorKind {

 #[derive(Debug, PartialEq, Eq, Hash, Diagnostic, thiserror::Error)]
 pub enum Pattern {
-    #[error("Unexpected {0:?}")]
-    #[diagnostic(help("Try removing it"))]
+    #[error("I found an unexpected char '{0:?}'.")]
+    #[diagnostic(help("Try removing it!"))]
     Char(char),
-    #[error("Unexpected {0}")]
-    #[diagnostic(help("Try removing it"))]
+    #[error("I found an unexpected token '{0}'.")]
+    #[diagnostic(help("Try removing it!"))]
     Token(Token),
-    #[error("Unexpected literal")]
-    #[diagnostic(help("Try removing it"))]
+    #[error("I found an unexpected literal value.")]
+    #[diagnostic(help("Try removing it!"))]
     Literal,
-    #[error("Unexpected type name")]
-    #[diagnostic(help("Try removing it"))]
+    #[error("I found an unexpected type name.")]
+    #[diagnostic(help("Try removing it!"))]
     TypeIdent,
-    #[error("Unexpected indentifier")]
-    #[diagnostic(help("Try removing it"))]
+    #[error("I found an unexpected indentifier.")]
+    #[diagnostic(help("Try removing it!"))]
     TermIdent,
-    #[error("Unexpected end of input")]
+    #[error("I found an unexpected end of input.")]
     End,
-    #[error("Malformed list spread pattern")]
-    #[diagnostic(help("List spread in matches can\nuse have a discard or var"))]
+    #[error("I found a malformed list spread pattern.")]
+    #[diagnostic(help("List spread in matches can use a discard '_' or var."))]
     Match,
-    #[error("Malformed byte literal")]
-    #[diagnostic(help("Bytes must be between 0-255"))]
+    #[error("I found an out-of-bound byte literal.")]
+    #[diagnostic(help("Bytes must be between 0-255."))]
     Byte,
-    #[error("Unexpected pattern")]
+    #[error("I found an unexpected pattern.")]
     #[diagnostic(help(
-        "If no label is provided then only variables\nmatching a field name are allowed"
+        "If no label is provided then only variables\nmatching a field name are allowed."
     ))]
     RecordPunning,
-    #[error("Unexpected label")]
-    #[diagnostic(help("You can only use labels with curly braces"))]
+    #[error("I found an unexpected label.")]
+    #[diagnostic(help("You can only use labels surrounded by curly braces"))]
     Label,
-    #[error("Unexpected hole")]
-    #[diagnostic(help("You can only use capture syntax with functions not constructors"))]
+    #[error("I found an unexpected discard '_'.")]
+    #[diagnostic(help("You can only use capture syntax with functions not constructors."))]
     Discard,
 }

@@ -146,10 +146,9 @@ impl<'a> Environment<'a> {

         if let Type::Fn { args, ret } = tipo.deref() {
             return if args.len() != arity {
-                Err(Error::IncorrectArity {
+                Err(Error::IncorrectFunctionCallArity {
                     expected: args.len(),
                     given: arity,
-                    labels: vec![],
                     location: call_location,
                 })
             } else {

@@ -316,9 +315,9 @@ impl<'a> Environment<'a> {
     ) -> Result<&ValueConstructor, Error> {
         match module {
             None => self.scope.get(name).ok_or_else(|| Error::UnknownVariable {
+                location,
                 name: name.to_string(),
                 variables: self.local_value_names(),
-                location,
             }),

             Some(m) => {

@@ -1009,10 +1008,10 @@ impl<'a> Environment<'a> {
                 self.ungeneralised_functions.insert(name.to_string());

                 // Create the field map so we can reorder labels for usage of this function
-                let mut field_map = FieldMap::new(args.len());
+                let mut field_map = FieldMap::new(args.len(), true);

                 for (i, arg) in args.iter().enumerate() {
-                    field_map.insert(arg.arg_name.get_label().clone(), i, location)?;
+                    field_map.insert(arg.arg_name.get_label().clone(), i, &arg.location)?;
                 }
                 let field_map = field_map.into_option();

@@ -1077,7 +1076,6 @@ impl<'a> Environment<'a> {
             }

             Definition::DataType(DataType {
-                location,
                 public,
                 opaque,
                 name,

@@ -1115,14 +1113,17 @@ impl<'a> Environment<'a> {
                 for constructor in constructors {
                     assert_unique_value_name(names, &constructor.name, &constructor.location)?;

-                    let mut field_map = FieldMap::new(constructor.arguments.len());
+                    let mut field_map = FieldMap::new(constructor.arguments.len(), false);

                     let mut args_types = Vec::with_capacity(constructor.arguments.len());

                     for (
                         i,
                         RecordConstructorArg {
-                            label, annotation, ..
+                            label,
+                            annotation,
+                            location,
+                            ..
                         },
                     ) in constructor.arguments.iter().enumerate()
                     {

@@ -1593,16 +1594,13 @@ fn assert_unique_const_name<'a>(
     }
 }

-pub(super) fn assert_no_labeled_arguments<A>(args: &[CallArg<A>]) -> Result<(), Error> {
+pub(super) fn assert_no_labeled_arguments<A>(args: &[CallArg<A>]) -> Option<(Span, String)> {
     for arg in args {
         if let Some(label) = &arg.label {
-            return Err(Error::UnexpectedLabeledArg {
-                location: arg.location,
-                label: label.to_string(),
-            });
+            return Some((arg.location, label.to_string()));
         }
     }
-    Ok(())
+    None
 }

 pub(super) fn collapse_links(t: Arc<Type>) -> Arc<Type> {
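Since `assert_no_labeled_arguments` now returns `Option<(Span, String)>` instead of committing to a single error, each call site maps the offending span and label into whichever error variant fits its context (see the expression and pattern typer hunks further down). A condensed sketch of that adapter pattern, assuming it sits next to the existing helpers inside the `tipo` module; the `check_no_labels` name and the module paths are illustrative:

    use crate::ast::CallArg;
    use crate::tipo::error::Error;

    // Turn the helper's Option back into a Result, choosing the error variant
    // that fits this particular call site.
    fn check_no_labels<A>(args: &[CallArg<A>]) -> Result<(), Error> {
        assert_no_labeled_arguments(args)
            .map(|(location, label)| Err(Error::UnexpectedLabeledArg { location, label }))
            .unwrap_or(Ok(()))
    }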
(File diff suppressed because it is too large.)
@@ -111,7 +111,9 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
             Some(field_map) => field_map.reorder(&mut args, location)?,

             // The fun has no field map and so we error if arguments have been labelled
-            None => assert_no_labeled_arguments(&args)?,
+            None => assert_no_labeled_arguments(&args)
+                .map(|(location, label)| Err(Error::UnexpectedLabeledArg { location, label }))
+                .unwrap_or(Ok(()))?,
         }

         // Extract the type of the fun, ensuring it actually is a function

@@ -499,7 +501,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
                 None => {
                     panic!("Failed to lookup record field after successfully inferring that field",)
                 }
-                Some(p) => arguments.push(TypedRecordUpdateArg {
+                Some((p, _)) => arguments.push(TypedRecordUpdateArg {
                     location,
                     label: label.to_string(),
                     value,

@@ -1350,7 +1352,11 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
             Some(field_map) => field_map.reorder(&mut args, location)?,

             // The fun has no field map and so we error if arguments have been labelled
-            None => assert_no_labeled_arguments(&args)?,
+            None => assert_no_labeled_arguments(&args)
+                .map(|(location, label)| {
+                    Err(Error::UnexpectedLabeledArg { location, label })
+                })
+                .unwrap_or(Ok(()))?,
         }

         let (mut args_types, return_type) = self.environment.match_fun_type(

@@ -1723,7 +1729,10 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
                     Ok(elems[index].clone())
                 }
             }
-            _ => Err(Error::NotATuple { location }),
+            _ => Err(Error::NotATuple {
+                location,
+                tipo: tuple.tipo(),
+            }),
         }?;

         Ok(TypedExpr::TupleIndex {
@@ -8,23 +8,34 @@ use crate::ast::{CallArg, Span};
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct FieldMap {
     pub arity: usize,
-    pub fields: HashMap<String, usize>,
+    pub fields: HashMap<String, (usize, Span)>,
+    pub is_function: bool,
 }

 impl FieldMap {
-    pub fn new(arity: usize) -> Self {
+    pub fn new(arity: usize, is_function: bool) -> Self {
         Self {
             arity,
             fields: HashMap::new(),
+            is_function,
         }
     }

     pub fn insert(&mut self, label: String, index: usize, location: &Span) -> Result<(), Error> {
-        match self.fields.insert(label.clone(), index) {
-            Some(_) => Err(Error::DuplicateField {
-                label,
-                location: *location,
-            }),
+        match self.fields.insert(label.clone(), (index, *location)) {
+            Some((_, location_other)) => {
+                if self.is_function {
+                    Err(Error::DuplicateArgument {
+                        label,
+                        locations: vec![*location, location_other],
+                    })
+                } else {
+                    Err(Error::DuplicateField {
+                        label,
+                        locations: vec![*location, location_other],
+                    })
+                }
+            }
             None => Ok(()),
         }
     }
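A small sketch of the new behaviour: a duplicate label is now reported with both spans, and the variant depends on whether the map describes a function or a record constructor. The `demo` function, the borrowed `location` parameter, and the module paths in the imports are illustrative only:

    use crate::ast::Span;
    use crate::tipo::{error::Error, fields::FieldMap};

    fn demo(location: &Span) {
        // A field map built for a function definition reports duplicate *arguments*...
        let mut fun_map = FieldMap::new(2, true);
        let _ = fun_map.insert("x".to_string(), 0, location);
        assert!(matches!(
            fun_map.insert("x".to_string(), 1, location),
            Err(Error::DuplicateArgument { .. })
        ));

        // ...while one built for a record constructor reports duplicate *fields*.
        let mut record_map = FieldMap::new(2, false);
        let _ = record_map.insert("x".to_string(), 0, location);
        assert!(matches!(
            record_map.insert("x".to_string(), 1, location),
            Err(Error::DuplicateField { .. })
        ));
    }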
@@ -40,12 +51,12 @@ impl FieldMap {
     /// Reorder an argument list so that labelled fields supplied out-of-order are
     /// in the correct order.
     pub fn reorder<A>(&self, args: &mut [CallArg<A>], location: Span) -> Result<(), Error> {
-        let mut labeled_arguments_given = false;
+        let mut last_labeled_arguments_given: Option<&CallArg<A>> = None;
         let mut seen_labels = std::collections::HashSet::new();
         let mut unknown_labels = Vec::new();

         if self.arity != args.len() {
-            return Err(Error::IncorrectArity {
+            return Err(Error::IncorrectFieldsArity {
                 labels: self.incorrect_arity_labels(args),
                 location,
                 expected: self.arity,

@@ -56,13 +67,14 @@ impl FieldMap {
         for arg in args.iter() {
             match &arg.label {
                 Some(_) => {
-                    labeled_arguments_given = true;
+                    last_labeled_arguments_given = Some(arg);
                 }

                 None => {
-                    if labeled_arguments_given {
+                    if let Some(label) = last_labeled_arguments_given {
                         return Err(Error::PositionalArgumentAfterLabeled {
                             location: arg.location,
+                            labeled_arg_location: label.location,
                         });
                     }
                 }

@@ -90,7 +102,7 @@ impl FieldMap {
             }
         };

-        let position = match self.fields.get(label) {
+        let (position, other_location) = match self.fields.get(label) {
             None => {
                 unknown_labels.push((label.clone(), location));

@@ -110,7 +122,7 @@ impl FieldMap {
             } else {
                 if seen_labels.contains(label) {
                     return Err(Error::DuplicateArgument {
-                        location,
+                        locations: vec![location, other_location],
                         label: label.to_string(),
                     });
                 }
@@ -357,8 +357,7 @@ impl<'a, 'b> PatternTyper<'a, 'b> {
             Pattern::Tuple { elems, location } => match collapse_links(tipo.clone()).deref() {
                 Type::Tuple { elems: type_elems } => {
                     if elems.len() != type_elems.len() {
-                        return Err(Error::IncorrectArity {
-                            labels: vec![],
+                        return Err(Error::IncorrectTupleArity {
                             location,
                             expected: type_elems.len(),
                             given: elems.len(),

@@ -483,7 +482,18 @@ impl<'a, 'b> PatternTyper<'a, 'b> {
             }

             // The fun has no field map and so we error if arguments have been labelled
-            None => assert_no_labeled_arguments(&pattern_args)?,
+            None => assert_no_labeled_arguments(&pattern_args)
+                .map(|(location, label)| {
+                    Err(Error::UnexpectedLabeledArgInPattern {
+                        location,
+                        label,
+                        name: name.clone(),
+                        args: pattern_args.clone(),
+                        module: module.clone(),
+                        with_spread,
+                    })
+                })
+                .unwrap_or(Ok(()))?,
         }

         let constructor_typ = cons.tipo.clone();

@@ -542,11 +552,13 @@ impl<'a, 'b> PatternTyper<'a, 'b> {
                         is_record,
                     })
                 } else {
-                    Err(Error::IncorrectArity {
-                        labels: vec![],
+                    Err(Error::IncorrectPatternArity {
                         location,
-                        expected: args.len(),
-                        given: pattern_args.len(),
+                        given: pattern_args,
+                        expected: 0,
+                        name: name.clone(),
+                        module: module.clone(),
+                        is_record,
                     })
                 }
             }

@@ -570,11 +582,13 @@ impl<'a, 'b> PatternTyper<'a, 'b> {
                         is_record,
                     })
                 } else {
-                    Err(Error::IncorrectArity {
-                        labels: vec![],
+                    Err(Error::IncorrectPatternArity {
                         location,
+                        given: pattern_args,
                         expected: 0,
-                        given: pattern_args.len(),
+                        name: name.clone(),
+                        module: module.clone(),
+                        is_record,
                     })
                 }
             }
@@ -972,7 +972,8 @@ impl<'a> CodeGenerator<'a> {
                     .iter()
                     .map(|item| {
                         let label = item.label.clone().unwrap_or_default();
-                        let field_index = field_map.fields.get(&label).unwrap_or(&0);
+                        let field_index =
+                            field_map.fields.get(&label).map(|x| &x.0).unwrap_or(&0);
                         let (discard, var_name) = match &item.value {
                             Pattern::Var { name, .. } => (false, name.clone()),
                             Pattern::Discard { .. } => (true, "".to_string()),

@@ -1316,7 +1317,8 @@ impl<'a> CodeGenerator<'a> {
                     .iter()
                     .map(|item| {
                         let label = item.label.clone().unwrap_or_default();
-                        let field_index = field_map.fields.get(&label).unwrap_or(&0);
+                        let field_index =
+                            field_map.fields.get(&label).map(|x| &x.0).unwrap_or(&0);
                         let (discard, var_name) = match &item.value {
                             Pattern::Var { name, .. } => (false, name.clone()),
                             Pattern::Discard { .. } => (true, "".to_string()),

@@ -2031,7 +2033,11 @@ impl<'a> CodeGenerator<'a> {
                 for field in field_map
                     .fields
                     .iter()
-                    .sorted_by(|item1, item2| item1.1.cmp(item2.1))
+                    .sorted_by(|item1, item2| {
+                        let (a, _) = item1.1;
+                        let (b, _) = item2.1;
+                        a.cmp(b)
+                    })
                     .zip(&args_type)
                     .rev()
                 {

@@ -2092,7 +2098,11 @@ impl<'a> CodeGenerator<'a> {
                 for field in field_map
                     .fields
                     .iter()
-                    .sorted_by(|item1, item2| item1.1.cmp(item2.1))
+                    .sorted_by(|item1, item2| {
+                        let (a, _) = item1.1;
+                        let (b, _) = item2.1;
+                        a.cmp(b)
+                    })
                     .rev()
                 {
                     term = Term::Lambda {
@@ -18,17 +18,17 @@ use zip_extract::ZipExtractError;
 #[allow(dead_code)]
 #[derive(thiserror::Error)]
 pub enum Error {
-    #[error("Duplicate module\n\n{module}")]
+    #[error("I just found two modules with the same name: '{module}'")]
     DuplicateModule {
         module: String,
         first: PathBuf,
         second: PathBuf,
     },

-    #[error("File operation failed")]
+    #[error("Some operation on the file-system did fail.")]
     FileIo { error: io::Error, path: PathBuf },

-    #[error("Source code incorrectly formatted")]
+    #[error("I found some files with incorrectly formatted source code.")]
     Format { problem_files: Vec<Unformatted> },

     #[error(transparent)]

@@ -52,29 +52,26 @@ pub enum Error {
         help: String,
     },

-    #[error("Missing 'aiken.toml' manifest in {path}")]
+    #[error("I couldn't find any 'aiken.toml' manifest in {path}.")]
     MissingManifest { path: PathBuf },

-    #[error("Cyclical module imports")]
+    #[error("I just found a cycle in module hierarchy!")]
     ImportCycle { modules: Vec<String> },

     /// Useful for returning many [`Error::Parse`] at once
     #[error("A list of errors")]
     List(Vec<Self>),

-    #[error("Parsing")]
+    #[error("While parsing files...")]
     Parse {
         path: PathBuf,
-
         src: String,
-
         named: NamedSource,
-
         #[source]
         error: Box<ParseError>,
     },

-    #[error("Checking")]
+    #[error("While trying to make sense of your code...")]
     Type {
         path: PathBuf,
         src: String,

@@ -246,7 +243,10 @@ impl Diagnostic for Error {
             Error::ImportCycle { .. } => Some(Box::new("aiken::module::cyclical")),
             Error::List(_) => None,
             Error::Parse { .. } => Some(Box::new("aiken::parser")),
-            Error::Type { .. } => Some(Box::new("aiken::check")),
+            Error::Type { error, .. } => Some(Box::new(format!(
+                "err::aiken::check{}",
+                error.code().map(|s| format!("::{s}")).unwrap_or_default()
+            ))),
             Error::StandardIo(_) => None,
             Error::MissingManifest { .. } => None,
             Error::TomlLoading { .. } => Some(Box::new("aiken::loading::toml")),

@@ -368,6 +368,27 @@ impl Diagnostic for Error {
             Error::JoinError(_) => None,
         }
     }
+
+    fn url<'a>(&'a self) -> Option<Box<dyn Display + 'a>> {
+        match self {
+            Error::DuplicateModule { .. } => None,
+            Error::FileIo { .. } => None,
+            Error::ImportCycle { .. } => None,
+            Error::List { .. } => None,
+            Error::Parse { .. } => None,
+            Error::Type { error, .. } => error.url(),
+            Error::StandardIo(_) => None,
+            Error::MissingManifest { .. } => None,
+            Error::TomlLoading { .. } => None,
+            Error::Format { .. } => None,
+            Error::ValidatorMustReturnBool { .. } => None,
+            Error::WrongValidatorArity { .. } => None,
+            Error::TestFailure { .. } => None,
+            Error::Http { .. } => None,
+            Error::ZipExtract { .. } => None,
+            Error::JoinError { .. } => None,
+        }
+    }
 }

 #[derive(thiserror::Error)]