Merge pull request #952 from aiken-lang/auto-merge-imports

Commit 71ed844db0, authored by Matthias Benkort on 2024-06-05 10:11:12 +02:00 and committed by GitHub.
GPG Key ID: B5690EEEBB952194 (no known key found for this signature in database)
15 changed files with 272 additions and 78 deletions


@@ -10,6 +10,8 @@
 - **aiken-lang**: the semantics of the keyword `fail` on property-based tests has changed and now considers a test to succeed only if **every** execution of the test failed (instead of just one). The previous behavior can be recovered by adding the keyword `once` after `fail`. @KtorZ
+- **aiken-lang**: duplicate import lines are now automatically merged instead of raising a warning. However, imports can no longer appear anywhere in the file and must come as the first definitions. @KtorZ
 
 ### Fixed
 
 - **aiken-lang**: fixed the 'after x tests' number reported on property-based test failure, which was off by one. @KtorZ
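To make the `fail` entry above concrete: under the new semantics a property test marked `fail` only passes when every sampled execution fails, while `fail once` restores the old rule that a single failing execution is enough. Below is a minimal, hypothetical Rust model of that aggregation rule; it is an illustration only, not the aiken test-runner code, and the function and variable names are invented for the example.

```rust
/// Hypothetical model of the `fail` aggregation rule described in the
/// changelog entry above (not the aiken implementation).
/// `executions_failed[i]` records whether the i-th sampled execution failed.
fn fail_test_succeeds(executions_failed: &[bool], once: bool) -> bool {
    if once {
        // `fail once`: the previous behavior, a single failing execution suffices.
        executions_failed.iter().any(|&failed| failed)
    } else {
        // `fail`: the new behavior, every execution must fail.
        !executions_failed.is_empty() && executions_failed.iter().all(|&failed| failed)
    }
}

fn main() {
    let runs = [true, true, false];
    assert!(fail_test_succeeds(&runs, true)); // `fail once` passes: at least one run failed
    assert!(!fail_test_succeeds(&runs, false)); // plain `fail` does not: not every run failed
}
```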


@@ -13,10 +13,11 @@ mod utils;
 use crate::{ast, line_numbers::LineNumbers};
 pub use annotation::parser as annotation;
 use chumsky::prelude::*;
-pub use definition::parser as definition;
+pub use definition::{import::parser as import, parser as definition};
 use error::ParseError;
 pub use expr::parser as expression;
 use extra::ModuleExtra;
+use indexmap::IndexMap;
 pub use pattern::parser as pattern;
 
 pub fn module(
@@ -27,7 +28,48 @@ pub fn module(
     let stream = chumsky::Stream::from_iter(ast::Span::create(tokens.len(), 1), tokens.into_iter());
 
-    let definitions = definition().repeated().then_ignore(end()).parse(stream)?;
+    let definitions = import()
+        .repeated()
+        .map(|imports| {
+            let mut store = IndexMap::new();
+
+            for import in imports.into_iter() {
+                let key = (import.module, import.as_name);
+                match store.remove(&key) {
+                    None => {
+                        store.insert(key, (import.location, import.unqualified));
+                    }
+                    Some((location, unqualified)) => {
+                        let mut merged_unqualified = Vec::new();
+                        merged_unqualified.extend(unqualified);
+                        merged_unqualified.extend(import.unqualified);
+                        store.insert(key, (location, merged_unqualified));
+                    }
+                }
+            }
+
+            store
+                .into_iter()
+                .map(|((module, as_name), (location, unqualified))| {
+                    ast::Definition::Use(ast::Use {
+                        module,
+                        as_name,
+                        location,
+                        unqualified,
+                        package: (),
+                    })
+                })
+                .collect::<Vec<ast::UntypedDefinition>>()
+        })
+        .then(definition().repeated())
+        .map(|(imports, others)| {
+            let mut defs = Vec::new();
+            defs.extend(imports);
+            defs.extend(others);
+            defs
+        })
+        .then_ignore(end())
+        .parse(stream)?;
 
     let lines = LineNumbers::new(src);
@@ -47,6 +89,16 @@ pub fn module(
 mod tests {
     use crate::assert_module;
 
+    #[test]
+    fn merge_imports() {
+        assert_module!(
+            r#"
+            use aiken/list.{bar, foo}
+            use aiken/list.{baz}
+            "#
+        );
+    }
+
     #[test]
     fn windows_newline() {
         assert_module!("use aiken/list\r\n");


@@ -4,7 +4,7 @@ use crate::{
 };
 use chumsky::prelude::*;
 
-pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
+pub fn parser() -> impl Parser<Token, ast::UntypedUse, Error = ParseError> {
     let unqualified_import = choice((
         select! {Token::Name { name } => name}.then(
             just(Token::As)
@@ -42,30 +42,28 @@ pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
         .then(as_name);
 
     just(Token::Use).ignore_then(module_path).map_with_span(
-        |((module, unqualified), as_name), span| {
-            ast::UntypedDefinition::Use(ast::Use {
+        |((module, unqualified), as_name), span| ast::Use {
             module,
             as_name,
             unqualified: unqualified.unwrap_or_default(),
             package: (),
             location: span,
-            })
         },
     )
 }
 
 #[cfg(test)]
 mod tests {
-    use crate::assert_definition;
+    use crate::assert_import;
 
     #[test]
     fn import_basic() {
-        assert_definition!("use aiken/list");
+        assert_import!("use aiken/list");
     }
 
     #[test]
     fn import_unqualified() {
-        assert_definition!(
+        assert_import!(
             r#"
             use std/address.{Address as A, thing as w}
             "#
@@ -74,6 +72,6 @@ mod tests {
 
     #[test]
     fn import_alias() {
-        assert_definition!("use aiken/list as foo");
+        assert_import!("use aiken/list as foo");
     }
 }


@@ -3,25 +3,22 @@ use chumsky::prelude::*;
 pub mod constant;
 mod data_type;
 mod function;
-mod import;
+pub mod import;
 mod test;
 mod type_alias;
 mod validator;
 
+use super::{error::ParseError, token::Token};
+use crate::ast;
 pub use constant::parser as constant;
 pub use data_type::parser as data_type;
 pub use function::parser as function;
-pub use import::parser as import;
 pub use test::parser as test;
 pub use type_alias::parser as type_alias;
 pub use validator::parser as validator;
 
-use super::{error::ParseError, token::Token};
-use crate::ast;
-
 pub fn parser() -> impl Parser<Token, ast::UntypedDefinition, Error = ParseError> {
     choice((
-        import(),
         data_type(),
         type_alias(),
         validator(),


@@ -2,8 +2,7 @@
 source: crates/aiken-lang/src/parser/definition/import.rs
 description: "Code:\n\nuse aiken/list as foo"
 ---
-Use(
-    Use {
+Use {
     as_name: Some(
         "foo",
     ),
@@ -14,5 +13,4 @@ Use(
     ],
     package: (),
     unqualified: [],
-    },
-)
+}


@@ -2,8 +2,7 @@
 source: crates/aiken-lang/src/parser/definition/import.rs
 description: "Code:\n\nuse aiken/list"
 ---
-Use(
-    Use {
+Use {
     as_name: None,
     location: 0..14,
     module: [
@@ -12,5 +11,4 @@ Use(
     ],
     package: (),
     unqualified: [],
-    },
-)
+}


@@ -2,8 +2,7 @@
 source: crates/aiken-lang/src/parser/definition/import.rs
 description: "Code:\n\nuse std/address.{Address as A, thing as w}\n"
 ---
-Use(
-    Use {
+Use {
     as_name: None,
     location: 0..42,
     module: [
@@ -27,5 +26,4 @@ Use(
             ),
         },
     ],
-    },
-)
+}


@@ -1,6 +1,5 @@
-use chumsky::prelude::*;
-
 use super::{error::ParseError, token::Token};
+use chumsky::prelude::*;
 
 pub fn optional_flag(token: Token) -> impl Parser<Token, bool, Error = ParseError> {
     just(token).ignored().or_not().map(|v| v.is_some())
@@ -132,6 +131,27 @@ macro_rules! assert_definition {
     };
 }
 
+#[macro_export]
+macro_rules! assert_import {
+    ($code:expr) => {
+        use chumsky::Parser;
+
+        let $crate::parser::lexer::LexInfo { tokens, .. } = $crate::parser::lexer::run(indoc::indoc! { $code }).unwrap();
+
+        let stream = chumsky::Stream::from_iter($crate::ast::Span::create(tokens.len(), 1), tokens.into_iter());
+
+        let result = $crate::parser::import().parse(stream).unwrap();
+
+        insta::with_settings!({
+            description => concat!("Code:\n\n", indoc::indoc! { $code }),
+            prepend_module_to_snapshot => false,
+            omit_expression => true
+        }, {
+            insta::assert_debug_snapshot!(result);
+        });
+    };
+}
+
 #[macro_export]
 macro_rules! assert_format {
     ($code:expr) => {


@@ -0,0 +1,51 @@
+---
+source: crates/aiken-lang/src/parser.rs
+description: "Code:\n\nuse aiken/list.{bar, foo}\nuse aiken/list.{baz}\n"
+---
+Module {
+    name: "",
+    docs: [],
+    type_info: (),
+    definitions: [
+        Use(
+            Use {
+                as_name: None,
+                location: 0..25,
+                module: [
+                    "aiken",
+                    "list",
+                ],
+                package: (),
+                unqualified: [
+                    UnqualifiedImport {
+                        location: 16..19,
+                        name: "bar",
+                        as_name: None,
+                    },
+                    UnqualifiedImport {
+                        location: 21..24,
+                        name: "foo",
+                        as_name: None,
+                    },
+                    UnqualifiedImport {
+                        location: 42..45,
+                        name: "baz",
+                        as_name: None,
+                    },
+                ],
+            },
+        ),
+    ],
+    lines: LineNumbers {
+        line_starts: [
+            0,
+            26,
+            47,
+        ],
+        length: 47,
+        last: Some(
+            47,
+        ),
+    },
+    kind: Validator,
+}


@@ -309,8 +309,6 @@ fn format_else_if() {
 
 #[test]
 fn format_imports() {
-    // TODO: Fix this case, this is behaving weirdly, not keeping the order for the comments and
-    // imports.
     assert_format!(
         r#"
         use aiken/list
@@ -324,6 +322,59 @@ fn format_imports() {
     );
 }
 
+#[test]
+fn format_merge_imports() {
+    assert_format!(
+        r#"
+        use aiken/list.{bar, foo}
+        use aiken/list.{baz}
+        "#
+    );
+}
+
+#[test]
+fn format_merge_imports_2() {
+    assert_format!(
+        r#"
+        use aiken/list.{bar, foo}
+        use aiken/dict
+        use aiken/list
+        "#
+    );
+}
+
+#[test]
+fn format_merge_imports_alias() {
+    assert_format!(
+        r#"
+        use aiken/list.{bar, foo}
+        use aiken/list.{baz} as vector
+        "#
+    );
+}
+
+#[test]
+fn format_merge_imports_alias_2() {
+    assert_format!(
+        r#"
+        use aiken/list.{bar, foo} as vector
+        use aiken/list.{baz} as vector
+        "#
+    );
+}
+
+#[test]
+fn format_merge_imports_comments() {
+    assert_format!(
+        r#"
+        // foo
+        use aiken/list.{bar, foo}
+        // bar
+        use aiken/list.{baz}
+        "#
+    );
+}
+
 #[test]
 fn format_negate() {
     assert_format!(


@@ -0,0 +1,5 @@
+---
+source: crates/aiken-lang/src/tests/format.rs
+description: "Code:\n\nuse aiken/list.{bar, foo}\nuse aiken/list.{baz}\n"
+---
+use aiken/list.{bar, baz, foo}


@@ -0,0 +1,6 @@
+---
+source: crates/aiken-lang/src/tests/format.rs
+description: "Code:\n\nuse aiken/list.{bar, foo}\nuse aiken/dict\nuse aiken/list\n"
+---
+use aiken/dict
+use aiken/list.{bar, foo}


@@ -0,0 +1,6 @@
+---
+source: crates/aiken-lang/src/tests/format.rs
+description: "Code:\n\nuse aiken/list.{bar, foo}\nuse aiken/list.{baz} as vector\n"
+---
+use aiken/list.{bar, foo}
+use aiken/list.{baz} as vector


@@ -0,0 +1,5 @@
+---
+source: crates/aiken-lang/src/tests/format.rs
+description: "Code:\n\nuse aiken/list.{bar, foo} as vector\nuse aiken/list.{baz} as vector\n"
---
+use aiken/list.{bar, baz, foo} as vector


@@ -0,0 +1,7 @@
+---
+source: crates/aiken-lang/src/tests/format.rs
+description: "Code:\n\n// foo\nuse aiken/list.{bar, foo}\n// bar\nuse aiken/list.{baz}\n"
+---
+// foo
+use aiken/list.{bar, baz, foo}
+// bar