chore: make folder names match crate name

This commit is contained in:
rvcas
2022-12-21 17:42:53 -05:00
committed by Lucas
parent 5694cac1a5
commit 42204d2d71
93 changed files with 7 additions and 7 deletions

View File

@@ -0,0 +1,131 @@
use crate::error::Error;
use aiken_lang::ast::Span;
use miette::NamedSource;
use serde::{de::Visitor, Deserialize, Serialize};
use std::{fmt::Display, fs, path::PathBuf};
/// Project configuration as declared in the `aiken.toml` manifest at the
/// project root (see [`Config::load`]).
#[derive(Deserialize)]
pub struct Config {
    /// Package identifier in `owner/repo` form.
    pub name: PackageName,
    pub version: String,
    /// Free-form description; defaults to an empty string when absent.
    #[serde(default)]
    pub description: String,
    /// Where the project is hosted, if anywhere; used e.g. for docs links.
    pub repository: Option<Repository>,
    /// Declared dependencies; defaults to an empty list when absent.
    #[serde(default)]
    pub dependencies: Vec<Dependency>,
}
/// Hosted location of the project's source repository
/// (`https://{platform}.com/{user}/{project}`).
#[derive(Deserialize)]
pub struct Repository {
    pub user: String,
    pub project: String,
    pub platform: Platform,
}
/// Supported source-hosting platforms. Serialized in lowercase
/// (`github`, `gitlab`, `bitbucket`) — see also the `Display` impl below.
#[derive(Deserialize, Serialize, PartialEq, Eq, Clone, Copy)]
#[serde(rename_all = "lowercase")]
pub enum Platform {
    Github,
    Gitlab,
    Bitbucket,
}
/// A single dependency entry from `aiken.toml`: which package, at which
/// version, fetched from which platform.
#[derive(Deserialize, Serialize, PartialEq, Eq, Clone)]
pub struct Dependency {
    pub name: PackageName,
    pub version: String,
    pub source: Platform,
}
/// A package identifier of the form `owner/repo` (e.g. `aiken-lang/stdlib`).
/// Serialized to and parsed from that single-string form via the custom
/// `Serialize`/`Deserialize` impls below.
#[derive(PartialEq, Eq, Hash, Clone)]
pub struct PackageName {
    pub owner: String,
    pub repo: String,
}
impl Display for PackageName {
    /// Renders the canonical `owner/repo` form.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.owner)?;
        f.write_str("/")?;
        f.write_str(&self.repo)
    }
}
impl Serialize for PackageName {
    /// Serializes as the single string `owner/repo` (via the `Display` impl),
    /// mirroring the string form accepted by `Deserialize`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
impl<'de> Deserialize<'de> for PackageName {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
struct PackageNameVisitor;
impl<'de> Visitor<'de> for PackageNameVisitor {
type Value = PackageName;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter
.write_str("a string representing an owner and repo, ex: aiken-lang/stdlib")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
let mut name = v.split('/');
let owner = name.next().ok_or_else(|| {
serde::de::Error::invalid_value(serde::de::Unexpected::Str(v), &self)
})?;
let repo = name.next().ok_or_else(|| {
serde::de::Error::invalid_value(serde::de::Unexpected::Str(v), &self)
})?;
Ok(PackageName {
owner: owner.to_string(),
repo: repo.to_string(),
})
}
}
deserializer.deserialize_str(PackageNameVisitor)
}
}
impl Display for Platform {
    /// Lowercase platform label, matching the serde `rename_all = "lowercase"`
    /// wire format.
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::result::Result<(), ::std::fmt::Error> {
        let label = match self {
            Platform::Github => "github",
            Platform::Gitlab => "gitlab",
            Platform::Bitbucket => "bitbucket",
        };
        f.write_str(label)
    }
}
impl Config {
    /// Reads and parses `aiken.toml` from the given project directory.
    ///
    /// Returns [`Error::MissingManifest`] if the file cannot be read and
    /// [`Error::TomlLoading`] (with the source attached for diagnostics) if it
    /// does not parse.
    pub fn load(dir: PathBuf) -> Result<Config, Error> {
        let config_path = dir.join("aiken.toml");
        let raw_config = fs::read_to_string(&config_path)
            .map_err(|_| Error::MissingManifest { path: dir.clone() })?;
        let result: Self = toml::from_str(&raw_config).map_err(|e| Error::TomlLoading {
            path: config_path.clone(),
            src: raw_config.clone(),
            named: NamedSource::new(config_path.display().to_string(), raw_config),
            // this isn't actually a legit way to get the span: line/col are
            // stuffed into start/end, which expect byte offsets
            location: e.line_col().map(|(line, col)| Span {
                start: line,
                end: col,
            }),
            help: e.to_string(),
        })?;
        Ok(result)
    }
}

View File

@@ -0,0 +1,242 @@
use std::{collections::HashSet, fs, path::Path};
use aiken_lang::ast::Span;
use miette::NamedSource;
use serde::{Deserialize, Serialize};
use tokio::time::Instant;
use crate::{
config::{Config, Dependency, PackageName},
error::Error,
paths,
telemetry::{Event, EventListener},
};
use self::{
downloader::Downloader,
manifest::{Manifest, Package},
};
pub mod downloader;
pub mod manifest;
/// Whether [`Manifest::load`] may trust an existing `aiken.lock` file
/// (`Yes`) or must resolve dependency versions from scratch (`No`).
pub enum UseManifest {
    Yes,
    No,
}
/// Record of which dependency packages are currently materialized in the
/// local build directory; persisted as `packages.toml`.
#[derive(Deserialize, Serialize)]
pub struct LocalPackages {
    packages: Vec<Dependency>,
}
impl LocalPackages {
    /// Reads `packages.toml` from under `root_path`. A missing file is not an
    /// error: it yields an empty package list (fresh project / clean build).
    pub fn load(root_path: &Path) -> Result<Self, Error> {
        let path = root_path.join(paths::packages_toml());
        if !path.exists() {
            return Ok(Self {
                packages: Vec::new(),
            });
        }
        let src = fs::read_to_string(&path)?;
        let result: Self = toml::from_str(&src).map_err(|e| Error::TomlLoading {
            path: path.clone(),
            src: src.clone(),
            named: NamedSource::new(path.display().to_string(), src),
            // this isn't actually a legit way to get the span: line/col are
            // stuffed into start/end, which expect byte offsets
            location: e.line_col().map(|(line, col)| Span {
                start: line,
                end: col,
            }),
            help: e.to_string(),
        })?;
        Ok(result)
    }
    /// Writes this record back to `packages.toml`, creating the packages
    /// directory first if needed.
    pub fn save(&self, root_path: &Path) -> Result<(), Error> {
        let packages_path = root_path.join(paths::packages());
        let path = root_path.join(paths::packages_toml());
        if !packages_path.exists() {
            fs::create_dir_all(&packages_path)?;
        }
        let toml = toml::to_string(&self).expect("packages.toml serialization");
        fs::write(path, toml)?;
        Ok(())
    }
    /// Deletes, from the build directory, every locally-materialized package
    /// that the manifest no longer lists (see `extra_local_packages`).
    fn remove_extra_packages(&self, manifest: &Manifest, root_path: &Path) -> Result<(), Error> {
        for (package, _version) in self.extra_local_packages(manifest) {
            let path = root_path.join(paths::build_deps_package(&package));
            if path.exists() {
                fs::remove_dir_all(&path)?;
            }
        }
        Ok(())
    }
    /// Packages present locally but absent (at that exact version) from the
    /// manifest — i.e. stale leftovers to be removed.
    pub fn extra_local_packages(&self, manifest: &Manifest) -> Vec<(PackageName, String)> {
        let manifest_packages: HashSet<_> = manifest
            .packages
            .iter()
            .map(|p| (&p.name, &p.version))
            .collect();
        self.packages
            .iter()
            .filter(|dep| !manifest_packages.contains(&(&dep.name, &dep.version)))
            .map(|dep| (dep.name.clone(), dep.version.clone()))
            .collect()
    }
    /// Manifest packages (other than the root project itself) that are not
    /// present locally at the manifest's version — i.e. what must be fetched.
    pub fn missing_local_packages<'a>(
        &self,
        manifest: &'a Manifest,
        root: &PackageName,
    ) -> Vec<&'a Package> {
        manifest
            .packages
            .iter()
            .filter(|p| {
                &p.name != root
                    && !matches!(
                        self.packages.iter().find(|p2| p2.name == p.name),
                        Some(Dependency { version, .. }) if &p.version == version,
                    )
            })
            .collect()
    }
}
impl From<&Manifest> for LocalPackages {
    /// Snapshot of a manifest as a local-packages record, keeping only the
    /// fields needed to identify each package (name, version, source).
    fn from(value: &Manifest) -> Self {
        let packages = value
            .packages
            .iter()
            .map(|pkg| Dependency {
                name: pkg.name.clone(),
                version: pkg.version.clone(),
                source: pkg.source,
            })
            .collect();
        Self { packages }
    }
}
/// Synchronizes the local build directory with the project's dependencies:
/// acquires the build lock, loads (or re-resolves) the manifest, removes
/// stale packages, downloads missing ones, then persists both the manifest
/// (if changed) and the local-packages record.
///
/// `new_package` is accepted but currently unused (see the commented-out
/// block below). Spins up its own Tokio runtime for the async download work.
pub fn download<T>(
    event_listener: &T,
    new_package: Option<Vec<String>>,
    use_manifest: UseManifest,
    root_path: &Path,
    config: &Config,
) -> Result<Manifest, Error>
where
    T: EventListener,
{
    let build_path = root_path.join(paths::build());
    if !build_path.is_dir() {
        fs::create_dir_all(&build_path)?;
    }
    // Cross-process lock so concurrent aiken invocations don't race on the
    // build directory; if already held, notify the user and block until free.
    let mut build_lock = fslock::LockFile::open(&build_path.join("aiken-compile.lock"))
        .expect("Build Lock Creation");
    if !build_lock
        .try_lock_with_pid()
        .expect("Trying build locking")
    {
        event_listener.handle_event(Event::WaitingForBuildDirLock);
        build_lock.lock_with_pid().expect("Build locking")
    }
    let project_name = config.name.clone();
    if let Some(packages) = new_package {
        for _package in packages {
            // config.dependencies.push(Dependency {
            //     name: (),
            //     version: (),
            //     source: package.sour,
            // })
        }
    }
    let runtime = tokio::runtime::Runtime::new().expect("Unable to start Tokio");
    let (manifest, changed) = Manifest::load(
        runtime.handle().clone(),
        event_listener,
        config,
        use_manifest,
        root_path,
    )?;
    let local = LocalPackages::load(root_path)?;
    // Drop packages the manifest no longer lists before fetching new ones.
    local.remove_extra_packages(&manifest, root_path)?;
    runtime.block_on(fetch_missing_packages(
        &manifest,
        &local,
        project_name,
        root_path,
        event_listener,
    ))?;
    if changed {
        manifest.save(root_path)?;
    }
    // Record what is now materialized locally for the next invocation.
    LocalPackages::from(&manifest).save(root_path)?;
    Ok(manifest)
}
/// Downloads every manifest package that is not already materialized locally,
/// emitting telemetry events around the batch.
///
/// `missing_local_packages` already returns a `Vec`, so we use it directly
/// instead of the previous side-effecting `map`-to-count + `peekable` dance.
async fn fetch_missing_packages<T>(
    manifest: &Manifest,
    local: &LocalPackages,
    project_name: PackageName,
    root_path: &Path,
    event_listener: &T,
) -> Result<(), Error>
where
    T: EventListener,
{
    let missing = local.missing_local_packages(manifest, &project_name);
    if !missing.is_empty() {
        let count = missing.len();
        let start = Instant::now();
        event_listener.handle_event(Event::DownloadingPackage {
            name: "packages".to_string(),
        });
        let downloader = Downloader::new(root_path);
        downloader
            .download_packages(missing.into_iter(), &project_name)
            .await?;
        event_listener.handle_event(Event::PackagesDownloaded { start, count });
    }
    Ok(())
}

View File

@@ -0,0 +1,120 @@
use std::{io::Cursor, path::Path};
use futures::future;
use reqwest::Client;
use crate::{config::PackageName, error::Error, paths};
use super::manifest::Package;
/// Fetches dependency packages (as GitHub zipballs) into the package cache
/// and extracts them into the project's build directory under `root_path`.
pub struct Downloader<'a> {
    // Reused HTTP client so connections are pooled across downloads.
    http: Client,
    root_path: &'a Path,
}
impl<'a> Downloader<'a> {
    /// Creates a downloader rooted at the given project directory.
    pub fn new(root_path: &'a Path) -> Self {
        Self {
            http: Client::new(),
            root_path,
        }
    }
    /// Downloads and extracts all given packages concurrently, skipping the
    /// root project itself. Fails fast on the first error.
    pub async fn download_packages<T>(
        &self,
        packages: T,
        project_name: &PackageName,
    ) -> Result<(), Error>
    where
        T: Iterator<Item = &'a Package>,
    {
        let tasks = packages
            .filter(|package| project_name != &package.name)
            .map(|package| self.ensure_package_in_build_directory(package));
        let _results = future::try_join_all(tasks).await?;
        Ok(())
    }
    /// Downloads the package zipball (if not cached) and extracts it into the
    /// build directory (if not already extracted).
    pub async fn ensure_package_in_build_directory(
        &self,
        package: &Package,
    ) -> Result<bool, Error> {
        self.ensure_package_downloaded(package).await?;
        self.extract_package_from_cache(&package.name, &package.version)
            .await
    }
    /// Ensures the package's zipball exists in the cache directory.
    ///
    /// Returns `Ok(false)` if it was already cached, `Ok(true)` after a fresh
    /// download. NOTE(review): only GitHub is fetched here, regardless of the
    /// package's declared `source` platform — confirm whether that's intended.
    pub async fn ensure_package_downloaded(&self, package: &Package) -> Result<bool, Error> {
        let packages_cache_path = paths::packages_cache();
        let zipball_path =
            paths::package_cache_zipball(&package.name, &package.version.to_string());
        if !packages_cache_path.exists() {
            tokio::fs::create_dir_all(packages_cache_path).await?;
        }
        if zipball_path.is_file() {
            return Ok(false);
        }
        let url = format!(
            "https://api.github.com/repos/{}/{}/zipball/{}",
            package.name.owner, package.name.repo, package.version
        );
        // GitHub's API rejects requests without a User-Agent header.
        let response = self
            .http
            .get(url)
            .header("User-Agent", "aiken-lang")
            .send()
            .await?
            .bytes()
            .await?;
        // let PackageSource::Github { url } = &package.source;
        tokio::fs::write(&zipball_path, response).await?;
        Ok(true)
    }
    /// Extracts a cached zipball into `build/.../<package>`.
    ///
    /// Returns `Ok(false)` if the destination already exists. On extraction
    /// failure the partially-written destination is removed before the error
    /// is propagated.
    pub async fn extract_package_from_cache(
        &self,
        name: &PackageName,
        version: &str,
    ) -> Result<bool, Error> {
        let destination = self.root_path.join(paths::build_deps_package(name));
        // If the directory already exists then there's nothing for us to do
        if destination.is_dir() {
            return Ok(false);
        }
        tokio::fs::create_dir_all(&destination).await?;
        let zipball_path = self
            .root_path
            .join(paths::package_cache_zipball(name, version));
        let zipball = tokio::fs::read(zipball_path).await?;
        // zip extraction is CPU/blocking work, so run it off the async threads.
        let result = {
            let d = destination.clone();
            tokio::task::spawn_blocking(move || {
                zip_extract::extract(Cursor::new(zipball), &d, true)
            })
            .await?
        };
        if result.is_err() {
            tokio::fs::remove_dir_all(destination).await?;
        }
        result?;
        Ok(true)
    }
}

View File

@@ -0,0 +1,137 @@
use std::{fs, path::Path};
use aiken_lang::ast::Span;
use miette::NamedSource;
use serde::{Deserialize, Serialize};
use crate::{
config::{Config, Dependency, PackageName, Platform},
error::Error,
paths,
telemetry::{Event, EventListener},
};
use super::UseManifest;
/// Lock-file contents (`aiken.lock`): the dependency requirements the lock
/// was resolved from, and the concrete resolved packages.
#[derive(Deserialize, Serialize)]
pub struct Manifest {
    /// The `aiken.toml` dependencies at resolution time; compared against the
    /// current config to decide whether the lock is stale.
    pub requirements: Vec<Dependency>,
    /// Fully resolved packages.
    pub packages: Vec<Package>,
}
impl Manifest {
    /// Loads the lock file, re-resolving versions when it is absent, when the
    /// caller asked to ignore it, or when the config's dependencies changed.
    ///
    /// Returns the manifest plus a flag indicating whether it changed (and so
    /// should be saved back to disk by the caller).
    pub fn load<T>(
        runtime: tokio::runtime::Handle,
        event_listener: &T,
        config: &Config,
        use_manifest: UseManifest,
        root_path: &Path,
    ) -> Result<(Self, bool), Error>
    where
        T: EventListener,
    {
        let manifest_path = root_path.join(paths::manifest());
        // If there's no manifest (or we have been asked not to use it) then resolve
        // the versions anew
        let should_resolve = match use_manifest {
            _ if !manifest_path.exists() => true,
            UseManifest::No => true,
            UseManifest::Yes => false,
        };
        if should_resolve {
            let manifest = resolve_versions(runtime, config, None, event_listener)?;
            return Ok((manifest, true));
        }
        let toml = fs::read_to_string(&manifest_path)?;
        let manifest: Self = toml::from_str(&toml).map_err(|e| Error::TomlLoading {
            path: manifest_path.clone(),
            src: toml.clone(),
            named: NamedSource::new(manifest_path.display().to_string(), toml),
            // this isn't actually a legit way to get the span: line/col are
            // stuffed into start/end, which expect byte offsets
            location: e.line_col().map(|(line, col)| Span {
                start: line,
                end: col,
            }),
            help: e.to_string(),
        })?;
        // If the config has unchanged since the manifest was written then it is up
        // to date so we can return it unmodified.
        if manifest.requirements == config.dependencies {
            Ok((manifest, false))
        } else {
            let manifest = resolve_versions(runtime, config, Some(&manifest), event_listener)?;
            Ok((manifest, true))
        }
    }
    /// Serializes the manifest to `aiken.lock`, prepending a generated-file
    /// header comment.
    pub fn save(&self, root_path: &Path) -> Result<(), Error> {
        let manifest_path = root_path.join(paths::manifest());
        let mut toml = toml::to_string(&self).expect("aiken.lock serialization");
        toml.insert_str(
            0,
            "# This file was generated by Aiken\n# You typically do not need to edit this file\n\n",
        );
        fs::write(manifest_path, toml)?;
        Ok(())
    }
}
/// A resolved package entry in the lock file.
#[derive(Deserialize, Serialize)]
pub struct Package {
    pub name: PackageName,
    pub version: String,
    /// Transitive requirements; currently always empty (see `resolve_versions`).
    pub requirements: Vec<String>,
    pub source: Platform,
}
/// Produces a manifest from the project configuration.
///
/// Real dependency resolution (hex-style, using the runtime and the previous
/// manifest) is not implemented yet, so those parameters are unused and the
/// configured versions are taken verbatim.
fn resolve_versions<T>(
    _runtime: tokio::runtime::Handle,
    config: &Config,
    _manifest: Option<&Manifest>,
    event_listener: &T,
) -> Result<Manifest, Error>
where
    T: EventListener,
{
    event_listener.handle_event(Event::ResolvingVersions);
    let requirements = config.dependencies.clone();
    let packages = config
        .dependencies
        .iter()
        .map(|dep| Package {
            name: dep.name.clone(),
            version: dep.version.clone(),
            requirements: vec![],
            source: dep.source,
        })
        .collect();
    Ok(Manifest {
        packages,
        requirements,
    })
}

View File

@@ -0,0 +1,680 @@
use crate::{
config::{Config, Repository},
module::CheckedModule,
};
use aiken_lang::{
ast::{Definition, RecordConstructor, RecordConstructorArg, TypedDefinition},
format,
tipo::Type,
};
use askama::Template;
use itertools::Itertools;
use pulldown_cmark as markdown;
use serde::Serialize;
use serde_json as json;
use std::{
path::{Path, PathBuf},
sync::Arc,
time::{Duration, SystemTime},
};
// Effectively "no wrapping": signatures rendered in docs stay on one line.
const MAX_COLUMNS: isize = 999;
// Aiken version baked into every generated page footer.
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// One generated documentation artifact: its output path (relative to the
/// docs root) and full file contents.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct DocFile {
    pub path: PathBuf,
    pub content: String,
}
/// Askama template context for a single module's documentation page
/// (`templates/module.html`).
#[derive(Template)]
#[template(path = "module.html")]
struct ModuleTemplate<'a> {
    aiken_version: &'a str,
    breadcrumbs: String,
    page_title: &'a str,
    module_name: String,
    project_name: &'a str,
    project_version: &'a str,
    /// Common prefix stripped from module names in the sidebar.
    modules_prefix: String,
    modules: &'a Vec<DocLink>,
    functions: Vec<DocFunction>,
    types: Vec<DocType>,
    constants: Vec<DocConstant>,
    /// Module-level docs, already rendered from markdown to HTML.
    documentation: String,
    source: &'a DocLink,
    timestamp: String,
}
impl<'a> ModuleTemplate<'a> {
    /// True when `module` links to the module this template is rendering,
    /// judged by its path with the trailing `.html` (and anything after)
    /// removed.
    pub fn is_current_module(&self, module: &DocLink) -> bool {
        module
            .path
            .split(".html")
            .next()
            .map_or(false, |name| name == self.module_name)
    }
}
/// Askama template context for non-module pages such as the README/index
/// (`templates/page.html`).
#[derive(Template)]
#[template(path = "page.html")]
struct PageTemplate<'a> {
    aiken_version: &'a str,
    breadcrumbs: &'a str,
    page_title: &'a str,
    project_name: &'a str,
    project_version: &'a str,
    /// Common prefix stripped from module names in the sidebar.
    modules_prefix: String,
    modules: &'a Vec<DocLink>,
    /// Page body, already rendered from markdown to HTML.
    content: String,
    source: &'a DocLink,
    timestamp: &'a str,
}
impl<'a> PageTemplate<'a> {
    /// A plain page is never a module, so nothing in the sidebar is "current".
    /// (Kept so both templates expose the same interface to the HTML.)
    pub fn is_current_module(&self, _module: &DocLink) -> bool {
        false
    }
}
/// A named hyperlink used in the docs sidebar and source links.
/// Ordering (derived) is by `name` first, which drives sidebar sorting.
#[derive(PartialEq, Eq, PartialOrd, Ord, Clone)]
struct DocLink {
    name: String,
    path: String,
}
impl DocLink {
    /// True for the placeholder link used when the project declares no
    /// repository (empty name and path — see `generate_all`).
    pub fn is_empty(&self) -> bool {
        self.name.is_empty()
    }
}
/// Generate documentation files for a given project.
///
/// The documentation is built using template files located at the root of this crate.
/// With the documentation, we also build a client-side search index to ease navigation
/// across multiple modules.
pub fn generate_all(root: &Path, config: &Config, modules: Vec<&CheckedModule>) -> Vec<DocFile> {
    let timestamp = new_timestamp();
    let (modules_prefix, modules_links) = generate_modules_links(&modules);
    // "Source" link in the footer: empty placeholder when no repository is
    // configured (DocLink::is_empty lets templates detect that case).
    let source = match &config.repository {
        None => DocLink {
            name: String::new(),
            path: String::new(),
        },
        Some(Repository {
            user,
            project,
            platform,
        }) => DocLink {
            name: format!("{user}/{project}"),
            path: format!("https://{platform}.com/{user}/{project}"),
        },
    };
    let mut output_files: Vec<DocFile> = vec![];
    let mut search_indexes: Vec<SearchIndex> = vec![];
    // One HTML page per module, accumulating search-index entries as we go.
    for module in &modules {
        let (indexes, file) = generate_module(
            config,
            module,
            (&modules_prefix, &modules_links),
            &source,
            &timestamp,
        );
        search_indexes.extend(indexes);
        output_files.push(file);
    }
    // Static CSS/JS assets plus the serialized search index.
    output_files.extend(generate_static_assets(search_indexes));
    // README.md becomes the landing page (index.html).
    output_files.push(generate_readme(
        root,
        config,
        (&modules_prefix, &modules_links),
        &source,
        &timestamp,
    ));
    output_files
}
/// Renders one module's documentation page and collects the search-index
/// entries for its public functions, types, constants, and the module itself.
fn generate_module(
    config: &Config,
    module: &CheckedModule,
    (modules_prefix, modules): (&str, &Vec<DocLink>),
    source: &DocLink,
    timestamp: &Duration,
) -> (Vec<SearchIndex>, DocFile) {
    let mut search_indexes = vec![];
    // Functions
    let functions: Vec<DocFunction> = module
        .ast
        .definitions
        .iter()
        .flat_map(DocFunction::from_definition)
        .sorted()
        .collect();
    functions
        .iter()
        .for_each(|function| search_indexes.push(SearchIndex::from_function(module, function)));
    // Types
    let types: Vec<DocType> = module
        .ast
        .definitions
        .iter()
        .flat_map(DocType::from_definition)
        .sorted()
        .collect();
    types
        .iter()
        .for_each(|type_info| search_indexes.push(SearchIndex::from_type(module, type_info)));
    // Constants
    let constants: Vec<DocConstant> = module
        .ast
        .definitions
        .iter()
        .flat_map(DocConstant::from_definition)
        .sorted()
        .collect();
    constants
        .iter()
        .for_each(|constant| search_indexes.push(SearchIndex::from_constant(module, constant)));
    // Module
    search_indexes.push(SearchIndex::from_module(module));
    // Shadowing `module` with the template is deliberate: everything needed
    // from the CheckedModule has been extracted above.
    let module = ModuleTemplate {
        aiken_version: VERSION,
        breadcrumbs: to_breadcrumbs(&module.name),
        documentation: render_markdown(&module.ast.docs.iter().join("\n")),
        modules_prefix: modules_prefix.to_string(),
        modules,
        project_name: &config.name.to_string(),
        page_title: &format!("{} - {}", module.name, config.name),
        module_name: module.name.clone(),
        project_version: &config.version.to_string(),
        functions,
        types,
        constants,
        source,
        timestamp: timestamp.as_secs().to_string(),
    };
    (
        search_indexes,
        DocFile {
            path: PathBuf::from(format!("{}.html", module.module_name)),
            content: module
                .render()
                .expect("Module documentation template rendering"),
        },
    )
}
/// Emits the fixed CSS/JS assets bundled into the crate's `templates/`
/// directory, plus `search-data.js` carrying the HTML-escaped search index.
fn generate_static_assets(search_indexes: Vec<SearchIndex>) -> Vec<DocFile> {
    let search_data = format!(
        "window.Aiken.initSearch({});",
        json::to_string(&escape_html_contents(search_indexes))
            .expect("search index serialization")
    );
    vec![
        DocFile {
            path: PathBuf::from("favicon.svg"),
            content: std::include_str!("../templates/favicon.svg").to_string(),
        },
        DocFile {
            path: PathBuf::from("css/atom-one-light.min.css"),
            content: std::include_str!("../templates/css/atom-one-light.min.css").to_string(),
        },
        DocFile {
            path: PathBuf::from("css/atom-one-dark.min.css"),
            content: std::include_str!("../templates/css/atom-one-dark.min.css").to_string(),
        },
        DocFile {
            path: PathBuf::from("css/index.css"),
            content: std::include_str!("../templates/css/index.css").to_string(),
        },
        DocFile {
            path: PathBuf::from("js/highlight.min.js"),
            content: std::include_str!("../templates/js/highlight.min.js").to_string(),
        },
        DocFile {
            path: PathBuf::from("js/highlightjs-aiken.js"),
            content: std::include_str!("../templates/js/highlightjs-aiken.js").to_string(),
        },
        DocFile {
            path: PathBuf::from("js/lunr.min.js"),
            content: std::include_str!("../templates/js/lunr.min.js").to_string(),
        },
        DocFile {
            path: PathBuf::from("js/index.js"),
            content: std::include_str!("../templates/js/index.js").to_string(),
        },
        DocFile {
            path: PathBuf::from("search-data.js"),
            content: search_data,
        },
    ]
}
/// Renders the project's `README.md` (empty content if it is missing or
/// unreadable) as the documentation landing page, `index.html`.
fn generate_readme(
    root: &Path,
    config: &Config,
    (modules_prefix, modules): (&str, &Vec<DocLink>),
    source: &DocLink,
    timestamp: &Duration,
) -> DocFile {
    let path = PathBuf::from("index.html");
    let content = std::fs::read_to_string(root.join("README.md")).unwrap_or_default();
    let template = PageTemplate {
        aiken_version: VERSION,
        // The index lives at the docs root, so relative links need no "..".
        breadcrumbs: ".",
        modules_prefix: modules_prefix.to_string(),
        modules,
        project_name: &config.name.to_string(),
        page_title: &config.name.to_string(),
        project_version: &config.version.to_string(),
        content: render_markdown(&content),
        source,
        timestamp: &timestamp.as_secs().to_string(),
    };
    DocFile {
        path,
        content: template.render().expect("Page template rendering"),
    }
}
/// Builds the sorted sidebar links for all modules and strips their common
/// leading prefix (returned separately so templates can display it once).
///
/// A module whose name equals the prefix exactly ends up displayed as "/".
fn generate_modules_links(modules: &Vec<&CheckedModule>) -> (String, Vec<DocLink>) {
    let mut modules_links = vec![];
    for module in modules {
        modules_links.push(DocLink {
            path: format!("{}.html", module.name),
            name: module.name.to_string(),
        });
    }
    modules_links.sort();
    let prefix = find_modules_prefix(&modules_links);
    for module in &mut modules_links {
        let name = module.name.strip_prefix(&prefix).unwrap_or_default();
        module.name = name.strip_prefix('/').unwrap_or(name).to_string();
        if module.name.is_empty() {
            module.name = "/".to_string()
        }
    }
    (prefix, modules_links)
}
/// One entry of the client-side (lunr.js) search index, serialized into
/// `search-data.js`.
#[derive(Serialize, PartialEq, Eq, PartialOrd, Ord, Clone)]
struct SearchIndex {
    /// Owning module name.
    doc: String,
    title: String,
    /// Searchable text body (HTML-escaped before serialization).
    content: String,
    /// Link target, e.g. `module.html#item`.
    url: String,
}
impl SearchIndex {
    /// Index entry for a public function: searchable by signature and docs.
    fn from_function(module: &CheckedModule, function: &DocFunction) -> Self {
        SearchIndex {
            doc: module.name.to_string(),
            title: function.name.to_string(),
            content: format!("{}\n{}", function.signature, function.documentation),
            url: format!("{}.html#{}", module.name, function.name),
        }
    }
    /// Index entry for a type: searchable by its definition, docs, and each
    /// constructor's definition, docs, and labelled arguments.
    fn from_type(module: &CheckedModule, type_info: &DocType) -> Self {
        let constructors = type_info
            .constructors
            .iter()
            .map(|constructor| {
                let arguments = constructor
                    .arguments
                    .iter()
                    .map(|argument| format!("{}\n{}", argument.label, argument.documentation))
                    .join("\n");
                format!(
                    "{}\n{}\n{}",
                    constructor.definition, constructor.documentation, arguments
                )
            })
            .join("\n");
        SearchIndex {
            doc: module.name.to_string(),
            title: type_info.name.to_string(),
            content: format!(
                "{}\n{}\n{}",
                type_info.definition, type_info.documentation, constructors,
            ),
            url: format!("{}.html#{}", module.name, type_info.name),
        }
    }
    /// Index entry for a public module constant.
    fn from_constant(module: &CheckedModule, constant: &DocConstant) -> Self {
        SearchIndex {
            doc: module.name.to_string(),
            title: constant.name.to_string(),
            content: format!("{}\n{}", constant.definition, constant.documentation),
            url: format!("{}.html#{}", module.name, constant.name),
        }
    }
    /// Index entry for the module page itself, searchable by module docs.
    fn from_module(module: &CheckedModule) -> Self {
        SearchIndex {
            doc: module.name.to_string(),
            title: module.name.to_string(),
            content: module.ast.docs.iter().join("\n"),
            url: format!("{}.html", module.name),
        }
    }
}
/// Documentation payload for a public function: rendered signature plus
/// markdown-rendered docs. Derived ordering sorts by name first.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct DocFunction {
    name: String,
    signature: String,
    /// Already rendered to HTML.
    documentation: String,
    source_url: String,
}
impl DocFunction {
    /// Extracts doc data from a definition, keeping only public functions;
    /// everything else yields `None`.
    fn from_definition(def: &TypedDefinition) -> Option<Self> {
        match def {
            Definition::Fn(func_def) if func_def.public => Some(DocFunction {
                name: func_def.name.clone(),
                documentation: func_def
                    .doc
                    .as_deref()
                    .map(render_markdown)
                    .unwrap_or_default(),
                // Pretty-print the signature with effectively-unbounded width
                // so it stays on one line.
                signature: format::Formatter::new()
                    .docs_fn_signature(
                        &func_def.name,
                        &func_def.arguments,
                        &func_def.return_annotation,
                        func_def.return_type.clone(),
                    )
                    .to_pretty_string(MAX_COLUMNS),
                // Per-item source links are not implemented yet.
                source_url: "#todo".to_string(),
            }),
            _ => None,
        }
    }
}
/// Documentation payload for a public module constant. Derived ordering
/// sorts by name first.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
struct DocConstant {
    name: String,
    definition: String,
    /// Already rendered to HTML.
    documentation: String,
    source_url: String,
}
impl DocConstant {
    /// Extracts doc data from a definition, keeping only public module
    /// constants; everything else yields `None`.
    fn from_definition(def: &TypedDefinition) -> Option<Self> {
        match def {
            Definition::ModuleConstant(const_def) if const_def.public => Some(DocConstant {
                name: const_def.name.clone(),
                documentation: const_def
                    .doc
                    .as_deref()
                    .map(render_markdown)
                    .unwrap_or_default(),
                definition: format::Formatter::new()
                    .docs_const_expr(&const_def.name, &const_def.value)
                    .to_pretty_string(MAX_COLUMNS),
                // Per-item source links are not implemented yet.
                source_url: "#todo".to_string(),
            }),
            _ => None,
        }
    }
}
/// Documentation payload for a public type (alias or data type). Constructors
/// are empty for aliases and opaque types. Derived ordering sorts by name.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
struct DocType {
    name: String,
    definition: String,
    /// Already rendered to HTML.
    documentation: String,
    constructors: Vec<DocTypeConstructor>,
    source_url: String,
}
impl DocType {
    /// Extracts doc data from a definition for public type aliases and public
    /// data types. Opaque data types keep their constructors hidden; private
    /// definitions yield `None`.
    fn from_definition(def: &TypedDefinition) -> Option<Self> {
        match def {
            Definition::TypeAlias(info) if info.public => Some(DocType {
                name: info.alias.clone(),
                definition: format::Formatter::new()
                    .docs_type_alias(&info.alias, &info.parameters, &info.annotation)
                    .to_pretty_string(MAX_COLUMNS),
                documentation: info.doc.as_deref().map(render_markdown).unwrap_or_default(),
                constructors: vec![],
                source_url: "#todo".to_string(),
            }),
            Definition::DataType(info) if info.public && !info.opaque => Some(DocType {
                name: info.name.clone(),
                definition: format::Formatter::new()
                    .docs_data_type(
                        &info.name,
                        &info.parameters,
                        &info.constructors,
                        &info.location,
                    )
                    .to_pretty_string(MAX_COLUMNS),
                documentation: info.doc.as_deref().map(render_markdown).unwrap_or_default(),
                constructors: info
                    .constructors
                    .iter()
                    .map(DocTypeConstructor::from_record_constructor)
                    .collect(),
                source_url: "#todo".to_string(),
            }),
            // Opaque types are documented without exposing their constructors.
            Definition::DataType(info) if info.public && info.opaque => Some(DocType {
                name: info.name.clone(),
                definition: format::Formatter::new()
                    .docs_opaque_data_type(&info.name, &info.parameters, &info.location)
                    .to_pretty_string(MAX_COLUMNS),
                documentation: info.doc.as_deref().map(render_markdown).unwrap_or_default(),
                constructors: vec![],
                source_url: "#todo".to_string(),
            }),
            _ => None,
        }
    }
}
/// Documentation payload for one constructor of a documented data type.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
struct DocTypeConstructor {
    definition: String,
    /// Already rendered to HTML.
    documentation: String,
    arguments: Vec<DocTypeConstructorArg>,
}
impl DocTypeConstructor {
    /// Builds constructor docs from the AST record constructor; unlabelled
    /// arguments are dropped (see `DocTypeConstructorArg`).
    fn from_record_constructor(constructor: &RecordConstructor<Arc<Type>>) -> Self {
        DocTypeConstructor {
            definition: format::Formatter::new()
                .docs_record_constructor(constructor)
                .to_pretty_string(MAX_COLUMNS),
            documentation: constructor
                .doc
                .as_deref()
                .map(render_markdown)
                .unwrap_or_default(),
            arguments: constructor
                .arguments
                .iter()
                .filter_map(DocTypeConstructorArg::from_record_constructor_arg)
                .collect(),
        }
    }
}
/// Documentation payload for a labelled constructor argument.
#[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
struct DocTypeConstructorArg {
    label: String,
    /// Already rendered to HTML.
    documentation: String,
}
impl DocTypeConstructorArg {
    /// Builds argument docs from an AST constructor argument; unlabelled
    /// (positional) arguments yield `None` and are skipped.
    fn from_record_constructor_arg(arg: &RecordConstructorArg<Arc<Type>>) -> Option<Self> {
        let label = arg.label.as_ref()?;
        Some(DocTypeConstructorArg {
            label: label.clone(),
            documentation: arg.doc.as_deref().map(render_markdown).unwrap_or_default(),
        })
    }
}
// ------ Extra Helpers
/// Renders CommonMark (with all pulldown-cmark extensions enabled) to HTML.
fn render_markdown(text: &str) -> String {
    let parser = markdown::Parser::new_ext(text, markdown::Options::all());
    // Pre-size for the typical markdown-to-HTML expansion factor.
    let mut html = String::with_capacity(text.len() * 3 / 2);
    markdown::html::push_html(&mut html, parser);
    html
}
fn escape_html_contents(indexes: Vec<SearchIndex>) -> Vec<SearchIndex> {
fn escape_html_content(it: String) -> String {
it.replace('&', "&amp;")
.replace('<', "&lt;")
.replace('>', "&gt;")
.replace('\"', "&quot;")
.replace('\'', "&#39;")
}
indexes
.into_iter()
.map(|idx| SearchIndex {
doc: idx.doc,
title: idx.title,
content: escape_html_content(idx.content),
url: idx.url,
})
.collect::<Vec<SearchIndex>>()
}
/// Current wall-clock time as a duration since the Unix epoch; panics only if
/// the system clock reports a time before the epoch.
fn new_timestamp() -> Duration {
    let now = SystemTime::now();
    now.duration_since(SystemTime::UNIX_EPOCH)
        .expect("get current timestamp")
}
/// Finds the prefix (first `/`-separated segment) shared by ALL module names.
///
/// Returns the empty string when the list is empty, when any module name has
/// no `/` at all, or when two modules disagree on their first segment.
fn find_modules_prefix(modules: &[DocLink]) -> String {
    let mut common: Option<String> = None;
    for module in modules {
        let head = module
            .name
            .split('/')
            .next()
            .unwrap_or_default()
            .to_string();
        common = Some(match common {
            // First nested module seen: adopt its leading segment.
            None if head != module.name => head,
            // Same leading segment as before: keep it.
            Some(prev) if prev == head => prev,
            // Top-level module, or a conflicting segment: no common prefix.
            _ => String::new(),
        });
    }
    common.unwrap_or_default()
}
#[test]
fn find_modules_prefix_test() {
    // No modules => no prefix.
    assert_eq!(find_modules_prefix(&[]), "".to_string());
    // Single nested module => its leading segment.
    assert_eq!(
        find_modules_prefix(&[DocLink {
            name: "aiken/list".to_string(),
            path: String::new()
        }]),
        "aiken".to_string()
    );
    // Single top-level module (no '/') => no prefix.
    assert_eq!(
        find_modules_prefix(&[DocLink {
            name: "my_module".to_string(),
            path: String::new()
        }]),
        "".to_string()
    );
    // All modules share the same leading segment => that segment.
    assert_eq!(
        find_modules_prefix(&[
            DocLink {
                name: "aiken/list".to_string(),
                path: String::new()
            },
            DocLink {
                name: "aiken/byte_array".to_string(),
                path: String::new(),
            }
        ]),
        "aiken".to_string()
    );
    // Conflicting leading segments => no prefix.
    assert_eq!(
        find_modules_prefix(&[
            DocLink {
                name: "aiken/list".to_string(),
                path: String::new()
            },
            DocLink {
                name: "foo/byte_array".to_string(),
                path: String::new(),
            }
        ]),
        "".to_string()
    );
}
/// Relative path from a page at `path` back up to the docs root: "." for a
/// root-level page, otherwise one ".." per directory level, joined by "/".
fn to_breadcrumbs(path: &str) -> String {
    // Directory depth below the root (segments beyond the first, after
    // dropping any leading '/').
    let depth = path
        .strip_prefix('/')
        .unwrap_or(path)
        .split('/')
        .skip(1)
        .count();
    if depth == 0 {
        ".".to_string()
    } else {
        vec![".."; depth].join("/")
    }
}
#[test]
fn to_breadcrumbs_test() {
    // Pages: one ".." per directory level; root-level pages get ".".
    assert_eq!(to_breadcrumbs("a.html"), ".");
    assert_eq!(to_breadcrumbs("/a.html"), ".");
    assert_eq!(to_breadcrumbs("/a/b.html"), "..");
    assert_eq!(to_breadcrumbs("/a/b/c.html"), "../..");
    // Modules: same rule, applied to bare module paths.
    assert_eq!(to_breadcrumbs("a"), ".");
    assert_eq!(to_breadcrumbs("a/b"), "..");
    assert_eq!(to_breadcrumbs("a/b/c"), "../..");
}

View File

@@ -0,0 +1,448 @@
use crate::{pretty, script::EvalHint};
use aiken_lang::{
ast::{BinOp, Span},
parser::error::ParseError,
tipo,
};
use miette::{
Diagnostic, EyreContext, LabeledSpan, MietteHandlerOpts, NamedSource, RgbColors, SourceCode,
};
use std::{
fmt::{Debug, Display},
io,
path::{Path, PathBuf},
};
use uplc::machine::cost_model::ExBudget;
use zip_extract::ZipExtractError;
/// All error conditions surfaced by the project crate. Display text comes
/// from the `thiserror` `#[error(...)]` attributes; rich diagnostics are
/// rendered through the (separate) `Debug`/miette implementation.
#[allow(dead_code)]
#[derive(thiserror::Error)]
pub enum Error {
    /// The same module name was found at two different paths.
    #[error("Duplicate module\n\n{module}")]
    DuplicateModule {
        module: String,
        first: PathBuf,
        second: PathBuf,
    },
    /// An I/O failure tied to a specific file.
    #[error("File operation failed")]
    FileIo { error: io::Error, path: PathBuf },
    /// `aiken fmt --check` found files that are not formatted.
    #[error("Source code incorrectly formatted")]
    Format { problem_files: Vec<Unformatted> },
    #[error(transparent)]
    StandardIo(#[from] io::Error),
    #[error(transparent)]
    Http(#[from] reqwest::Error),
    #[error(transparent)]
    ZipExtract(#[from] ZipExtractError),
    #[error(transparent)]
    JoinError(#[from] tokio::task::JoinError),
    /// A TOML file (aiken.toml / aiken.lock / packages.toml) failed to parse;
    /// carries the source and location for a miette-rendered diagnostic.
    #[error("{help}")]
    TomlLoading {
        path: PathBuf,
        src: String,
        named: NamedSource,
        location: Option<Span>,
        help: String,
    },
    #[error("Missing 'aiken.toml' manifest in {path}")]
    MissingManifest { path: PathBuf },
    #[error("Cyclical module imports")]
    ImportCycle { modules: Vec<String> },
    /// Useful for returning many [`Error::Parse`] at once
    #[error("A list of errors")]
    List(Vec<Self>),
    /// A source file failed to parse.
    #[error("Parsing")]
    Parse {
        path: PathBuf,
        src: String,
        named: NamedSource,
        #[source]
        error: Box<ParseError>,
    },
    /// A source file failed type-checking.
    #[error("Checking")]
    Type {
        path: PathBuf,
        src: String,
        named: NamedSource,
        #[source]
        error: tipo::error::Error,
    },
    #[error("Validator functions must return Bool")]
    ValidatorMustReturnBool {
        path: PathBuf,
        src: String,
        named: NamedSource,
        location: Span,
    },
    #[error("Validator\n\n{name}\n\nrequires at least {at_least} arguments")]
    WrongValidatorArity {
        name: String,
        at_least: u8,
        location: Span,
        path: PathBuf,
        src: String,
        named: NamedSource,
    },
    /// A test failed; in verbose mode the test's source is appended to the
    /// message.
    #[error("{name} failed{}", if *verbose { format!("\n{src}") } else { String::new() } )]
    TestFailure {
        name: String,
        path: PathBuf,
        verbose: bool,
        src: String,
        evaluation_hint: Option<EvalHint>,
    },
}
impl Error {
    /// Number of underlying errors: the length of a `List`, otherwise 1.
    pub fn len(&self) -> usize {
        match self {
            Error::List(errors) => errors.len(),
            _ => 1,
        }
    }
    /// Prints every contained error to stderr using the miette-backed
    /// `Debug` rendering.
    pub fn report(&self) {
        match self {
            Error::List(errors) => {
                for error in errors {
                    eprintln!("Error: {:?}", error)
                }
            }
            rest => eprintln!("Error: {:?}", rest),
        }
    }
    /// Wraps a batch of parse errors for one file into a single `Error::List`,
    /// attaching path and source to each for diagnostics.
    pub fn from_parse_errors(errs: Vec<ParseError>, path: &Path, src: &str) -> Self {
        let mut errors = Vec::with_capacity(errs.len());
        for error in errs {
            errors.push(Error::Parse {
                path: path.into(),
                src: src.to_string(),
                named: NamedSource::new(path.display().to_string(), src.to_string()),
                error: error.into(),
            });
        }
        Error::List(errors)
    }
    /// Merges two errors into one, flattening `List`s so the result is a
    /// single-level `Error::List` preserving order (self's errors first).
    pub fn append(self, next: Self) -> Self {
        match (self, next) {
            (Error::List(mut errors), Error::List(mut next_errors)) => {
                errors.append(&mut next_errors);
                Error::List(errors)
            }
            (Error::List(mut errors), rest) => {
                errors.push(rest);
                Error::List(errors)
            }
            (rest, Error::List(mut next_errors)) => {
                let mut errors = vec![rest];
                errors.append(&mut next_errors);
                Error::List(errors)
            }
            (error, next_error) => Error::List(vec![error, next_error]),
        }
    }
    /// True only for an empty `Error::List` (i.e. "no errors accumulated");
    /// any other variant counts as a real error.
    pub fn is_empty(&self) -> bool {
        matches!(self, Error::List(errors) if errors.is_empty())
    }
    /// The file this error points at, when it has one — used to attach the
    /// offending path to rendered diagnostics.
    pub fn path(&self) -> Option<PathBuf> {
        match self {
            Error::DuplicateModule { second, .. } => Some(second.to_path_buf()),
            Error::FileIo { .. } => None,
            Error::Format { .. } => None,
            Error::StandardIo(_) => None,
            Error::MissingManifest { path } => Some(path.to_path_buf()),
            Error::TomlLoading { path, .. } => Some(path.to_path_buf()),
            Error::ImportCycle { .. } => None,
            Error::List(_) => None,
            Error::Parse { path, .. } => Some(path.to_path_buf()),
            Error::Type { path, .. } => Some(path.to_path_buf()),
            Error::ValidatorMustReturnBool { path, .. } => Some(path.to_path_buf()),
            Error::WrongValidatorArity { path, .. } => Some(path.to_path_buf()),
            Error::TestFailure { path, .. } => Some(path.to_path_buf()),
            Error::Http(_) => None,
            Error::ZipExtract(_) => None,
            Error::JoinError(_) => None,
        }
    }
    /// The source code this error points at, when it carries any — used as
    /// the snippet shown in rendered diagnostics.
    pub fn src(&self) -> Option<String> {
        match self {
            Error::DuplicateModule { .. } => None,
            Error::FileIo { .. } => None,
            Error::Format { .. } => None,
            Error::StandardIo(_) => None,
            Error::MissingManifest { .. } => None,
            Error::TomlLoading { src, .. } => Some(src.to_string()),
            Error::ImportCycle { .. } => None,
            Error::List(_) => None,
            Error::Parse { src, .. } => Some(src.to_string()),
            Error::Type { src, .. } => Some(src.to_string()),
            Error::ValidatorMustReturnBool { src, .. } => Some(src.to_string()),
            Error::WrongValidatorArity { src, .. } => Some(src.to_string()),
            Error::TestFailure { .. } => None,
            Error::Http(_) => None,
            Error::ZipExtract(_) => None,
            Error::JoinError(_) => None,
        }
    }
}
impl Debug for Error {
    /// Render the error through miette's graphical handler so `{:?}` output
    /// includes source snippets, labels, and help text.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let miette_handler = MietteHandlerOpts::new()
            // Stick to ANSI colors (not RGB) for better terminal-theme support
            .rgb_colors(RgbColors::Never)
            .color(true)
            .unicode(true)
            .terminal_links(true)
            .build();
        // Ignore error to prevent format! panics. This can happen if span points at some
        // inaccessible location, for example by calling `report_error()` with wrong working set.
        let _ = miette_handler.debug(self, f);
        Ok(())
    }
}
impl Diagnostic for Error {
fn severity(&self) -> Option<miette::Severity> {
Some(miette::Severity::Error)
}
fn code<'a>(&'a self) -> Option<Box<dyn Display + 'a>> {
match self {
Error::DuplicateModule { .. } => Some(Box::new("aiken::module::duplicate")),
Error::FileIo { .. } => None,
Error::ImportCycle { .. } => Some(Box::new("aiken::module::cyclical")),
Error::List(_) => None,
Error::Parse { .. } => Some(Box::new("aiken::parser")),
Error::Type { .. } => Some(Box::new("aiken::check")),
Error::StandardIo(_) => None,
Error::MissingManifest { .. } => None,
Error::TomlLoading { .. } => Some(Box::new("aiken::loading::toml")),
Error::Format { .. } => None,
Error::ValidatorMustReturnBool { .. } => Some(Box::new("aiken::scripts")),
Error::WrongValidatorArity { .. } => Some(Box::new("aiken::validators")),
Error::TestFailure { path, .. } => Some(Box::new(path.to_str().unwrap_or(""))),
Error::Http(_) => Some(Box::new("aiken::deps")),
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
}
}
fn help<'a>(&'a self) -> Option<Box<dyn Display + 'a>> {
match self {
Error::DuplicateModule { first, second, .. } => Some(Box::new(format!(
"Rename either {} or {}",
first.display(),
second.display()
))),
Error::FileIo { .. } => None,
Error::ImportCycle { modules } => Some(Box::new(format!(
"Try moving the shared code to a separate module that the others can depend on\n- {}",
modules.join("\n- ")
))),
Error::List(_) => None,
Error::Parse { error, .. } => error.kind.help(),
Error::Type { error, .. } => error.help(),
Error::StandardIo(_) => None,
Error::MissingManifest { .. } => Some(Box::new("Try running `aiken new <REPOSITORY/PROJECT>` to initialise a project with an example manifest.")),
Error::TomlLoading { .. } => None,
Error::Format { .. } => None,
Error::ValidatorMustReturnBool { .. } => Some(Box::new("Try annotating the validator's return type with Bool")),
Error::WrongValidatorArity { .. } => Some(Box::new("Validators require a minimum number of arguments please add the missing arguments.\nIf you don't need one of the required arguments use an underscore `_datum`.")),
Error::TestFailure { evaluation_hint, .. } =>{
match evaluation_hint {
None => None,
Some(hint) => {
let budget = ExBudget { mem: i64::MAX, cpu: i64::MAX, };
let left = pretty::boxed("left", &match hint.left.eval(budget) {
(Ok(term), _, _) => format!("{term}"),
(Err(err), _, _) => format!("{err}"),
});
let right = pretty::boxed("right", &match hint.right.eval(budget) {
(Ok(term), _, _) => format!("{term}"),
(Err(err), _, _) => format!("{err}"),
});
let msg = match hint.bin_op {
BinOp::And => Some(format!("{left}\n\nand\n\n{right}\n\nshould both be true.")),
BinOp::Or => Some(format!("{left}\n\nor\n\n{right}\n\nshould be true.")),
BinOp::Eq => Some(format!("{left}\n\nshould be equal to\n\n{right}")),
BinOp::NotEq => Some(format!("{left}\n\nshould not be equal to\n\n{right}")),
BinOp::LtInt => Some(format!("{left}\n\nshould be lower than\n\n{right}")),
BinOp::LtEqInt => Some(format!("{left}\n\nshould be lower than or equal to\n\n{right}")),
BinOp::GtEqInt => Some(format!("{left}\n\nshould be greater than\n\n{right}")),
BinOp::GtInt => Some(format!("{left}\n\nshould be greater than or equal to\n\n{right}")),
_ => None
}?;
Some(Box::new(msg))
}
}
},
Error::Http(_) => None,
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
}
}
fn labels(&self) -> Option<Box<dyn Iterator<Item = LabeledSpan> + '_>> {
match self {
Error::DuplicateModule { .. } => None,
Error::FileIo { .. } => None,
Error::ImportCycle { .. } => None,
Error::List(_) => None,
Error::Parse { error, .. } => error.labels(),
Error::MissingManifest { .. } => None,
Error::Type { error, .. } => error.labels(),
Error::StandardIo(_) => None,
Error::TomlLoading { location, .. } => {
if let Some(location) = location {
Some(Box::new(
vec![LabeledSpan::new_with_span(None, *location)].into_iter(),
))
} else {
None
}
}
Error::Format { .. } => None,
Error::ValidatorMustReturnBool { location, .. } => Some(Box::new(
vec![LabeledSpan::new_with_span(None, *location)].into_iter(),
)),
Error::WrongValidatorArity { location, .. } => Some(Box::new(
vec![LabeledSpan::new_with_span(None, *location)].into_iter(),
)),
Error::TestFailure { .. } => None,
Error::Http(_) => None,
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
}
}
fn source_code(&self) -> Option<&dyn SourceCode> {
match self {
Error::DuplicateModule { .. } => None,
Error::FileIo { .. } => None,
Error::ImportCycle { .. } => None,
Error::List(_) => None,
Error::Parse { named, .. } => Some(named),
Error::Type { named, .. } => Some(named),
Error::StandardIo(_) => None,
Error::MissingManifest { .. } => None,
Error::TomlLoading { named, .. } => Some(named),
Error::Format { .. } => None,
Error::ValidatorMustReturnBool { named, .. } => Some(named),
Error::WrongValidatorArity { named, .. } => Some(named),
Error::TestFailure { .. } => None,
Error::Http(_) => None,
Error::ZipExtract(_) => None,
Error::JoinError(_) => None,
}
}
}
/// Non-fatal diagnostics collected during compilation.
#[derive(thiserror::Error)]
pub enum Warning {
    /// A warning emitted by the type-checker for a particular module.
    #[error("Checking")]
    Type {
        path: PathBuf,
        src: String,
        // `src` wrapped with its file name so miette can render snippets.
        named: NamedSource,
        #[source]
        warning: tipo::error::Warning,
    },
}
impl Diagnostic for Warning {
    fn severity(&self) -> Option<miette::Severity> {
        Some(miette::Severity::Warning)
    }

    /// The named source text miette resolves `labels()` spans against.
    fn source_code(&self) -> Option<&dyn SourceCode> {
        // `Warning` has a single variant, so the pattern is irrefutable.
        let Warning::Type { named, .. } = self;
        Some(named)
    }

    /// Delegate span labeling to the underlying type-checker warning.
    fn labels(&self) -> Option<Box<dyn Iterator<Item = LabeledSpan> + '_>> {
        let Warning::Type { warning, .. } = self;
        warning.labels()
    }

    /// Stable diagnostic code; all warnings currently come from type-checking.
    fn code<'a>(&'a self) -> Option<Box<dyn Display + 'a>> {
        Some(Box::new("aiken::check"))
    }
}
impl Warning {
    /// Wrap a type-checker warning with the file it came from, attaching a
    /// `NamedSource` so miette can display the offending snippet.
    pub fn from_type_warning(warning: tipo::error::Warning, path: PathBuf, src: String) -> Warning {
        // Build the NamedSource first so `path` can then be moved into the
        // variant, avoiding a redundant `PathBuf` clone.
        let named = NamedSource::new(path.display().to_string(), src.clone());
        Warning::Type {
            path,
            warning,
            src,
            named,
        }
    }

    /// Print the warning to stderr via the `Debug` (miette) renderer.
    pub fn report(&self) {
        eprintln!("Warning: {:?}", self)
    }
}
impl Debug for Warning {
    /// Render the warning through miette's graphical handler so `{:?}` output
    /// includes source snippets, labels, and help text.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let miette_handler = MietteHandlerOpts::new()
            // Stick to ANSI colors (not RGB) for better terminal-theme support
            .rgb_colors(RgbColors::Never)
            .color(true)
            .unicode(true)
            .terminal_links(true)
            .build();
        // Ignore error to prevent format! panics. This can happen if span points at some
        // inaccessible location, for example by calling `report_error()` with wrong working set.
        let _ = miette_handler.debug(self, f);
        Ok(())
    }
}
/// A file whose on-disk contents differ from their formatted form.
#[derive(Debug, PartialEq, Eq)]
pub struct Unformatted {
    // Where the unformatted text was read from.
    pub source: PathBuf,
    // Where the formatted text would be written.
    pub destination: PathBuf,
    // Original (unformatted) contents.
    pub input: String,
    // Formatted contents.
    pub output: String,
}

View File

@@ -0,0 +1,146 @@
use std::{
fs,
io::Read,
path::{Path, PathBuf},
str::FromStr,
};
use aiken_lang::{ast::ModuleKind, parser};
use crate::{
error::{Error, Unformatted},
is_aiken_path,
};
/// Entry point for the formatter: format either standard input or the given
/// files/directories; `check` reports differences instead of writing them.
pub fn run(stdin: bool, check: bool, files: Vec<String>) -> Result<(), Error> {
    if !stdin {
        process_files(check, files)
    } else {
        process_stdin(check)
    }
}
fn process_stdin(check: bool) -> Result<(), Error> {
let src = read_stdin()?;
let mut out = String::new();
let (module, extra) = parser::module(&src, ModuleKind::Lib)
.map_err(|errs| Error::from_parse_errors(errs, Path::new("<stdin>"), &src))?;
aiken_lang::format::pretty(&mut out, module, extra, &src);
if !check {
print!("{}", out);
return Ok(());
}
if src != out {
return Err(Error::Format {
problem_files: vec![Unformatted {
source: PathBuf::from("<standard input>"),
destination: PathBuf::from("<standard output>"),
input: src,
output: out,
}],
});
}
Ok(())
}
/// Dispatch file processing: `check` only reports, otherwise rewrite in place.
fn process_files(check: bool, files: Vec<String>) -> Result<(), Error> {
    match check {
        true => check_files(files),
        false => format_files(files),
    }
}
fn check_files(files: Vec<String>) -> Result<(), Error> {
let problem_files = unformatted_files(files)?;
if problem_files.is_empty() {
Ok(())
} else {
Err(Error::Format { problem_files })
}
}
fn format_files(files: Vec<String>) -> Result<(), Error> {
for file in unformatted_files(files)? {
fs::write(file.destination, file.output)?;
}
Ok(())
}
/// Collect every file (recursing into directories, honoring gitignore) whose
/// contents differ from their formatted form. Parse failures from individual
/// files are accumulated and returned together as an `Error::List`.
fn unformatted_files(files: Vec<String>) -> Result<Vec<Unformatted>, Error> {
    let mut problem_files = Vec::with_capacity(files.len());
    let mut errors = Error::List(vec![]);

    for file_path in files {
        // `PathBuf::from` is infallible; the previous
        // `PathBuf::from_str(..).unwrap()` was a needless unwrap on an
        // `Infallible` conversion.
        let path = PathBuf::from(&file_path);

        if path.is_dir() {
            for path in aiken_files_excluding_gitignore(&path) {
                if let Err(err) = format_file(&mut problem_files, path) {
                    errors = errors.append(err);
                };
            }
        } else if let Err(err) = format_file(&mut problem_files, path) {
            errors = errors.append(err);
        }
    }

    if errors.is_empty() {
        Ok(problem_files)
    } else {
        Err(errors)
    }
}
/// Format a single file; when the result differs from the original, record it
/// in `problem_files` (nothing is written to disk here).
fn format_file(problem_files: &mut Vec<Unformatted>, path: PathBuf) -> Result<(), Error> {
    let src = fs::read_to_string(&path).map_err(|error| Error::FileIo {
        error,
        path: path.clone(),
    })?;

    let (module, extra) = parser::module(&src, ModuleKind::Lib)
        .map_err(|errs| Error::from_parse_errors(errs, &path, &src))?;

    let mut output = String::new();
    aiken_lang::format::pretty(&mut output, module, extra, &src);

    if output != src {
        problem_files.push(Unformatted {
            source: path.clone(),
            destination: path,
            input: src,
            output,
        });
    }

    Ok(())
}
/// Read all of standard input into a `String`.
pub fn read_stdin() -> Result<String, Error> {
    let mut buffer = String::new();

    std::io::stdin().read_to_string(&mut buffer)?;

    Ok(buffer)
}
/// Walk `dir` (following symlinks, honoring `.gitignore` even outside a git
/// repo) and yield the paths of regular files that look like Aiken modules
/// according to `is_aiken_path`.
pub fn aiken_files_excluding_gitignore(dir: &Path) -> impl Iterator<Item = PathBuf> + '_ {
    ignore::WalkBuilder::new(dir)
        .follow_links(true)
        // Apply ignore rules even when `dir` is not inside a git repository.
        .require_git(false)
        .build()
        .into_iter()
        .filter_map(Result::ok)
        .filter(|e| e.file_type().map(|t| t.is_file()).unwrap_or(false))
        .map(ignore::DirEntry::into_path)
        .filter(move |d| is_aiken_path(d, dir))
}

View File

@@ -0,0 +1,903 @@
pub mod config;
pub mod deps;
pub mod docs;
pub mod error;
pub mod format;
pub mod module;
pub mod options;
pub mod paths;
pub mod pretty;
pub mod script;
pub mod telemetry;
use crate::module::{CERT, MINT, SPEND, VALIDATOR_NAMES, WITHDRAW};
use aiken_lang::{
ast::{Definition, Function, ModuleKind, TypedDataType, TypedDefinition, TypedFunction},
builder::{DataTypeKey, FunctionAccessKey},
builtins::{self, generic_var},
tipo::TypeInfo,
uplc::CodeGenerator,
IdGenerator,
};
use config::PackageName;
use deps::UseManifest;
use miette::NamedSource;
use options::{CodeGenMode, Options};
use pallas::{
codec::minicbor,
ledger::{addresses::Address, primitives::babbage},
};
use pallas_traverse::ComputeHash;
use script::{EvalHint, EvalInfo, Script};
use serde_json::json;
use std::{
collections::HashMap,
fs,
path::{Path, PathBuf},
};
use telemetry::EventListener;
use uplc::{
ast::{Constant, DeBruijn, Program, Term},
machine::cost_model::ExBudget,
};
use crate::{
config::Config,
error::{Error, Warning},
module::{CheckedModule, CheckedModules, ParsedModule, ParsedModules},
telemetry::Event,
};
/// A source file queued for parsing.
#[derive(Debug)]
pub struct Source {
    // Where the file was read from.
    pub path: PathBuf,
    // The module name derived from the path.
    pub name: String,
    // The file's contents.
    pub code: String,
    // Whether the module is a library or a validator.
    pub kind: ModuleKind,
}
/// An Aiken project: configuration, sources, and the state accumulated while
/// parsing, type-checking, and generating code. `T` receives progress events.
pub struct Project<T>
where
    T: EventListener,
{
    config: Config,
    // Module name -> file path, used to detect duplicate module definitions.
    defined_modules: HashMap<String, PathBuf>,
    checked_modules: CheckedModules,
    id_gen: IdGenerator,
    // Type information per module name, seeded with the prelude/builtins.
    module_types: HashMap<String, TypeInfo>,
    root: PathBuf,
    // Files read from disk, pending parsing (drained by `parse_sources`).
    sources: Vec<Source>,
    pub warnings: Vec<Warning>,
    event_listener: T,
}
impl<T> Project<T>
where
T: EventListener,
{
pub fn new(root: PathBuf, event_listener: T) -> Result<Project<T>, Error> {
let id_gen = IdGenerator::new();
let mut module_types = HashMap::new();
module_types.insert("aiken".to_string(), builtins::prelude(&id_gen));
module_types.insert("aiken/builtin".to_string(), builtins::plutus(&id_gen));
let config = Config::load(root.clone())?;
Ok(Project {
config,
checked_modules: CheckedModules::default(),
defined_modules: HashMap::new(),
id_gen,
module_types,
root,
sources: vec![],
warnings: vec![],
event_listener,
})
}
pub fn build(&mut self, uplc: bool) -> Result<(), Error> {
let options = Options {
code_gen_mode: CodeGenMode::Build(uplc),
};
self.compile(options)
}
pub fn docs(&mut self, destination: Option<PathBuf>) -> Result<(), Error> {
self.event_listener
.handle_event(Event::BuildingDocumentation {
root: self.root.clone(),
name: self.config.name.to_string(),
version: self.config.version.clone(),
});
self.read_source_files()?;
let destination = destination.unwrap_or_else(|| self.root.join("doc"));
let mut parsed_modules = self.parse_sources(self.config.name.clone())?;
for (_, module) in parsed_modules.iter_mut() {
module.attach_doc_and_module_comments();
}
self.type_check(parsed_modules)?;
self.event_listener.handle_event(Event::GeneratingDocFiles {
output_path: destination.clone(),
});
let doc_files = docs::generate_all(
&self.root,
&self.config,
self.checked_modules.values().collect(),
);
for file in doc_files {
let path = destination.join(file.path);
fs::create_dir_all(path.parent().unwrap())?;
fs::write(&path, file.content)?;
}
Ok(())
}
pub fn check(
&mut self,
skip_tests: bool,
match_tests: Option<String>,
verbose: bool,
) -> Result<(), Error> {
let options = Options {
code_gen_mode: if skip_tests {
CodeGenMode::NoOp
} else {
CodeGenMode::Test {
match_tests,
verbose,
}
},
};
self.compile(options)
}
pub fn compile(&mut self, options: Options) -> Result<(), Error> {
self.compile_deps()?;
self.event_listener
.handle_event(Event::StartingCompilation {
root: self.root.clone(),
name: self.config.name.to_string(),
version: self.config.version.clone(),
});
self.read_source_files()?;
let parsed_modules = self.parse_sources(self.config.name.clone())?;
self.type_check(parsed_modules)?;
let validators = self.validate_validators()?;
match options.code_gen_mode {
CodeGenMode::Build(uplc_dump) => {
let programs = self.code_gen(validators)?;
self.write_build_outputs(programs, uplc_dump)?;
Ok(())
}
CodeGenMode::Test {
match_tests,
verbose,
} => {
let tests =
self.collect_scripts(verbose, |def| matches!(def, Definition::Test(..)))?;
if !tests.is_empty() {
self.event_listener.handle_event(Event::RunningTests);
}
let results = self.eval_scripts(tests, match_tests);
let errors: Vec<Error> = results
.iter()
.filter_map(|e| {
if e.success {
None
} else {
Some(Error::TestFailure {
name: e.script.name.clone(),
path: e.script.input_path.clone(),
evaluation_hint: e.script.evaluation_hint.clone(),
src: e.script.program.to_pretty(),
verbose,
})
}
})
.collect();
self.event_listener
.handle_event(Event::FinishedTests { tests: results });
if !errors.is_empty() {
Err(Error::List(errors))
} else {
Ok(())
}
}
CodeGenMode::NoOp => Ok(()),
}
}
fn compile_deps(&mut self) -> Result<(), Error> {
let manifest = deps::download(
&self.event_listener,
None,
UseManifest::Yes,
&self.root,
&self.config,
)?;
for package in manifest.packages {
let lib = self.root.join(paths::build_deps_package(&package.name));
self.event_listener
.handle_event(Event::StartingCompilation {
root: lib.clone(),
name: package.name.to_string(),
version: package.version.clone(),
});
self.read_package_source_files(&lib.join("lib"))?;
let parsed_modules = self.parse_sources(package.name)?;
self.type_check(parsed_modules)?;
}
Ok(())
}
fn read_source_files(&mut self) -> Result<(), Error> {
let lib = self.root.join("lib");
let validators = self.root.join("validators");
self.aiken_files(&validators, ModuleKind::Validator)?;
self.aiken_files(&lib, ModuleKind::Lib)?;
Ok(())
}
fn read_package_source_files(&mut self, lib: &Path) -> Result<(), Error> {
self.aiken_files(lib, ModuleKind::Lib)?;
Ok(())
}
fn parse_sources(&mut self, package_name: PackageName) -> Result<ParsedModules, Error> {
let mut errors = Vec::new();
let mut parsed_modules = HashMap::with_capacity(self.sources.len());
for Source {
path,
name,
code,
kind,
} in self.sources.drain(0..)
{
match aiken_lang::parser::module(&code, kind) {
Ok((mut ast, extra)) => {
// Store the name
ast.name = name.clone();
let module = ParsedModule {
kind,
ast,
code,
name,
path,
extra,
package: package_name.to_string(),
};
if let Some(first) = self
.defined_modules
.insert(module.name.clone(), module.path.clone())
{
return Err(Error::DuplicateModule {
module: module.name.clone(),
first,
second: module.path,
});
}
parsed_modules.insert(module.name.clone(), module);
}
Err(errs) => {
for error in errs {
errors.push(Error::Parse {
path: path.clone(),
src: code.clone(),
named: NamedSource::new(path.display().to_string(), code.clone()),
error: Box::new(error),
})
}
}
}
}
if errors.is_empty() {
Ok(parsed_modules.into())
} else {
Err(Error::List(errors))
}
}
fn type_check(&mut self, mut parsed_modules: ParsedModules) -> Result<(), Error> {
let processing_sequence = parsed_modules.sequence()?;
for name in processing_sequence {
if let Some(ParsedModule {
name,
path,
code,
kind,
extra,
package,
ast,
}) = parsed_modules.remove(&name)
{
let mut type_warnings = Vec::new();
let ast = ast
.infer(
&self.id_gen,
kind,
&self.config.name.to_string(),
&self.module_types,
&mut type_warnings,
)
.map_err(|error| Error::Type {
path: path.clone(),
src: code.clone(),
named: NamedSource::new(path.display().to_string(), code.clone()),
error,
})?;
// Register any warnings emitted as type warnings
let type_warnings = type_warnings
.into_iter()
.map(|w| Warning::from_type_warning(w, path.clone(), code.clone()));
self.warnings.extend(type_warnings);
// Register the types from this module so they can be imported into
// other modules.
self.module_types
.insert(name.clone(), ast.type_info.clone());
self.checked_modules.insert(
name.clone(),
CheckedModule {
kind,
extra,
name,
code,
ast,
package,
input_path: path,
},
);
}
}
Ok(())
}
fn validate_validators(&self) -> Result<Vec<(PathBuf, String, TypedFunction)>, Error> {
let mut errors = Vec::new();
let mut validators = Vec::new();
for module in self.checked_modules.validators() {
for def in module.ast.definitions() {
if let Definition::Fn(func_def) = def {
if VALIDATOR_NAMES.contains(&func_def.name.as_str()) {
// validators must return a Bool
if !func_def.return_type.is_bool() {
errors.push(Error::ValidatorMustReturnBool {
location: func_def.location,
src: module.code.clone(),
path: module.input_path.clone(),
named: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
})
}
// depending on name, validate the minimum number of arguments
// if too low, push a new error on to errors
if [MINT, CERT, WITHDRAW].contains(&func_def.name.as_str())
&& func_def.arguments.len() < 2
{
errors.push(Error::WrongValidatorArity {
location: func_def.location,
src: module.code.clone(),
path: module.input_path.clone(),
named: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
name: func_def.name.clone(),
at_least: 2,
})
}
if SPEND == func_def.name && func_def.arguments.len() < 3 {
errors.push(Error::WrongValidatorArity {
location: func_def.location,
src: module.code.clone(),
path: module.input_path.clone(),
named: NamedSource::new(
module.input_path.display().to_string(),
module.code.clone(),
),
name: func_def.name.clone(),
at_least: 3,
})
}
validators.push((
module.input_path.clone(),
module.name.clone(),
func_def.clone(),
));
}
}
}
}
if errors.is_empty() {
Ok(validators)
} else {
Err(Error::List(errors))
}
}
fn code_gen(
&mut self,
validators: Vec<(PathBuf, String, TypedFunction)>,
) -> Result<Vec<Script>, Error> {
let mut programs = Vec::new();
let mut functions = HashMap::new();
let mut type_aliases = HashMap::new();
let mut data_types = HashMap::new();
let mut imports = HashMap::new();
let mut constants = HashMap::new();
let option_data_type = TypedDataType::option(generic_var(self.id_gen.next()));
data_types.insert(
DataTypeKey {
module_name: "".to_string(),
defined_type: "Option".to_string(),
},
&option_data_type,
);
for module in self.checked_modules.values() {
for def in module.ast.definitions() {
match def {
Definition::Fn(func) => {
functions.insert(
FunctionAccessKey {
module_name: module.name.clone(),
function_name: func.name.clone(),
variant_name: String::new(),
},
func,
);
}
Definition::Test(_) => {}
Definition::TypeAlias(ta) => {
type_aliases.insert((module.name.clone(), ta.alias.clone()), ta);
}
Definition::DataType(dt) => {
data_types.insert(
DataTypeKey {
module_name: module.name.clone(),
defined_type: dt.name.clone(),
},
dt,
);
}
Definition::Use(import) => {
imports.insert((module.name.clone(), import.module.join("/")), import);
}
Definition::ModuleConstant(mc) => {
constants.insert((module.name.clone(), mc.name.clone()), mc);
}
}
}
}
for (input_path, module_name, func_def) in validators {
let Function {
arguments,
name,
body,
..
} = func_def;
let mut generator = CodeGenerator::new(
&functions,
// &type_aliases,
&data_types,
// &imports,
// &constants,
&self.module_types,
);
self.event_listener.handle_event(Event::GeneratingUPLC {
output_path: self.output_path().join(&module_name).join(&name),
name: format!("{}.{}", module_name, name),
});
let program = generator.generate(body, arguments, true);
let script = Script::new(
input_path,
module_name,
name,
program.try_into().unwrap(),
None,
);
programs.push(script);
}
Ok(programs)
}
// TODO: revisit ownership and lifetimes of data in this function
fn collect_scripts(
&mut self,
verbose: bool,
should_collect: fn(&TypedDefinition) -> bool,
) -> Result<Vec<Script>, Error> {
let mut programs = Vec::new();
let mut functions = HashMap::new();
let mut type_aliases = HashMap::new();
let mut data_types = HashMap::new();
let mut imports = HashMap::new();
let mut constants = HashMap::new();
let option_data_type = TypedDataType::option(generic_var(self.id_gen.next()));
data_types.insert(
DataTypeKey {
module_name: "".to_string(),
defined_type: "Option".to_string(),
},
&option_data_type,
);
let mut scripts = Vec::new();
for module in self
.checked_modules
.values()
.filter(|checked| checked.package == self.config.name.to_string())
{
for (_index, def) in module.ast.definitions().enumerate() {
match def {
Definition::Fn(func) => {
functions.insert(
FunctionAccessKey {
module_name: module.name.clone(),
function_name: func.name.clone(),
variant_name: String::new(),
},
func,
);
if should_collect(def) {
scripts.push((module.input_path.clone(), module.name.clone(), func));
}
}
Definition::Test(func) => {
if should_collect(def) {
scripts.push((module.input_path.clone(), module.name.clone(), func));
}
// indices_to_remove.push(index);
}
Definition::TypeAlias(ta) => {
type_aliases.insert((module.name.clone(), ta.alias.clone()), ta);
}
Definition::DataType(dt) => {
data_types.insert(
DataTypeKey {
module_name: module.name.clone(),
defined_type: dt.name.clone(),
},
dt,
);
}
Definition::Use(import) => {
imports.insert((module.name.clone(), import.module.join("/")), import);
}
Definition::ModuleConstant(mc) => {
constants.insert((module.name.clone(), mc.name.clone()), mc);
}
}
}
}
for (input_path, module_name, func_def) in scripts {
let Function {
arguments,
name,
body,
..
} = func_def;
if verbose {
self.event_listener.handle_event(Event::GeneratingUPLCFor {
name: name.clone(),
path: input_path.clone(),
})
}
let mut generator = CodeGenerator::new(
&functions,
// &type_aliases,
&data_types,
// &imports,
// &constants,
&self.module_types,
);
let evaluation_hint = if let Some((bin_op, left_src, right_src)) = func_def.test_hint()
{
let left = CodeGenerator::new(&functions, &data_types, &self.module_types)
.generate(*left_src, vec![], false)
.try_into()
.unwrap();
let right = CodeGenerator::new(&functions, &data_types, &self.module_types)
.generate(*right_src, vec![], false)
.try_into()
.unwrap();
Some(EvalHint {
bin_op,
left,
right,
})
} else {
None
};
let program = generator.generate(body.clone(), arguments.clone(), false);
let script = Script::new(
input_path,
module_name,
name.to_string(),
program.try_into().unwrap(),
evaluation_hint,
);
programs.push(script);
}
Ok(programs)
}
fn eval_scripts(&self, scripts: Vec<Script>, match_name: Option<String>) -> Vec<EvalInfo> {
// TODO: in the future we probably just want to be able to
// tell the machine to not explode on budget consumption.
let initial_budget = ExBudget {
mem: i64::MAX,
cpu: i64::MAX,
};
let mut results = Vec::new();
for script in scripts {
let path = format!("{}{}", script.module, script.name);
if matches!(&match_name, Some(search_str) if !path.to_string().contains(search_str)) {
continue;
}
match script.program.eval(initial_budget) {
(Ok(result), remaining_budget, logs) => {
let eval_info = EvalInfo {
success: result != Term::Error
&& result != Term::Constant(Constant::Bool(false)),
script,
spent_budget: initial_budget - remaining_budget,
output: Some(result),
logs,
};
results.push(eval_info);
}
(Err(..), remaining_budget, logs) => {
let eval_info = EvalInfo {
success: false,
script,
spent_budget: initial_budget - remaining_budget,
output: None,
logs,
};
results.push(eval_info);
}
}
}
results
}
fn output_path(&self) -> PathBuf {
self.root.join("assets")
}
fn write_build_outputs(&self, programs: Vec<Script>, uplc_dump: bool) -> Result<(), Error> {
for script in programs {
let script_output_dir = self.output_path().join(script.module).join(script.name);
fs::create_dir_all(&script_output_dir)?;
// dump textual uplc
if uplc_dump {
let uplc_path = script_output_dir.join("raw.uplc");
fs::write(uplc_path, script.program.to_pretty())?;
}
let program: Program<DeBruijn> = script.program.into();
let cbor = program.to_cbor().unwrap();
// Create file containing just the script cbor hex
let script_path = script_output_dir.join("script.cbor");
let cbor_hex = hex::encode(&cbor);
fs::write(script_path, cbor_hex)?;
// Create the payment script JSON file
let payment_script_path = script_output_dir.join("payment_script.json");
let mut bytes = Vec::new();
let mut encoder = minicbor::Encoder::new(&mut bytes);
encoder.bytes(&cbor).unwrap();
let prefixed_cbor_hex = hex::encode(&bytes);
let payment_script = json!({
"type": "PlutusScriptV2",
"description": "Generated by Aiken",
"cborHex": prefixed_cbor_hex
});
fs::write(
payment_script_path,
serde_json::to_string_pretty(&payment_script).unwrap(),
)?;
// Create mainnet and testnet addresses
let plutus_script = babbage::PlutusV2Script(cbor.into());
let hash = plutus_script.compute_hash();
// mainnet
let mainnet_path = script_output_dir.join("mainnet.addr");
let mut mainnet_bytes: Vec<u8> = vec![0b01110001];
mainnet_bytes.extend(hash.iter());
let mainnet_addr = Address::from_bytes(&mainnet_bytes)
.unwrap()
.to_bech32()
.unwrap();
fs::write(mainnet_path, mainnet_addr)?;
// testnet
let testnet_path = script_output_dir.join("testnet.addr");
let mut testnet_bytes: Vec<u8> = vec![0b01110000];
testnet_bytes.extend(hash.iter());
let testnet_addr = Address::from_bytes(&testnet_bytes)
.unwrap()
.to_bech32()
.unwrap();
fs::write(testnet_path, testnet_addr)?;
}
Ok(())
}
fn aiken_files(&mut self, dir: &Path, kind: ModuleKind) -> Result<(), Error> {
let paths = walkdir::WalkDir::new(dir)
.follow_links(true)
.into_iter()
.filter_map(Result::ok)
.filter(|e| e.file_type().is_file())
.map(|d| d.into_path())
.filter(move |d| is_aiken_path(d, dir));
for path in paths {
self.add_module(path, dir, kind)?;
}
Ok(())
}
fn add_module(&mut self, path: PathBuf, dir: &Path, kind: ModuleKind) -> Result<(), Error> {
let name = self.module_name(dir, &path);
let code = fs::read_to_string(&path).map_err(|error| Error::FileIo {
path: path.clone(),
error,
})?;
self.sources.push(Source {
name,
code,
kind,
path,
});
Ok(())
}
fn module_name(&self, package_path: &Path, full_module_path: &Path) -> String {
// ../../{config.name}/module.ak
// module.ak
let mut module_path = full_module_path
.strip_prefix(package_path)
.expect("Stripping package prefix from module path")
.to_path_buf();
// module
module_path.set_extension("");
// Stringify
let name = module_path
.to_str()
.expect("Module name path to str")
.to_string();
// normalise windows paths
name.replace('\\', "/")
}
}
/// Whether `path`, relative to `dir`, looks like an Aiken module: one or more
/// lowercase identifier segments (separated by `/` or `\`) ending in `.ak`.
fn is_aiken_path(path: &Path, dir: impl AsRef<Path>) -> bool {
    use regex::Regex;

    let module = "[a-z][_a-z0-9]*";
    let slash = "(/|\\\\)";

    let re = Regex::new(&format!("^({module}{slash})*{module}\\.ak$"))
        .expect("is_aiken_path() RE regex");

    let relative = path
        .strip_prefix(dir)
        .expect("is_aiken_path(): strip_prefix")
        .to_str()
        .expect("is_aiken_path(): to_str");

    re.is_match(relative)
}

View File

@@ -0,0 +1,266 @@
use std::{
collections::{HashMap, HashSet},
ops::{Deref, DerefMut},
path::PathBuf,
};
use aiken_lang::{
ast::{DataType, Definition, ModuleKind, TypedModule, UntypedModule},
parser::extra::{comments_before, Comment, ModuleExtra},
};
use petgraph::{algo, graph::NodeIndex, Direction, Graph};
use crate::error::Error;
/// A successfully parsed (but not yet type-checked) module.
#[derive(Debug)]
pub struct ParsedModule {
    pub path: PathBuf,
    pub name: String,
    // Original source text, kept for comment extraction and error snippets.
    pub code: String,
    pub kind: ModuleKind,
    // Name of the package this module belongs to.
    pub package: String,
    pub ast: UntypedModule,
    // Comment/whitespace spans the parser collected alongside the AST.
    pub extra: ModuleExtra,
}
impl ParsedModule {
    /// This module's name paired with the names of the modules it imports,
    /// in the form consumed by the dependency graph.
    pub fn deps_for_graph(&self) -> (String, Vec<String>) {
        let name = self.name.clone();
        let deps: Vec<_> = self
            .ast
            .dependencies()
            .into_iter()
            .map(|(dep, _span)| dep)
            .collect();
        (name, deps)
    }
    /// Attach module-level comments and doc comments (for definitions, data
    /// constructors, and constructor arguments) to the AST, resolving the
    /// comment spans collected in `self.extra` against `self.code`.
    pub fn attach_doc_and_module_comments(&mut self) {
        // Module Comments
        self.ast.docs = self
            .extra
            .module_comments
            .iter()
            .map(|span| {
                Comment::from((span, self.code.as_str()))
                    .content
                    .to_string()
            })
            .collect();
        // Order definitions to avoid dissociating doc comments from them
        let mut definitions: Vec<_> = self.ast.definitions.iter_mut().collect();
        definitions.sort_by(|a, b| a.location().start.cmp(&b.location().start));
        // Doc Comments
        // NOTE: the iterator is consumed in source order, so the sort above is
        // what keeps each comment paired with the definition that follows it.
        let mut doc_comments = self.extra.doc_comments.iter().peekable();
        for def in &mut definitions {
            let docs: Vec<&str> =
                comments_before(&mut doc_comments, def.location().start, &self.code);
            if !docs.is_empty() {
                let doc = docs.join("\n");
                def.put_doc(doc);
            }
            if let Definition::DataType(DataType { constructors, .. }) = def {
                for constructor in constructors {
                    let docs: Vec<&str> =
                        comments_before(&mut doc_comments, constructor.location.start, &self.code);
                    if !docs.is_empty() {
                        let doc = docs.join("\n");
                        constructor.put_doc(doc);
                    }
                    for argument in constructor.arguments.iter_mut() {
                        let docs: Vec<&str> =
                            comments_before(&mut doc_comments, argument.location.start, &self.code);
                        if !docs.is_empty() {
                            let doc = docs.join("\n");
                            argument.put_doc(doc);
                        }
                    }
                }
            }
        }
    }
}
/// Collection of parsed modules, keyed by module name.
pub struct ParsedModules(HashMap<String, ParsedModule>);
impl ParsedModules {
    /// Computes a compilation order for the parsed modules: every module
    /// appears after all of the modules it imports.
    ///
    /// # Errors
    ///
    /// Returns [`Error::ImportCycle`] when the import graph contains a
    /// cycle, listing the module names that form it.
    pub fn sequence(&self) -> Result<Vec<String>, Error> {
        let inputs = self
            .0
            .values()
            .map(|m| m.deps_for_graph())
            .collect::<Vec<(String, Vec<String>)>>();
        let capacity = inputs.len();
        let mut graph = Graph::<(), ()>::with_capacity(capacity, capacity * 5);
        // TODO: maybe use a bimap?
        // Two one-way maps form a bidirectional name <-> node-index lookup.
        let mut indices = HashMap::with_capacity(capacity);
        let mut values = HashMap::with_capacity(capacity);
        for (value, _) in &inputs {
            let index = graph.add_node(());
            indices.insert(value.clone(), index);
            values.insert(index, value.clone());
        }
        for (value, deps) in inputs {
            if let Some(from_index) = indices.get(&value) {
                // Dependencies on modules not in this set are silently
                // skipped by `filter_map`.
                let deps = deps.into_iter().filter_map(|dep| indices.get(&dep));
                for to_index in deps {
                    graph.add_edge(*from_index, *to_index, ());
                }
            }
        }
        match algo::toposort(&graph, None) {
            Ok(sequence) => {
                // Edges point from a module to its dependency, so the
                // topological order is reversed to put dependencies first.
                let sequence = sequence
                    .iter()
                    .filter_map(|i| values.remove(i))
                    .rev()
                    .collect();
                Ok(sequence)
            }
            Err(cycle) => {
                // Recover one concrete cycle through the offending node so
                // the error can name the modules involved.
                let origin = cycle.node_id();
                let mut path = vec![];
                find_cycle(origin, origin, &graph, &mut path, &mut HashSet::new());
                let modules = path
                    .iter()
                    .filter_map(|index| values.remove(index))
                    .collect();
                Err(Error::ImportCycle { modules })
            }
        }
    }
}
impl From<HashMap<String, ParsedModule>> for ParsedModules {
fn from(parsed_modules: HashMap<String, ParsedModule>) -> Self {
ParsedModules(parsed_modules)
}
}
impl From<ParsedModules> for HashMap<String, ParsedModule> {
fn from(parsed_modules: ParsedModules) -> Self {
parsed_modules.0
}
}
/// Lets a `ParsedModules` be used wherever a read-only
/// `HashMap<String, ParsedModule>` is expected.
impl Deref for ParsedModules {
    type Target = HashMap<String, ParsedModule>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Mutable access to the underlying map.
impl DerefMut for ParsedModules {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Depth-first search for a path from `parent` back to `origin`, used to
/// report the modules participating in an import cycle.
///
/// On success, the cycle's nodes are pushed onto `path` as the recursion
/// unwinds (deepest node first) and `true` is returned.
fn find_cycle(
    origin: NodeIndex,
    parent: NodeIndex,
    graph: &petgraph::Graph<(), ()>,
    path: &mut Vec<NodeIndex>,
    seen: &mut HashSet<NodeIndex>,
) -> bool {
    seen.insert(parent);

    for neighbor in graph.neighbors_directed(parent, Direction::Outgoing) {
        // Either this edge closes the loop back to `origin`, or we keep
        // searching through not-yet-visited neighbors.
        let closes_cycle = neighbor == origin
            || (!seen.contains(&neighbor) && find_cycle(origin, neighbor, graph, path, seen));

        if closes_cycle {
            path.push(neighbor);
            return true;
        }
    }

    false
}
/// Recognized validator function name for the spend purpose.
pub const SPEND: &str = "spend";
/// Recognized validator function name for the certificate purpose.
pub const CERT: &str = "cert";
/// Recognized validator function name for the mint purpose.
pub const MINT: &str = "mint";
/// Recognized validator function name for the withdraw purpose.
pub const WITHDRAW: &str = "withdraw";
/// Every function name treated as a validator entry point.
pub const VALIDATOR_NAMES: [&str; 4] = [SPEND, CERT, MINT, WITHDRAW];
/// A module that passed type-checking.
#[derive(Debug, Clone)]
pub struct CheckedModule {
    /// Fully-qualified module name.
    pub name: String,
    /// Raw source text.
    pub code: String,
    /// Path of the source file the module came from.
    pub input_path: PathBuf,
    /// Module kind; see [`CheckedModules::validators`] for its use.
    pub kind: ModuleKind,
    /// Owning package name.
    pub package: String,
    /// Typed syntax tree produced by the type-checker.
    pub ast: TypedModule,
    /// Comments and other trivia collected during parsing.
    pub extra: ModuleExtra,
}
/// Collection of type-checked modules, keyed by module name.
#[derive(Default, Debug, Clone)]
pub struct CheckedModules(HashMap<String, CheckedModule>);
impl From<HashMap<String, CheckedModule>> for CheckedModules {
fn from(checked_modules: HashMap<String, CheckedModule>) -> Self {
CheckedModules(checked_modules)
}
}
impl From<CheckedModules> for HashMap<String, CheckedModule> {
fn from(checked_modules: CheckedModules) -> Self {
checked_modules.0
}
}
impl CheckedModules {
    /// Iterates over the modules whose kind marks them as validators.
    pub fn validators(&self) -> impl Iterator<Item = &CheckedModule> {
        self.0.values().filter(|m| m.kind.is_validator())
    }

    /// Consumes the collection, yielding only the validator modules.
    pub fn into_validators(self) -> impl Iterator<Item = CheckedModule> {
        self.0.into_values().filter(|m| m.kind.is_validator())
    }
}
/// Lets a `CheckedModules` be used wherever a read-only
/// `HashMap<String, CheckedModule>` is expected.
impl Deref for CheckedModules {
    type Target = HashMap<String, CheckedModule>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Mutable access to the underlying map.
impl DerefMut for CheckedModules {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

View File

@@ -0,0 +1,12 @@
/// Options controlling a project build run.
pub struct Options {
    /// What the code-generation phase should do.
    pub code_gen_mode: CodeGenMode,
}
/// Selects what the code-generation phase produces.
pub enum CodeGenMode {
    /// Compile and run tests.
    Test {
        // Presumably a name filter: only run matching tests — confirm at
        // the call site.
        match_tests: Option<String>,
        // Emit more detailed output while running tests.
        verbose: bool,
    },
    /// Build the project.
    // NOTE(review): the meaning of the boolean flag is not visible here —
    // confirm against callers before relying on it.
    Build(bool),
    /// Skip code generation entirely.
    NoOp,
}

View File

@@ -0,0 +1,40 @@
use std::path::PathBuf;
use crate::config::PackageName;
/// Path of the dependency lockfile, relative to the project root.
pub fn manifest() -> PathBuf {
    "aiken.lock".into()
}

/// Root of the build directory, relative to the project root.
pub fn build() -> PathBuf {
    "build".into()
}

/// Directory holding downloaded dependency packages.
pub fn packages() -> PathBuf {
    let mut dir = build();
    dir.push("packages");
    dir
}

/// Manifest file describing the downloaded packages.
pub fn packages_toml() -> PathBuf {
    let mut file = packages();
    file.push("packages.toml");
    file
}
/// Directory inside the packages directory for a single dependency, named
/// `<owner>-<repo>`.
pub fn build_deps_package(package_name: &PackageName) -> PathBuf {
    let dir_name = format!("{}-{}", package_name.owner, package_name.repo);
    packages().join(dir_name)
}

/// Location in the user-wide cache for a dependency's downloaded zipball,
/// named `<owner>-<repo>-<version>.zip`.
pub fn package_cache_zipball(package_name: &PackageName, version: &str) -> PathBuf {
    let file_name = format!(
        "{}-{}-{}.zip",
        package_name.owner, package_name.repo, version
    );
    packages_cache().join(file_name)
}
/// User-wide cache directory for downloaded packages.
pub fn packages_cache() -> PathBuf {
    let mut dir = default_aiken_cache();
    dir.push("packages");
    dir
}

/// Aiken's root cache directory, under the platform's user cache dir.
///
/// Panics when the platform provides no cache directory.
pub fn default_aiken_cache() -> PathBuf {
    dirs::cache_dir()
        .expect("Failed to determine user cache directory")
        .join("aiken")
}

View File

@@ -0,0 +1,122 @@
/// Counts the visible characters of `s`, i.e. its `char` count after ANSI
/// escape sequences have been stripped.
///
/// NOTE(review): both `unwrap`s assume stripping succeeds and yields valid
/// UTF-8; input whose stripped bytes are not UTF-8 would panic here.
pub fn ansi_len(s: &str) -> usize {
    String::from_utf8(strip_ansi_escapes::strip(s).unwrap())
        .unwrap()
        .chars()
        .count()
}
pub fn len_longest_line(s: &str) -> usize {
s.lines().fold(0, |max, l| {
let n = ansi_len(l);
if n > max {
n
} else {
max
}
})
}
/// Draws `content` in a box titled `title` with an unstyled border; see
/// [`boxed_with`] for the styled variant.
pub fn boxed(title: &str, content: &str) -> String {
    boxed_with(title, content, |text| String::from(text))
}
/// Draws `content` inside a box whose header reads `title`, passing each
/// border fragment through `border_style` (e.g. to colorize it).
///
/// NOTE(review): several `border_style("")` arguments are empty strings —
/// box-drawing glyphs may have been lost from this source; confirm against
/// the rendered output (the `"┍━"` on the top border survived).
pub fn boxed_with(title: &str, content: &str, border_style: fn(&str) -> String) -> String {
    // Width of the widest content line, ignoring ANSI escapes.
    let n = len_longest_line(content);
    let content = content
        .lines()
        .map(|line| {
            format!(
                "{} {} {}",
                border_style(""),
                pad_right(line.to_string(), n, " "),
                border_style(""),
            )
        })
        .collect::<Vec<String>>()
        .join("\n");
    let top = format!(
        "{} {}{}",
        border_style("┍━"),
        pad_right(format!("{title} "), n, &border_style("")),
        border_style(""),
    );
    let bottom = format!(
        "{}{}{}",
        border_style(""),
        pad_right(String::new(), n + 2, &border_style("")),
        border_style("")
    );
    format!("{top}\n{content}\n{bottom}")
}
/// Draws `content` in a box that is open on the right-hand side, with a
/// `title` header and a `footer` on the closing line.
///
/// NOTE(review): `content.lines()...first().unwrap()` panics on empty
/// content, and the usize subtractions `i - 1` and `j - k + 1` can
/// underflow when the first line is empty or the footer is wider than the
/// content — confirm callers always pass non-degenerate input.
/// NOTE(review): the empty `border_style("")` arguments look like lost
/// box-drawing glyphs, as in `boxed_with` — verify.
pub fn open_box(
    title: &str,
    content: &str,
    footer: &str,
    border_style: fn(&str) -> String,
) -> String {
    // i: visible width of the first content line; j: widest line; k: footer.
    let i = ansi_len(content.lines().collect::<Vec<_>>().first().unwrap());
    let j = len_longest_line(content);
    let k = ansi_len(footer);
    let content = content
        .lines()
        .map(|line| format!("{} {line}", border_style(""),))
        .collect::<Vec<String>>()
        .join("\n");
    let top = format!(
        "{} {}",
        border_style("┍━"),
        pad_right(format!("{title} "), i - 1, &border_style("")),
    );
    let bottom = format!(
        "{} {}",
        pad_right(border_style(""), j - k + 1, &border_style("")),
        footer
    );
    format!("{top}\n{content}\n{bottom}")
}
/// Prefixes every line of `lines` with `n` spaces.
pub fn indent(lines: &str, n: usize) -> String {
    let margin = " ".repeat(n);
    lines
        .lines()
        .map(|line| format!("{margin}{line}"))
        .collect::<Vec<_>>()
        .join("\n")
}
/// Left-pads `text` with copies of `delimiter` until its visible
/// (ANSI-stripped) length reaches `n`; returns `text` unchanged when it is
/// already long enough.
///
/// Note: padding is added one `delimiter` copy per missing character, so a
/// multi-character delimiter overshoots `n` (same as the original
/// behavior).
pub fn pad_left(mut text: String, n: usize, delimiter: &str) -> String {
    let diff = n as i32 - ansi_len(&text) as i32;
    if diff.is_positive() {
        // Build the whole prefix once: the previous per-copy
        // `insert_str(0, ..)` shifted the entire string on every iteration,
        // which is accidentally O(n²).
        text.insert_str(0, &delimiter.repeat(diff as usize));
    }
    text
}
/// Right-pads `text` with copies of `delimiter` until its visible
/// (ANSI-stripped) length reaches `n`; returns `text` unchanged when it is
/// already long enough.
pub fn pad_right(mut text: String, n: usize, delimiter: &str) -> String {
    let missing = n as i32 - ansi_len(&text) as i32;
    if missing > 0 {
        text.push_str(&delimiter.repeat(missing as usize));
    }
    text
}
/// Applies `apply_style` to `s` only when `styled` is set; otherwise
/// returns `s` untouched.
pub fn style_if(styled: bool, s: String, apply_style: fn(String) -> String) -> String {
    match styled {
        true => apply_style(s),
        false => s,
    }
}

View File

@@ -0,0 +1,47 @@
use crate::{ExBudget, Term};
use aiken_lang::ast::BinOp;
use std::path::PathBuf;
use uplc::ast::{NamedDeBruijn, Program};
/// A compiled UPLC program together with where it came from and how to
/// explain its evaluation.
#[derive(Debug)]
pub struct Script {
    /// Source file the script originated from.
    pub input_path: PathBuf,
    /// Module the script was defined in.
    pub module: String,
    /// Name of the script within its module.
    pub name: String,
    /// The compiled program to evaluate.
    pub program: Program<NamedDeBruijn>,
    /// Optional context used when reporting evaluation results.
    pub evaluation_hint: Option<EvalHint>,
}
impl Script {
pub fn new(
input_path: PathBuf,
module: String,
name: String,
program: Program<NamedDeBruijn>,
evaluation_hint: Option<EvalHint>,
) -> Script {
Script {
input_path,
module,
name,
program,
evaluation_hint,
}
}
}
/// Context for explaining the evaluation of a binary operation: the
/// operator plus standalone programs for each operand.
// NOTE(review): presumably used to report failing test assertions by
// evaluating each side separately — confirm at the usage site.
#[derive(Debug, Clone)]
pub struct EvalHint {
    /// The binary operator involved.
    pub bin_op: BinOp,
    /// Program producing the left-hand operand.
    pub left: Program<NamedDeBruijn>,
    /// Program producing the right-hand operand.
    pub right: Program<NamedDeBruijn>,
}
/// Outcome of evaluating a single [`Script`].
#[derive(Debug)]
pub struct EvalInfo {
    /// Whether the evaluation was considered successful.
    pub success: bool,
    /// The script that was evaluated.
    pub script: Script,
    /// Execution budget consumed during evaluation.
    pub spent_budget: ExBudget,
    /// Resulting term, when evaluation produced one.
    pub output: Option<Term<NamedDeBruijn>>,
    /// Log messages collected during evaluation.
    pub logs: Vec<String>,
}

View File

@@ -0,0 +1,46 @@
use crate::script::EvalInfo;
use std::path::PathBuf;
/// Receives [`Event`] notifications from the build pipeline; implementors
/// decide how to surface them (terminal output, logging, silence, ...).
pub trait EventListener: std::fmt::Debug {
    fn handle_event(&self, event: Event);
}
/// Progress notifications emitted by the project pipeline for an
/// [`EventListener`] to handle.
pub enum Event {
    /// Compilation of the named package is starting.
    StartingCompilation {
        name: String,
        version: String,
        root: PathBuf,
    },
    /// Documentation generation for the named package is starting.
    BuildingDocumentation {
        name: String,
        version: String,
        root: PathBuf,
    },
    /// Documentation files are being written to `output_path`.
    GeneratingDocFiles {
        output_path: PathBuf,
    },
    /// UPLC output for the named package is being written.
    GeneratingUPLC {
        output_path: PathBuf,
        name: String,
    },
    /// UPLC is being generated for one specific item.
    GeneratingUPLCFor {
        name: String,
        path: PathBuf,
    },
    /// A function was evaluated; carries the evaluation results.
    EvaluatingFunction {
        results: Vec<EvalInfo>,
    },
    /// The test run is starting.
    RunningTests,
    /// The test run finished; carries one result per test.
    FinishedTests {
        tests: Vec<EvalInfo>,
    },
    /// Blocked waiting on the build-directory lock held by another process.
    WaitingForBuildDirLock,
    /// The named dependency is being downloaded.
    DownloadingPackage {
        name: String,
    },
    /// `count` packages finished downloading; `start` marks when the
    /// downloads began.
    PackagesDownloaded {
        start: tokio::time::Instant,
        count: usize,
    },
    /// Dependency version resolution is in progress.
    ResolvingVersions,
}