Merge branch 'aiken-lang:main' into add-completion-install
This commit is contained in:
commit 6df1fcb9b0

@@ -1,148 +1,312 @@
# Copyright 2022-2024, axodotdev
# SPDX-License-Identifier: MIT or Apache-2.0
#
# CI that:
#
# * checks for a Git Tag that looks like a release
# * builds artifacts with cargo-dist (archives, installers, hashes)
# * uploads those artifacts to temporary workflow zip
# * on success, uploads the artifacts to a GitHub Release
#
# Note that the GitHub Release will be created with a generated
# title/body based on your changelogs.

name: Release

permissions:
  contents: write

# This task will run whenever you push a git tag that looks like a version
# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
#
# If PACKAGE_NAME is specified, then the announcement will be for that
# package (erroring out if it doesn't have the given version or isn't cargo-dist-able).
#
# If PACKAGE_NAME isn't specified, then the announcement will be for all
# (cargo-dist-able) packages in the workspace with that version (this mode is
# intended for workspaces with only one dist-able package, or with all dist-able
# packages versioned/released in lockstep).
#
# If you push multiple tags at once, separate instances of this workflow will
# spin up, creating an independent announcement for each one. However, GitHub
# will hard limit this to 3 tags per commit, as it will assume more tags is a
# mistake.
#
# If there's a prerelease-style suffix to the version, then the release(s)
# will be marked as a prerelease.
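As a side note on the tag convention documented above, the split into PACKAGE_NAME and VERSION can be pictured with the rough sketch below. This is purely illustrative Rust, not cargo-dist's actual parser; the function name and the simplified rules (last '/' separates an optional package prefix, an optional leading 'v' is dropped) are assumptions made for the example, and prefix-only forms such as "releases/v1.0.0" are accepted by cargo-dist but not modelled here.

// Illustrative sketch of the tag convention described in the comments above.
// Not cargo-dist's real code; `split_tag` is invented for this example.
fn split_tag(tag: &str) -> (Option<&str>, &str) {
    match tag.rsplit_once('/') {
        // "my-app/0.1.0" -> package prefix + version
        Some((package, version)) => (Some(package), version.strip_prefix('v').unwrap_or(version)),
        // "1.0.0" or "v0.1.0-prerelease.1" -> version only
        None => (None, tag.strip_prefix('v').unwrap_or(tag)),
    }
}

fn main() {
    assert_eq!(split_tag("v1.0.0"), (None, "1.0.0"));
    assert_eq!(split_tag("my-app/0.1.0"), (Some("my-app"), "0.1.0"));
    assert_eq!(split_tag("v0.1.0-prerelease.1"), (None, "0.1.0-prerelease.1"));
}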
on:
|
||||
push:
|
||||
tags: ["v*.*.*"]
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
tags:
|
||||
- '**[0-9]+.[0-9]+.[0-9]+*'
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
name: Prepare release
|
||||
# Run 'cargo dist plan' (or host) to determine what tasks we need to do
|
||||
plan:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
tag_name: ${{ steps.release_info.outputs.tag_name }}
|
||||
release_name: ${{ steps.release_info.outputs.release_name }}
|
||||
# release_notes: ${{ steps.extract_release_notes.outputs.release_notes }}
|
||||
|
||||
val: ${{ steps.plan.outputs.manifest }}
|
||||
tag: ${{ !github.event.pull_request && github.ref_name || '' }}
|
||||
tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
|
||||
publishing: ${{ !github.event.pull_request }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Compute release name and tag
|
||||
id: release_info
|
||||
run: |
|
||||
echo "tag_name=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT
|
||||
echo "release_name=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT
|
||||
|
||||
# - name: Extract release notes
|
||||
# id: extract_release_notes
|
||||
# run: echo "release_notes=\"$(sed -n '/^## .*$/,$p' CHANGELOG.md | sed '1d;/^## /,$d')\"" >> $GITHUB_OUTPUT
|
||||
|
||||
release:
|
||||
name: ${{ matrix.job.target }} (${{ matrix.job.os }})
|
||||
runs-on: ${{ matrix.job.os }}
|
||||
needs: prepare
|
||||
strategy:
|
||||
matrix:
|
||||
job:
|
||||
# os: used for the runner
|
||||
# platform: a generic platform name
|
||||
# target: used by Cargo
|
||||
# arch: either 386, arm64 or amd64
|
||||
- os: ubuntu-latest
|
||||
platform: linux
|
||||
target: x86_64-unknown-linux-gnu
|
||||
arch: amd64
|
||||
# - os: ubuntu-latest
|
||||
# platform: linux
|
||||
# target: aarch64-unknown-linux-gnu
|
||||
# arch: arm64
|
||||
- os: macos-latest
|
||||
platform: darwin
|
||||
target: x86_64-apple-darwin
|
||||
arch: amd64
|
||||
- os: macos-latest
|
||||
platform: darwin
|
||||
target: aarch64-apple-darwin
|
||||
arch: arm64
|
||||
- os: windows-latest
|
||||
platform: win32
|
||||
target: x86_64-pc-windows-msvc
|
||||
arch: amd64
|
||||
# - os: windows-latest
|
||||
# platform: win32
|
||||
# target: aarch64-pc-windows-msvc
|
||||
# arch: arm64
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Install toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
target: ${{ matrix.job.target }}
|
||||
override: true
|
||||
|
||||
- uses: Swatinem/rust-cache@v1
|
||||
with:
|
||||
cache-on-failure: true
|
||||
|
||||
- name: Apple M1 setup
|
||||
if: ${{ matrix.job.target == 'aarch64-apple-darwin' }}
|
||||
run: |
|
||||
echo "SDKROOT=$(xcrun -sdk macosx --show-sdk-path)" >> $GITHUB_ENV
|
||||
echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx --show-sdk-platform-version)" >> $GITHUB_ENV
|
||||
|
||||
- name: Linux AMD setup
|
||||
if: ${{ matrix.job.target == 'x86_64-unknown-linux-gnu' }}
|
||||
run: |
|
||||
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
|
||||
|
||||
- name: Linux ARM setup
|
||||
if: ${{ matrix.job.target == 'aarch64-unknown-linux-gnu' }}
|
||||
run: |
|
||||
sudo apt-get update -y
|
||||
sudo apt-get install -y gcc-aarch64-linux-gnu libssl-dev:armhf
|
||||
echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc" >> $GITHUB_ENV
|
||||
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
|
||||
|
||||
- name: Windows setup
|
||||
if: ${{ matrix.job.os == 'windows-latest' }}
|
||||
run: |
|
||||
echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV
|
||||
|
||||
- name: Build binaries
|
||||
uses: actions-rs/cargo@v1
|
||||
with:
|
||||
command: build
|
||||
args: --release --bins --target ${{ matrix.job.target }}
|
||||
|
||||
- name: Archive binaries
|
||||
id: artifacts
|
||||
env:
|
||||
PLATFORM_NAME: ${{ matrix.job.platform }}
|
||||
TARGET: ${{ matrix.job.target }}
|
||||
ARCH: ${{ matrix.job.arch }}
|
||||
VERSION_NAME: ${{ needs.prepare.outputs.tag_name }}
|
||||
run: |
|
||||
if [ "$PLATFORM_NAME" == "linux" ]; then
|
||||
tar -czvf "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz" -C ./target/${TARGET}/release aiken
|
||||
echo "::set-output name=file_name::aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz"
|
||||
elif [ "$PLATFORM_NAME" == "darwin" ]; then
|
||||
# We need to use gtar here otherwise the archive is corrupt.
|
||||
# See: https://github.com/actions/virtual-environments/issues/2619
|
||||
gtar -czvf "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz" -C ./target/${TARGET}/release aiken
|
||||
echo "::set-output name=file_name::aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz"
|
||||
else
|
||||
cd ./target/${TARGET}/release
|
||||
7z a -tzip "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.zip" aiken.exe
|
||||
mv "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.zip" ../../../
|
||||
echo "::set-output name=file_name::aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.zip"
|
||||
fi
|
||||
submodules: recursive
|
||||
- name: Install cargo-dist
|
||||
# we specify bash to get pipefail; it guards against the `curl` command
|
||||
# failing. otherwise `sh` won't catch that `curl` returned non-0
|
||||
shell: bash
|
||||
|
||||
# Creates the release for this specific version
|
||||
- name: Create release
|
||||
uses: softprops/action-gh-release@v1
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh"
|
||||
# sure would be cool if github gave us proper conditionals...
|
||||
# so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
|
||||
# functionality based on whether this is a pull_request, and whether it's from a fork.
|
||||
# (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
|
||||
# but also really annoying to build CI around when it needs secrets to work right.)
|
||||
- id: plan
|
||||
run: |
|
||||
cargo dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
cat plan-dist-manifest.json
|
||||
echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ needs.prepare.outputs.release_name }}
|
||||
tag_name: ${{ needs.prepare.outputs.tag_name }}
|
||||
files: |
|
||||
${{ steps.artifacts.outputs.file_name }}
|
||||
name: artifacts-plan-dist-manifest
|
||||
path: plan-dist-manifest.json
|
||||
|
||||
# Build and packages all the platform-specific things
|
||||
build-local-artifacts:
|
||||
name: build-local-artifacts (${{ join(matrix.targets, ', ') }})
|
||||
# Let the initial task tell us to not run (currently very blunt)
|
||||
needs:
|
||||
- plan
|
||||
if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
# Target platforms/runners are computed by cargo-dist in create-release.
|
||||
# Each member of the matrix has the following arguments:
|
||||
#
|
||||
# - runner: the github runner
|
||||
# - dist-args: cli flags to pass to cargo dist
|
||||
# - install-dist: expression to run to install cargo-dist on the runner
|
||||
#
|
||||
# Typically there will be:
|
||||
# - 1 "global" task that builds universal installers
|
||||
# - N "local" tasks that build each platform's binaries and platform-specific installers
|
||||
matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json
|
||||
steps:
|
||||
- name: enable windows longpaths
|
||||
run: |
|
||||
git config --global core.longpaths true
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: swatinem/rust-cache@v2
|
||||
with:
|
||||
key: ${{ join(matrix.targets, '-') }}
|
||||
- name: Install cargo-dist
|
||||
run: ${{ matrix.install_dist }}
|
||||
# Get the dist-manifest
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
${{ matrix.packages_install }}
|
||||
- name: Build artifacts
|
||||
run: |
|
||||
# Actually do builds and make zips and whatnot
|
||||
cargo dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
- id: cargo-dist
|
||||
name: Post-build
|
||||
# We force bash here just because github makes it really hard to get values up
|
||||
# to "real" actions without writing to env-vars, and writing to env-vars has
|
||||
# inconsistent syntax between shell and powershell.
|
||||
shell: bash
|
||||
run: |
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-build-local-${{ join(matrix.targets, '_') }}
|
||||
path: |
|
||||
${{ steps.cargo-dist.outputs.paths }}
|
||||
${{ env.BUILD_MANIFEST_NAME }}
|
||||
|
||||
# Build and package all the platform-agnostic(ish) things
|
||||
build-global-artifacts:
|
||||
needs:
|
||||
- plan
|
||||
- build-local-artifacts
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cargo-dist
|
||||
shell: bash
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh"
|
||||
# Get all the local artifacts for the global tasks to use (for e.g. checksums)
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
- id: cargo-dist
|
||||
shell: bash
|
||||
run: |
|
||||
cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
|
||||
echo "cargo dist ran successfully"
|
||||
|
||||
# Parse out what we just built and upload it to scratch storage
|
||||
echo "paths<<EOF" >> "$GITHUB_OUTPUT"
|
||||
jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
|
||||
echo "EOF" >> "$GITHUB_OUTPUT"
|
||||
|
||||
cp dist-manifest.json "$BUILD_MANIFEST_NAME"
|
||||
- name: "Upload artifacts"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: artifacts-build-global
|
||||
path: |
|
||||
${{ steps.cargo-dist.outputs.paths }}
|
||||
${{ env.BUILD_MANIFEST_NAME }}
|
||||
# Determines if we should publish/announce
|
||||
host:
|
||||
needs:
|
||||
- plan
|
||||
- build-local-artifacts
|
||||
- build-global-artifacts
|
||||
# Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
|
||||
if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
runs-on: "ubuntu-20.04"
|
||||
outputs:
|
||||
val: ${{ steps.host.outputs.manifest }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install cargo-dist
|
||||
run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh"
|
||||
# Fetch artifacts from scratch-storage
|
||||
- name: Fetch artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: target/distrib/
|
||||
merge-multiple: true
|
||||
# This is a harmless no-op for GitHub Releases, hosting for that happens in "announce"
|
||||
- id: host
|
||||
shell: bash
|
||||
run: |
|
||||
cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
|
||||
echo "artifacts uploaded and released successfully"
|
||||
cat dist-manifest.json
|
||||
echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
|
||||
- name: "Upload dist-manifest.json"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
# Overwrite the previous copy
|
||||
name: artifacts-dist-manifest
|
||||
path: dist-manifest.json
|
||||
|
||||
publish-homebrew-formula:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PLAN: ${{ needs.plan.outputs.val }}
|
||||
GITHUB_USER: "axo bot"
|
||||
GITHUB_EMAIL: "admin+bot@axo.dev"
|
||||
if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
repository: "aiken-lang/homebrew-tap"
|
||||
token: ${{ secrets.HOMEBREW_TAP_TOKEN }}
|
||||
# So we have access to the formula
|
||||
- name: Fetch local artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: Formula/
|
||||
merge-multiple: true
|
||||
# This is extra complex because you can make your Formula name not match your app name
|
||||
# so we need to find releases with a *.rb file, and publish with that filename.
|
||||
- name: Commit formula files
|
||||
run: |
|
||||
git config --global user.name "${GITHUB_USER}"
|
||||
git config --global user.email "${GITHUB_EMAIL}"
|
||||
|
||||
for release in $(echo "$PLAN" | jq --compact-output '.releases[] | select([.artifacts[] | endswith(".rb")] | any)'); do
|
||||
filename=$(echo "$release" | jq '.artifacts[] | select(endswith(".rb"))' --raw-output)
|
||||
name=$(echo "$filename" | sed "s/\.rb$//")
|
||||
version=$(echo "$release" | jq .app_version --raw-output)
|
||||
|
||||
git add "Formula/${filename}"
|
||||
git commit -m "${name} ${version}"
|
||||
done
|
||||
git push
|
||||
|
||||
# Create a GitHub Release while uploading all files to it
|
||||
announce:
|
||||
needs:
|
||||
- plan
|
||||
- host
|
||||
- publish-homebrew-formula
|
||||
# use "always() && ..." to allow us to wait for all publish jobs while
|
||||
# still allowing individual publish jobs to skip themselves (for prereleases).
|
||||
# "host" however must run to completion, no skipping allowed!
|
||||
if: ${{ always() && needs.host.result == 'success' && (needs.publish-homebrew-formula.result == 'skipped' || needs.publish-homebrew-formula.result == 'success') }}
|
||||
runs-on: "ubuntu-20.04"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: "Download GitHub Artifacts"
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: artifacts-*
|
||||
path: artifacts
|
||||
merge-multiple: true
|
||||
- name: Cleanup
|
||||
run: |
|
||||
# Remove the granular manifests
|
||||
rm -f artifacts/*-dist-manifest.json
|
||||
- name: Create GitHub Release
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
tag: ${{ needs.plan.outputs.tag }}
|
||||
name: ${{ fromJson(needs.host.outputs.val).announcement_title }}
|
||||
body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }}
|
||||
prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }}
|
||||
artifacts: "artifacts/*"

CHANGELOG.md (10 changed lines)
@@ -13,6 +13,16 @@

- **aiken-lang**: formatter should not erase `pub` on validators. @rvcas
- **aiken-lang**: error on using tuple index when a tuple is returned by a generic function. @rvcas
- **aiken-lang**: fix a regression in the type-checker introduced in v1.0.25-alpha regarding type comparison. See #917. @KtorZ
- **aiken-lang**: Fix incongruous generics after type-checking, which caused `[]` to be treated as a list in cases where it needed to be an empty map primitive. See #922. @KtorZ
- **aiken-lang**: Fix for generic constrs being used as functions, which caused type mismatch errors. @Microproofs
- **aiken-lang**: Fix for an error occurring when a field holds Data that is not a constr type while compiler traces are on. @Microproofs

### Changed

- **aiken-lang**: **MAJOR CHANGE** 2-tuples are now treated the same as 3+ tuples. To replace the representation of pairs at the UPLC level, there is a new Prelude type called `Pair` with 2 generic arguments. The main place you will see it used is in the script context. Existing contracts can continue to use 2-tuples; just note that the off-chain representation is an array of 2 items in CBOR (a compiler-side sketch of the new type follows this list). @KtorZ @Microproofs
- **aiken-lang**: Some more code gen cleanup. @Microproofs
- **aiken-lang**: New optimization for wrapped builtins found in the stdlib. @Microproofs
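To make the 2-tuple change above concrete, here is a minimal compiler-side sketch mirroring the Type::Pair variant and the pair(...) helper that this diff adds to builtins.rs further down. The definitions below are simplified stand-ins written for this example (they omit fields such as alias and are not the real aiken-lang types).

use std::rc::Rc;

// Simplified stand-ins for the real aiken `Type`; only what this sketch needs.
#[derive(Debug)]
enum Type {
    App { name: String, args: Vec<Rc<Type>> },
    // New: a pair is its own type rather than a 2-element Tuple.
    Pair { fst: Rc<Type>, snd: Rc<Type> },
}

// Mirrors the new `pair` helper added in builtins.rs.
fn pair(fst: Rc<Type>, snd: Rc<Type>) -> Rc<Type> {
    Rc::new(Type::Pair { fst, snd })
}

fn app(name: &str, args: Vec<Rc<Type>>) -> Rc<Type> {
    Rc::new(Type::App { name: name.to_string(), args })
}

fn main() {
    // Builtins that used 2-tuples now use Pair; for example UnConstrData's result
    // is modelled as Pair<Int, List<Data>> instead of a 2-tuple.
    let un_constr_result = pair(app("Int", vec![]), app("List", vec![app("Data", vec![])]));
    println!("{un_constr_result:?}");
}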

## v1.0.26-alpha - 2024-03-25


Cargo.toml (33 changed lines)
@@ -2,13 +2,35 @@
members = ["crates/*"]
resolver = "2"

[profile.release]
strip = true

[workspace.metadata.release]
shared-version = true
tag-name = "v{{version}}"

# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.13.3"
# CI backends to support
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell", "npm", "homebrew", "msi"]
# A GitHub repo to push Homebrew formulas to
tap = "aiken-lang/homebrew-tap"
# Target platforms to build apps for (Rust target-triple syntax)
targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"]
# The archive format to use for Windows builds (defaults to .zip)
windows-archive = ".tar.gz"
# The archive format to use for non-Windows builds (defaults to .tar.xz)
unix-archive = ".tar.gz"
# A namespace to use when publishing this package to the npm registry
npm-scope = "@aiken-lang"
# Publish jobs to run in CI
publish-jobs = ["homebrew"]
# How cargo-dist should run on pull requests
pr-run-mode = "plan"
# Whether to install an updater program
install-updater = false

[workspace.dependencies]
walkdir = "2.3.2"
pallas = "0.22.0"
@@ -19,3 +41,8 @@ opt-level = 3

[profile.dev.package.similar]
opt-level = 3

# The profile that 'cargo dist' will build with
[profile.dist]
inherits = "release"
lto = "thin"

LICENSE (3 changed lines)
@@ -187,7 +187,8 @@
identification within third-party archives.

Copyright 2016-2022 Louis Pilfold (as Gleam)
Copyright 2022-Present TxPipe & Lucas Rosa (as Aiken)
Copyright 2022-2024 Cardano Foundation (as Aiken)
Copyright 2024-Present PRAGMA (as Aiken)

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

@ -970,6 +970,12 @@ pub enum Annotation {
|
|||
location: Span,
|
||||
elems: Vec<Self>,
|
||||
},
|
||||
|
||||
Pair {
|
||||
location: Span,
|
||||
fst: Box<Self>,
|
||||
snd: Box<Self>,
|
||||
},
|
||||
}
|
||||
|
||||
impl Annotation {
|
||||
|
@ -979,7 +985,8 @@ impl Annotation {
|
|||
| Annotation::Tuple { location, .. }
|
||||
| Annotation::Var { location, .. }
|
||||
| Annotation::Hole { location, .. }
|
||||
| Annotation::Constructor { location, .. } => *location,
|
||||
| Annotation::Constructor { location, .. }
|
||||
| Annotation::Pair { location, .. } => *location,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1081,6 +1088,18 @@ impl Annotation {
|
|||
} => name == o_name,
|
||||
_ => false,
|
||||
},
|
||||
Annotation::Pair { fst, snd, .. } => {
|
||||
if let Annotation::Pair {
|
||||
fst: o_fst,
|
||||
snd: o_snd,
|
||||
..
|
||||
} = other
|
||||
{
|
||||
fst.is_logically_equal(o_fst) && snd.is_logically_equal(o_snd)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1101,6 +1120,9 @@ impl Annotation {
|
|||
elems.iter().find_map(|arg| arg.find_node(byte_index))
|
||||
}
|
||||
Annotation::Var { .. } | Annotation::Hole { .. } => None,
|
||||
Annotation::Pair { fst, snd, .. } => fst
|
||||
.find_node(byte_index)
|
||||
.or_else(|| snd.find_node(byte_index)),
|
||||
};
|
||||
|
||||
located.or(Some(Located::Annotation(self)))
|
||||
|
@ -1225,6 +1247,12 @@ pub enum Pattern<Constructor, Type> {
|
|||
tipo: Type,
|
||||
},
|
||||
|
||||
Pair {
|
||||
location: Span,
|
||||
fst: Box<Self>,
|
||||
snd: Box<Self>,
|
||||
},
|
||||
|
||||
Tuple {
|
||||
location: Span,
|
||||
elems: Vec<Self>,
|
||||
|
@ -1240,6 +1268,7 @@ impl<A, B> Pattern<A, B> {
|
|||
| Pattern::List { location, .. }
|
||||
| Pattern::Discard { location, .. }
|
||||
| Pattern::Tuple { location, .. }
|
||||
| Pattern::Pair { location, .. }
|
||||
| Pattern::Constructor { location, .. } => *location,
|
||||
}
|
||||
}
|
||||
|
@ -1309,6 +1338,19 @@ impl TypedPattern {
|
|||
_ => None,
|
||||
},
|
||||
|
||||
Pattern::Pair { fst, snd, .. } => match &**value {
|
||||
Type::Pair {
|
||||
fst: fst_v,
|
||||
snd: snd_v,
|
||||
..
|
||||
} => [fst, snd]
|
||||
.into_iter()
|
||||
.zip([fst_v, snd_v].iter())
|
||||
.find_map(|(e, t)| e.find_node(byte_index, t))
|
||||
.or(Some(Located::Pattern(self, value.clone()))),
|
||||
_ => None,
|
||||
},
|
||||
|
||||
Pattern::Constructor {
|
||||
arguments, tipo, ..
|
||||
} => match &**tipo {
|
||||
|
@ -1322,6 +1364,7 @@ impl TypedPattern {
|
|||
}
|
||||
}
|
||||
|
||||
// TODO: This function definition is weird, see where this is used and how.
|
||||
pub fn tipo(&self, value: &TypedExpr) -> Option<Rc<Type>> {
|
||||
match self {
|
||||
Pattern::Int { .. } => Some(builtins::int()),
|
||||
|
@ -1329,7 +1372,7 @@ impl TypedPattern {
|
|||
Pattern::Var { .. } | Pattern::Assign { .. } | Pattern::Discard { .. } => {
|
||||
Some(value.tipo())
|
||||
}
|
||||
Pattern::List { .. } | Pattern::Tuple { .. } => None,
|
||||
Pattern::List { .. } | Pattern::Tuple { .. } | Pattern::Pair { .. } => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -15,11 +15,16 @@ use std::{cell::RefCell, collections::HashMap, rc::Rc};
|
|||
use strum::IntoEnumIterator;
|
||||
use uplc::builtins::DefaultFunction;
|
||||
|
||||
pub const PRELUDE: &str = "aiken";
|
||||
pub const BUILTIN: &str = "aiken/builtin";
|
||||
|
||||
pub const BYTE_ARRAY: &str = "ByteArray";
|
||||
pub const BOOL: &str = "Bool";
|
||||
pub const INT: &str = "Int";
|
||||
pub const DATA: &str = "Data";
|
||||
pub const LIST: &str = "List";
|
||||
pub const ALIST: &str = "AList";
|
||||
pub const PAIR: &str = "Pair";
|
||||
pub const VOID: &str = "Void";
|
||||
pub const G1_ELEMENT: &str = "G1Element";
|
||||
pub const G2_ELEMENT: &str = "G2Element";
|
||||
|
@ -35,7 +40,7 @@ pub const FUZZER: &str = "Fuzzer";
|
|||
/// into a compiler pipeline
|
||||
pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
||||
let mut prelude = TypeInfo {
|
||||
name: "aiken".to_string(),
|
||||
name: PRELUDE.to_string(),
|
||||
package: "".to_string(),
|
||||
kind: ModuleKind::Lib,
|
||||
types: HashMap::new(),
|
||||
|
@ -320,6 +325,24 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
},
|
||||
);
|
||||
|
||||
// Pair(a, b)
|
||||
let fst_parameter = generic_var(id_gen.next());
|
||||
let snd_parameter = generic_var(id_gen.next());
|
||||
prelude.types.insert(
|
||||
PAIR.to_string(),
|
||||
TypeConstructor {
|
||||
location: Span::empty(),
|
||||
parameters: vec![fst_parameter.clone(), snd_parameter.clone()],
|
||||
tipo: pair(fst_parameter.clone(), snd_parameter.clone()),
|
||||
module: "".to_string(),
|
||||
public: true,
|
||||
},
|
||||
);
|
||||
|
||||
prelude
|
||||
.types_constructors
|
||||
.insert(PAIR.to_string(), vec![PAIR.to_string()]);
|
||||
|
||||
// String
|
||||
prelude.types.insert(
|
||||
STRING.to_string(),
|
||||
|
@ -371,7 +394,7 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
TypeConstructor {
|
||||
location: Span::empty(),
|
||||
parameters: vec![option_value.clone()],
|
||||
tipo: option(option_value),
|
||||
tipo: option(option_value.clone()),
|
||||
module: "".to_string(),
|
||||
public: true,
|
||||
},
|
||||
|
@ -382,12 +405,10 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
vec!["Some".to_string(), "None".to_string()],
|
||||
);
|
||||
|
||||
let some = generic_var(id_gen.next());
|
||||
|
||||
prelude.values.insert(
|
||||
"Some".to_string(),
|
||||
ValueConstructor::public(
|
||||
function(vec![some.clone()], option(some)),
|
||||
function(vec![option_value.clone()], option(option_value.clone())),
|
||||
ValueConstructorVariant::Record {
|
||||
module: "".into(),
|
||||
name: "Some".to_string(),
|
||||
|
@ -399,12 +420,10 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
),
|
||||
);
|
||||
|
||||
let some = generic_var(id_gen.next());
|
||||
|
||||
prelude.values.insert(
|
||||
"None".to_string(),
|
||||
ValueConstructor::public(
|
||||
option(some),
|
||||
option(option_value),
|
||||
ValueConstructorVariant::Record {
|
||||
module: "".into(),
|
||||
name: "None".to_string(),
|
||||
|
@ -422,7 +441,6 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
// Seeded { seed: ByteArray, choices: ByteArray }
|
||||
// Replayed { cursor: Int, choices: ByteArray }
|
||||
// }
|
||||
|
||||
prelude.types.insert(
|
||||
PRNG.to_string(),
|
||||
TypeConstructor {
|
||||
|
@ -487,7 +505,6 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
//
|
||||
// pub type Fuzzer<a> =
|
||||
// fn(PRNG) -> Option<(PRNG, a)>
|
||||
|
||||
let fuzzer_value = generic_var(id_gen.next());
|
||||
prelude.types.insert(
|
||||
FUZZER.to_string(),
|
||||
|
@ -500,12 +517,28 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo {
|
|||
},
|
||||
);
|
||||
|
||||
// Map
|
||||
//
|
||||
// pub type Map<k, v> = List<Pair<k, v>>
|
||||
let alist_key = generic_var(id_gen.next());
|
||||
let alist_value = generic_var(id_gen.next());
|
||||
prelude.types.insert(
|
||||
ALIST.to_string(),
|
||||
TypeConstructor {
|
||||
location: Span::empty(),
|
||||
parameters: vec![alist_key.clone(), alist_value.clone()],
|
||||
tipo: map(alist_key, alist_value),
|
||||
module: "".to_string(),
|
||||
public: true,
|
||||
},
|
||||
);
|
||||
|
||||
prelude
|
||||
}
|
||||
|
||||
pub fn plutus(id_gen: &IdGenerator) -> TypeInfo {
|
||||
let mut plutus = TypeInfo {
|
||||
name: "aiken/builtin".to_string(),
|
||||
name: BUILTIN.to_string(),
|
||||
package: "".to_string(),
|
||||
kind: ModuleKind::Lib,
|
||||
types: HashMap::new(),
|
||||
|
@ -658,7 +691,7 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) ->
|
|||
(tipo, 2)
|
||||
}
|
||||
DefaultFunction::MapData => {
|
||||
let tipo = function(vec![list(tuple(vec![data(), data()]))], data());
|
||||
let tipo = function(vec![list(pair(data(), data()))], data());
|
||||
|
||||
(tipo, 1)
|
||||
}
|
||||
|
@ -678,12 +711,12 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) ->
|
|||
(tipo, 1)
|
||||
}
|
||||
DefaultFunction::UnConstrData => {
|
||||
let tipo = function(vec![data()], tuple(vec![int(), list(data())]));
|
||||
let tipo = function(vec![data()], pair(int(), list(data())));
|
||||
|
||||
(tipo, 1)
|
||||
}
|
||||
DefaultFunction::UnMapData => {
|
||||
let tipo = function(vec![data()], list(tuple(vec![data(), data()])));
|
||||
let tipo = function(vec![data()], list(pair(data(), data())));
|
||||
|
||||
(tipo, 1)
|
||||
}
|
||||
|
@ -728,7 +761,7 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) ->
|
|||
(tipo, 6)
|
||||
}
|
||||
DefaultFunction::MkPairData => {
|
||||
let tipo = function(vec![data(), data()], tuple(vec![data(), data()]));
|
||||
let tipo = function(vec![data(), data()], pair(data(), data()));
|
||||
(tipo, 2)
|
||||
}
|
||||
DefaultFunction::MkNilData => {
|
||||
|
@ -736,7 +769,7 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) ->
|
|||
(tipo, 0)
|
||||
}
|
||||
DefaultFunction::MkNilPairData => {
|
||||
let tipo = function(vec![], list(tuple(vec![data(), data()])));
|
||||
let tipo = function(vec![], list(pair(data(), data())));
|
||||
(tipo, 0)
|
||||
}
|
||||
DefaultFunction::ChooseUnit => {
|
||||
|
@ -752,13 +785,13 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) ->
|
|||
DefaultFunction::FstPair => {
|
||||
let a = generic_var(id_gen.next());
|
||||
let b = generic_var(id_gen.next());
|
||||
let tipo = function(vec![tuple(vec![a.clone(), b])], a);
|
||||
let tipo = function(vec![pair(a.clone(), b)], a);
|
||||
(tipo, 1)
|
||||
}
|
||||
DefaultFunction::SndPair => {
|
||||
let a = generic_var(id_gen.next());
|
||||
let b = generic_var(id_gen.next());
|
||||
let tipo = function(vec![tuple(vec![a, b.clone()])], b);
|
||||
let tipo = function(vec![pair(a, b.clone())], b);
|
||||
(tipo, 1)
|
||||
}
|
||||
DefaultFunction::ChooseList => {
|
||||
|
@ -1334,6 +1367,14 @@ pub fn tuple(elems: Vec<Rc<Type>>) -> Rc<Type> {
|
|||
Rc::new(Type::Tuple { elems, alias: None })
|
||||
}
|
||||
|
||||
pub fn pair(fst: Rc<Type>, snd: Rc<Type>) -> Rc<Type> {
|
||||
Rc::new(Type::Pair {
|
||||
fst,
|
||||
snd,
|
||||
alias: None,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn bool() -> Rc<Type> {
|
||||
Rc::new(Type::App {
|
||||
args: vec![],
|
||||
|
@ -1397,9 +1438,43 @@ pub fn fuzzer(a: Rc<Type>) -> Rc<Type> {
|
|||
})
|
||||
}
|
||||
|
||||
pub fn map(k: Rc<Type>, v: Rc<Type>) -> Rc<Type> {
|
||||
Rc::new(Type::App {
|
||||
public: true,
|
||||
contains_opaque: false,
|
||||
module: "".to_string(),
|
||||
name: LIST.to_string(),
|
||||
args: vec![pair(k, v)],
|
||||
alias: Some(
|
||||
TypeAliasAnnotation {
|
||||
alias: ALIST.to_string(),
|
||||
parameters: vec!["k".to_string(), "v".to_string()],
|
||||
annotation: Annotation::Constructor {
|
||||
location: Span::empty(),
|
||||
module: None,
|
||||
name: LIST.to_string(),
|
||||
arguments: vec![Annotation::Pair {
|
||||
location: Span::empty(),
|
||||
fst: Box::new(Annotation::Var {
|
||||
location: Span::empty(),
|
||||
name: "k".to_string(),
|
||||
}),
|
||||
snd: Box::new(Annotation::Var {
|
||||
location: Span::empty(),
|
||||
name: "v".to_string(),
|
||||
}),
|
||||
}],
|
||||
},
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
})
|
||||
}
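
In other words, the Map/AList registered here is only an alias: at the type level a Map<k, v> is a List whose element type is the new Pair, and the TypeAliasAnnotation is carried along essentially so the nicer name can be shown back to the user. A tiny self-contained illustration of that unfolding (plain Rust invented for this sketch, not an aiken API):

// Illustrative only: how the Map alias above unfolds (names invented for this sketch).
fn expand_map_alias(k: &str, v: &str) -> String {
    // Map<k, v> introduces no new runtime shape; it is List<Pair<k, v>> with a nicer name.
    format!("List<Pair<{k}, {v}>>")
}

fn main() {
    assert_eq!(expand_map_alias("Int", "Data"), "List<Pair<Int, Data>>");
}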
|
||||
|
||||
pub fn list(t: Rc<Type>) -> Rc<Type> {
|
||||
Rc::new(Type::App {
|
||||
public: true,
|
||||
// FIXME: We should probably have t.contains_opaque here?
|
||||
contains_opaque: false,
|
||||
name: LIST.to_string(),
|
||||
module: "".to_string(),
|
||||
|
@ -1433,6 +1508,7 @@ pub fn void() -> Rc<Type> {
|
|||
pub fn option(a: Rc<Type>) -> Rc<Type> {
|
||||
Rc::new(Type::App {
|
||||
public: true,
|
||||
// FIXME: We should probably have t.contains_opaque here?
|
||||
contains_opaque: false,
|
||||
name: OPTION.to_string(),
|
||||
module: "".to_string(),
|
||||
|
|
|
@ -161,6 +161,13 @@ pub enum TypedExpr {
|
|||
elems: Vec<Self>,
|
||||
},
|
||||
|
||||
Pair {
|
||||
location: Span,
|
||||
tipo: Rc<Type>,
|
||||
fst: Box<Self>,
|
||||
snd: Box<Self>,
|
||||
},
|
||||
|
||||
TupleIndex {
|
||||
location: Span,
|
||||
tipo: Rc<Type>,
|
||||
|
@ -214,6 +221,7 @@ impl TypedExpr {
|
|||
| Self::UnOp { tipo, .. }
|
||||
| Self::BinOp { tipo, .. }
|
||||
| Self::Tuple { tipo, .. }
|
||||
| Self::Pair { tipo, .. }
|
||||
| Self::String { tipo, .. }
|
||||
| Self::ByteArray { tipo, .. }
|
||||
| Self::TupleIndex { tipo, .. }
|
||||
|
@ -256,6 +264,7 @@ impl TypedExpr {
|
|||
| TypedExpr::ErrorTerm { .. }
|
||||
| TypedExpr::BinOp { .. }
|
||||
| TypedExpr::Tuple { .. }
|
||||
| TypedExpr::Pair { .. }
|
||||
| TypedExpr::UnOp { .. }
|
||||
| TypedExpr::String { .. }
|
||||
| TypedExpr::Sequence { .. }
|
||||
|
@ -299,6 +308,7 @@ impl TypedExpr {
|
|||
| Self::List { location, .. }
|
||||
| Self::BinOp { location, .. }
|
||||
| Self::Tuple { location, .. }
|
||||
| Self::Pair { location, .. }
|
||||
| Self::String { location, .. }
|
||||
| Self::UnOp { location, .. }
|
||||
| Self::Pipeline { location, .. }
|
||||
|
@ -337,6 +347,7 @@ impl TypedExpr {
|
|||
| Self::List { location, .. }
|
||||
| Self::BinOp { location, .. }
|
||||
| Self::Tuple { location, .. }
|
||||
| Self::Pair { location, .. }
|
||||
| Self::String { location, .. }
|
||||
| Self::UnOp { location, .. }
|
||||
| Self::Sequence { location, .. }
|
||||
|
@ -392,6 +403,11 @@ impl TypedExpr {
|
|||
.find_map(|e| e.find_node(byte_index))
|
||||
.or(Some(Located::Expression(self))),
|
||||
|
||||
TypedExpr::Pair { fst, snd, .. } => [fst, snd]
|
||||
.iter()
|
||||
.find_map(|e| e.find_node(byte_index))
|
||||
.or(Some(Located::Expression(self))),
|
||||
|
||||
TypedExpr::List { elements, tail, .. } => elements
|
||||
.iter()
|
||||
.find_map(|e| e.find_node(byte_index))
|
||||
|
@ -578,6 +594,12 @@ pub enum UntypedExpr {
|
|||
elems: Vec<Self>,
|
||||
},
|
||||
|
||||
Pair {
|
||||
location: Span,
|
||||
fst: Box<Self>,
|
||||
snd: Box<Self>,
|
||||
},
|
||||
|
||||
TupleIndex {
|
||||
location: Span,
|
||||
index: usize,
|
||||
|
@ -771,11 +793,10 @@ impl UntypedExpr {
|
|||
},
|
||||
|
||||
uplc::ast::Constant::ProtoPair(_, _, left, right) => match tipo {
|
||||
Type::Tuple { elems, .. } => Ok(UntypedExpr::Tuple {
|
||||
location: Span::empty(),
|
||||
elems: [left.as_ref(), right.as_ref()]
|
||||
Type::Pair { fst, snd, .. } => {
|
||||
let elems = [left.as_ref(), right.as_ref()]
|
||||
.into_iter()
|
||||
.zip(elems)
|
||||
.zip([fst, snd])
|
||||
.map(|(arg, arg_type)| {
|
||||
UntypedExpr::do_reify_constant(
|
||||
generics,
|
||||
|
@ -784,10 +805,16 @@ impl UntypedExpr {
|
|||
arg_type,
|
||||
)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
}),
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Ok(UntypedExpr::Pair {
|
||||
location: Span::empty(),
|
||||
fst: elems.first().unwrap().to_owned().into(),
|
||||
snd: elems.last().unwrap().to_owned().into(),
|
||||
})
|
||||
}
|
||||
_ => Err(format!(
|
||||
"invalid type annotation. expected Tuple but got: {tipo:?}"
|
||||
"invalid type annotation. expected Pair but got: {tipo:?}"
|
||||
)),
|
||||
},
|
||||
|
||||
|
@ -882,9 +909,10 @@ impl UntypedExpr {
|
|||
location: Span::empty(),
|
||||
elements: kvs
|
||||
.into_iter()
|
||||
.map(|(k, v)| UntypedExpr::Tuple {
|
||||
.map(|(k, v)| UntypedExpr::Pair {
|
||||
location: Span::empty(),
|
||||
elems: vec![UntypedExpr::reify_blind(k), UntypedExpr::reify_blind(v)],
|
||||
fst: UntypedExpr::reify_blind(k).into(),
|
||||
snd: UntypedExpr::reify_blind(v).into(),
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
tail: None,
|
||||
|
@ -1000,6 +1028,21 @@ impl UntypedExpr {
|
|||
})
|
||||
.collect::<Result<Vec<_>, _>>()?,
|
||||
}),
|
||||
Type::Pair { fst, snd, .. } => {
|
||||
let mut elems = args
|
||||
.into_iter()
|
||||
.zip([fst, snd])
|
||||
.map(|(arg, arg_type)| {
|
||||
UntypedExpr::do_reify_data(generics, data_types, arg, arg_type)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
Ok(UntypedExpr::Pair {
|
||||
location: Span::empty(),
|
||||
fst: elems.remove(0).into(),
|
||||
snd: elems.remove(0).into(),
|
||||
})
|
||||
}
|
||||
_ => Err(format!(
|
||||
"invalid type annotation. expected List but got: {tipo:?}"
|
||||
)),
|
||||
|
@ -1272,6 +1315,7 @@ impl UntypedExpr {
|
|||
| Self::ByteArray { location, .. }
|
||||
| Self::BinOp { location, .. }
|
||||
| Self::Tuple { location, .. }
|
||||
| Self::Pair { location, .. }
|
||||
| Self::String { location, .. }
|
||||
| Self::Assignment { location, .. }
|
||||
| Self::TupleIndex { location, .. }
|
||||
|
|
|
@ -424,6 +424,14 @@ impl<'comments> Formatter<'comments> {
|
|||
Annotation::Tuple { elems, .. } => {
|
||||
wrap_args(elems.iter().map(|t| (self.annotation(t), false)))
|
||||
}
|
||||
Annotation::Pair { fst, snd, .. } => "Pair"
|
||||
.to_doc()
|
||||
.append("<")
|
||||
.append(self.annotation(fst))
|
||||
.append(break_(",", ", "))
|
||||
.append(self.annotation(snd))
|
||||
.append(">")
|
||||
.group(),
|
||||
}
|
||||
.group()
|
||||
}
|
||||
|
@ -979,6 +987,15 @@ impl<'comments> Formatter<'comments> {
|
|||
wrap_args(elems.iter().map(|e| (self.wrap_expr(e), false))).group()
|
||||
}
|
||||
|
||||
UntypedExpr::Pair { fst, snd, .. } => "Pair"
|
||||
.to_doc()
|
||||
.append("(")
|
||||
.append(self.expr(fst, false))
|
||||
.append(break_(",", ", "))
|
||||
.append(self.expr(snd, false))
|
||||
.append(")")
|
||||
.group(),
|
||||
|
||||
UntypedExpr::TupleIndex { index, tuple, .. } => {
|
||||
let suffix = Ordinal(*index + 1).suffix().to_doc();
|
||||
self.expr(tuple, false)
|
||||
|
@ -1778,6 +1795,15 @@ impl<'comments> Formatter<'comments> {
|
|||
wrap_args(elems.iter().map(|e| (self.pattern(e), false))).group()
|
||||
}
|
||||
|
||||
Pattern::Pair { fst, snd, .. } => "Pair"
|
||||
.to_doc()
|
||||
.append("(")
|
||||
.append(self.pattern(fst))
|
||||
.append(break_(",", ", "))
|
||||
.append(self.pattern(snd))
|
||||
.append(")")
|
||||
.group(),
|
||||
|
||||
Pattern::List { elements, tail, .. } => {
|
||||
let elements_document =
|
||||
join(elements.iter().map(|e| self.pattern(e)), break_(",", ", "));
|
||||
|
|
(The diff for one file was suppressed because it is too large.)
@ -51,6 +51,9 @@ pub enum Air {
|
|||
tipo: Rc<Type>,
|
||||
count: usize,
|
||||
},
|
||||
Pair {
|
||||
tipo: Rc<Type>,
|
||||
},
|
||||
Void,
|
||||
Var {
|
||||
constructor: ValueConstructor,
|
||||
|
@ -136,6 +139,13 @@ pub enum Air {
|
|||
subject_name: String,
|
||||
complex_clause: bool,
|
||||
},
|
||||
PairClause {
|
||||
subject_tipo: Rc<Type>,
|
||||
subject_name: String,
|
||||
fst_name: Option<String>,
|
||||
snd_name: Option<String>,
|
||||
complex_clause: bool,
|
||||
},
|
||||
ClauseGuard {
|
||||
subject_name: String,
|
||||
subject_tipo: Rc<Type>,
|
||||
|
@ -151,6 +161,12 @@ pub enum Air {
|
|||
indices: IndexSet<(usize, String)>,
|
||||
subject_name: String,
|
||||
},
|
||||
PairGuard {
|
||||
subject_tipo: Rc<Type>,
|
||||
subject_name: String,
|
||||
fst_name: Option<String>,
|
||||
snd_name: Option<String>,
|
||||
},
|
||||
Finally,
|
||||
// If
|
||||
If {
|
||||
|
@ -190,6 +206,13 @@ pub enum Air {
|
|||
tipo: Rc<Type>,
|
||||
is_expect: bool,
|
||||
},
|
||||
// Tuple Access
|
||||
PairAccessor {
|
||||
fst: Option<String>,
|
||||
snd: Option<String>,
|
||||
tipo: Rc<Type>,
|
||||
is_expect: bool,
|
||||
},
|
||||
// Misc.
|
||||
ErrorTerm {
|
||||
tipo: Rc<Type>,
|
||||
|
|
|
@ -95,6 +95,7 @@ pub enum SpecificClause {
|
|||
TupleClause {
|
||||
defined_tuple_indices: IndexSet<(usize, String)>,
|
||||
},
|
||||
PairClause,
|
||||
}
|
||||
|
||||
impl ClauseProperties {
|
||||
|
@ -123,6 +124,15 @@ impl ClauseProperties {
|
|||
defined_tuple_indices: IndexSet::new(),
|
||||
},
|
||||
}
|
||||
} else if t.is_pair() {
|
||||
ClauseProperties {
|
||||
clause_var_name: constr_var,
|
||||
complex_clause: false,
|
||||
original_subject_name: subject_name,
|
||||
needs_constr_var: false,
|
||||
final_clause: false,
|
||||
specific_clause: SpecificClause::PairClause,
|
||||
}
|
||||
} else {
|
||||
ClauseProperties {
|
||||
clause_var_name: constr_var,
|
||||
|
@ -165,6 +175,15 @@ impl ClauseProperties {
|
|||
defined_tuple_indices: IndexSet::new(),
|
||||
},
|
||||
}
|
||||
} else if t.is_pair() {
|
||||
ClauseProperties {
|
||||
clause_var_name: constr_var,
|
||||
complex_clause: false,
|
||||
original_subject_name: subject_name,
|
||||
needs_constr_var: false,
|
||||
final_clause,
|
||||
specific_clause: SpecificClause::PairClause,
|
||||
}
|
||||
} else {
|
||||
ClauseProperties {
|
||||
clause_var_name: constr_var,
|
||||
|
@ -340,38 +359,25 @@ pub fn handle_clause_guard(clause_guard: &TypedClauseGuard) -> AirTree {
|
|||
}
|
||||
|
||||
pub fn get_generic_variant_name(t: &Rc<Type>) -> String {
|
||||
if t.is_string() {
|
||||
"_string".to_string()
|
||||
} else if t.is_int() {
|
||||
"_int".to_string()
|
||||
} else if t.is_bool() {
|
||||
"_bool".to_string()
|
||||
} else if t.is_bytearray() {
|
||||
"_bytearray".to_string()
|
||||
} else if t.is_bls381_12_g1() {
|
||||
"_bls381_12_g1".to_string()
|
||||
} else if t.is_bls381_12_g2() {
|
||||
"_bls381_12_g2".to_string()
|
||||
} else if t.is_ml_result() {
|
||||
"_ml_result".to_string()
|
||||
} else if t.is_map() {
|
||||
"_map".to_string()
|
||||
} else if t.is_2_tuple() {
|
||||
"_pair".to_string()
|
||||
} else if t.is_list() {
|
||||
"_list".to_string()
|
||||
} else if t.is_tuple() {
|
||||
"_tuple".to_string()
|
||||
} else if t.is_unbound() {
|
||||
"_unbound".to_string()
|
||||
} else {
|
||||
let full_type = "_data".to_string();
|
||||
let uplc_type = t.get_uplc_type();
|
||||
|
||||
if t.is_generic() {
|
||||
panic!("FOUND A POLYMORPHIC TYPE. EXPECTED MONOMORPHIC TYPE");
|
||||
match uplc_type {
|
||||
Some(UplcType::Bool) => "_bool".to_string(),
|
||||
Some(UplcType::Integer) => "_int".to_string(),
|
||||
Some(UplcType::String) => "_string".to_string(),
|
||||
Some(UplcType::ByteString) => "_bytearray".to_string(),
|
||||
Some(UplcType::Unit) => "_void".to_string(),
|
||||
Some(UplcType::List(_)) if t.is_map() => "_map".to_string(),
|
||||
Some(UplcType::List(_)) => "_list".to_string(),
|
||||
Some(UplcType::Pair(_, _)) => "_pair".to_string(),
|
||||
Some(UplcType::Bls12_381G1Element) => "_bls381_12_g1".to_string(),
|
||||
Some(UplcType::Bls12_381G2Element) => "_bls381_12_g2".to_string(),
|
||||
Some(UplcType::Bls12_381MlResult) => "_ml_result".to_string(),
|
||||
None if t.is_unbound() => "_unbound".to_string(),
|
||||
None if t.is_generic() => {
|
||||
unreachable!("FOUND A POLYMORPHIC TYPE. EXPECTED MONOMORPHIC TYPE")
|
||||
}
|
||||
|
||||
full_type
|
||||
None | Some(UplcType::Data) => "_data".to_string(),
|
||||
}
|
||||
}
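
Put differently, the variant suffix is now derived from the type's UPLC-level representation (via get_uplc_type) rather than from a chain of is_* checks; one visible consequence is that Unit-typed values appear to get their own _void suffix where the old chain fell through to _data. The standalone table below restates the mapping for reference; the Repr enum and suffix function are stand-ins invented for this sketch, not the real Type/UplcType API.

// Stand-in enum restating the suffix table from `get_generic_variant_name` above.
#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Repr {
    Bool, Int, Str, Bytes, Unit, Map, List, Pair, Bls12381G1, Bls12381G2, MlResult, Unbound, Data,
}

fn suffix(r: Repr) -> &'static str {
    match r {
        Repr::Bool => "_bool",
        Repr::Int => "_int",
        Repr::Str => "_string",
        Repr::Bytes => "_bytearray",
        Repr::Unit => "_void",
        Repr::Map => "_map",
        Repr::List => "_list",
        Repr::Pair => "_pair",
        Repr::Bls12381G1 => "_bls381_12_g1",
        Repr::Bls12381G2 => "_bls381_12_g2",
        Repr::MlResult => "_ml_result",
        Repr::Unbound => "_unbound",
        Repr::Data => "_data",
    }
}

fn main() {
    assert_eq!(suffix(Repr::Pair), "_pair");
    assert_eq!(suffix(Repr::Unit), "_void");
}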
|
||||
|
||||
|
@ -673,11 +679,14 @@ pub fn pattern_has_conditions(
|
|||
Pattern::Tuple { elems, .. } => elems
|
||||
.iter()
|
||||
.any(|elem| pattern_has_conditions(elem, data_types)),
|
||||
Pattern::Pair { fst, snd, .. } => {
|
||||
pattern_has_conditions(fst, data_types) || pattern_has_conditions(snd, data_types)
|
||||
}
|
||||
Pattern::Constructor {
|
||||
arguments, tipo, ..
|
||||
} => {
|
||||
let data_type =
|
||||
lookup_data_type_by_tipo(data_types, tipo).expect("Data type not found");
|
||||
let data_type = lookup_data_type_by_tipo(data_types, tipo)
|
||||
.unwrap_or_else(|| panic!("Data type not found: {:#?}", tipo));
|
||||
|
||||
data_type.constructors.len() > 1
|
||||
|| arguments
|
||||
|
@ -931,62 +940,54 @@ pub fn find_list_clause_or_default_first(clauses: &[TypedClause]) -> &TypedClaus
|
|||
}
|
||||
|
||||
pub fn known_data_to_type(term: Term<Name>, field_type: &Type) -> Term<Name> {
|
||||
if field_type.is_int() {
|
||||
Term::un_i_data().apply(term)
|
||||
} else if field_type.is_bytearray() {
|
||||
Term::un_b_data().apply(term)
|
||||
} else if field_type.is_void() {
|
||||
Term::unit().lambda("_").apply(term)
|
||||
} else if field_type.is_map() {
|
||||
Term::unmap_data().apply(term)
|
||||
} else if field_type.is_string() {
|
||||
Term::Builtin(DefaultFunction::DecodeUtf8).apply(Term::un_b_data().apply(term))
|
||||
} else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) {
|
||||
Term::mk_pair_data()
|
||||
let uplc_type = field_type.get_uplc_type();
|
||||
|
||||
match uplc_type {
|
||||
Some(UplcType::Integer) => Term::un_i_data().apply(term),
|
||||
Some(UplcType::ByteString) => Term::un_b_data().apply(term),
|
||||
Some(UplcType::Bool) => Term::less_than_integer()
|
||||
.apply(Term::integer(0.into()))
|
||||
.apply(Term::fst_pair().apply(Term::unconstr_data().apply(term))),
|
||||
Some(UplcType::String) => Term::decode_utf8().apply(Term::un_b_data().apply(term)),
|
||||
Some(UplcType::Unit) => Term::unit().lambda("_").apply(term),
|
||||
Some(UplcType::List(_)) if field_type.is_map() => Term::unmap_data().apply(term),
|
||||
Some(UplcType::List(_)) => Term::unlist_data().apply(term),
|
||||
Some(UplcType::Pair(_, _)) => Term::mk_pair_data()
|
||||
.apply(Term::head_list().apply(Term::var("__list_data")))
|
||||
.apply(Term::head_list().apply(Term::tail_list().apply(Term::var("__list_data"))))
|
||||
.lambda("__list_data")
|
||||
.apply(Term::unlist_data().apply(term))
|
||||
} else if field_type.is_list() || field_type.is_tuple() {
|
||||
Term::unlist_data().apply(term)
|
||||
} else if field_type.is_bool() {
|
||||
Term::less_than_integer()
|
||||
.apply(Term::integer(0.into()))
|
||||
.apply(Term::fst_pair().apply(Term::unconstr_data().apply(term)))
|
||||
} else if field_type.is_bls381_12_g1() {
|
||||
Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term))
|
||||
} else if field_type.is_bls381_12_g2() {
|
||||
Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term))
|
||||
} else if field_type.is_ml_result() {
|
||||
panic!("ML Result not supported")
|
||||
} else {
|
||||
term
|
||||
.apply(Term::unlist_data().apply(term)),
|
||||
|
||||
Some(UplcType::Bls12_381G1Element) => {
|
||||
Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term))
|
||||
}
|
||||
Some(UplcType::Bls12_381G2Element) => {
|
||||
Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term))
|
||||
}
|
||||
Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"),
|
||||
Some(UplcType::Data) | None => term,
|
||||
}
|
||||
}
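
One thing worth making explicit about the conversion helpers in this file: known_data_to_type (above) assumes the Data value already has the advertised shape and applies the unwrapping builtin directly; unknown_data_to_type (below) performs the same conversions but guarded, evaluating to Term::Error when the shape is wrong; and unknown_data_to_type_debug additionally takes an explicit error_term so the failure can carry a trace. The toy sketch below only illustrates that layering; the Data enum and both functions are invented for the example and are not the uplc Term API.

// Toy model of the layering described above; not the real uplc `Term` machinery.
#[derive(Clone, Debug)]
enum Data {
    Int(i64),
    Bytes(Vec<u8>),
}

// "known": the caller guarantees the shape, so unwrap directly (no guard emitted).
fn known_int(d: Data) -> i64 {
    match d {
        Data::Int(i) => i,
        _ => unreachable!("caller promised an Int-shaped Data"),
    }
}

// "unknown": same conversion, but checked; the real code branches on the Data's
// shape and falls back to Term::Error (or a supplied error_term in the _debug variant).
fn unknown_int(d: Data) -> Result<i64, &'static str> {
    match d {
        Data::Int(i) => Ok(i),
        _ => Err("expected an Int-shaped Data"),
    }
}

fn main() {
    assert_eq!(known_int(Data::Int(7)), 7);
    assert!(unknown_int(Data::Bytes(vec![0])).is_err());
}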
|
||||
|
||||
pub fn unknown_data_to_type(term: Term<Name>, field_type: &Type) -> Term<Name> {
|
||||
if field_type.is_int() {
|
||||
Term::un_i_data().apply(term)
|
||||
} else if field_type.is_bytearray() {
|
||||
Term::un_b_data().apply(term)
|
||||
} else if field_type.is_void() {
|
||||
Term::equals_integer()
|
||||
.apply(Term::integer(0.into()))
|
||||
.apply(Term::fst_pair().apply(Term::var("__pair__")))
|
||||
.delayed_if_then_else(
|
||||
Term::snd_pair()
|
||||
.apply(Term::var("__pair__"))
|
||||
.delayed_choose_list(Term::unit(), Term::Error),
|
||||
Term::Error,
|
||||
)
|
||||
.lambda("__pair__")
|
||||
.apply(Term::unconstr_data().apply(term))
|
||||
} else if field_type.is_map() {
|
||||
Term::unmap_data().apply(term)
|
||||
} else if field_type.is_string() {
|
||||
Term::Builtin(DefaultFunction::DecodeUtf8).apply(Term::un_b_data().apply(term))
|
||||
} else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) {
|
||||
Term::tail_list()
|
||||
let uplc_type = field_type.get_uplc_type();
|
||||
|
||||
match uplc_type {
|
||||
Some(UplcType::Integer) => Term::un_i_data().apply(term),
|
||||
Some(UplcType::ByteString) => Term::un_b_data().apply(term),
|
||||
Some(UplcType::String) => Term::decode_utf8().apply(Term::un_b_data().apply(term)),
|
||||
Some(UplcType::List(_)) if field_type.is_map() => Term::unmap_data().apply(term),
|
||||
Some(UplcType::List(_)) => Term::unlist_data().apply(term),
|
||||
|
||||
Some(UplcType::Bls12_381G1Element) => {
|
||||
Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term))
|
||||
}
|
||||
Some(UplcType::Bls12_381G2Element) => {
|
||||
Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term))
|
||||
}
|
||||
Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"),
|
||||
|
||||
Some(UplcType::Pair(_, _)) => Term::tail_list()
|
||||
.apply(Term::tail_list().apply(Term::var("__list_data")))
|
||||
.delayed_choose_list(
|
||||
Term::mk_pair_data()
|
||||
|
@ -997,11 +998,8 @@ pub fn unknown_data_to_type(term: Term<Name>, field_type: &Type) -> Term<Name> {
|
|||
Term::Error,
|
||||
)
|
||||
.lambda("__list_data")
|
||||
.apply(Term::unlist_data().apply(term))
|
||||
} else if field_type.is_list() || field_type.is_tuple() {
|
||||
Term::unlist_data().apply(term)
|
||||
} else if field_type.is_bool() {
|
||||
Term::snd_pair()
|
||||
.apply(Term::unlist_data().apply(term)),
|
||||
Some(UplcType::Bool) => Term::snd_pair()
|
||||
.apply(Term::var("__pair__"))
|
||||
.delayed_choose_list(
|
||||
Term::equals_integer()
|
||||
|
@ -1017,25 +1015,35 @@ pub fn unknown_data_to_type(term: Term<Name>, field_type: &Type) -> Term<Name> {
|
|||
Term::Error,
|
||||
)
|
||||
.lambda("__pair__")
|
||||
.apply(Term::unconstr_data().apply(term))
|
||||
} else if field_type.is_bls381_12_g1() {
|
||||
Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term))
|
||||
} else if field_type.is_bls381_12_g2() {
|
||||
Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term))
|
||||
} else if field_type.is_ml_result() {
|
||||
panic!("ML Result not supported")
|
||||
} else {
|
||||
term
|
||||
.apply(Term::unconstr_data().apply(term)),
|
||||
Some(UplcType::Unit) => Term::equals_integer()
|
||||
.apply(Term::integer(0.into()))
|
||||
.apply(Term::fst_pair().apply(Term::var("__pair__")))
|
||||
.delayed_if_then_else(
|
||||
Term::snd_pair()
|
||||
.apply(Term::var("__pair__"))
|
||||
.delayed_choose_list(Term::unit(), Term::Error),
|
||||
Term::Error,
|
||||
)
|
||||
.lambda("__pair__")
|
||||
.apply(Term::unconstr_data().apply(term)),
|
||||
|
||||
Some(UplcType::Data) | None => term,
|
||||
}
|
||||
}
|
||||
|
||||
/// Due to the nature of the types BLS12_381_G1Element and BLS12_381_G2Element and String coming from bytearray
|
||||
/// We don't have error handling if the bytearray is not properly aligned to the type. Oh well lol
|
||||
/// For BLS12_381_G1Element and BLS12_381_G2Element, hash to group exists so just adopt that.
|
||||
pub fn unknown_data_to_type_debug(
|
||||
term: Term<Name>,
|
||||
field_type: &Type,
|
||||
error_term: Term<Name>,
|
||||
) -> Term<Name> {
|
||||
if field_type.is_int() {
|
||||
Term::var("__val")
|
||||
let uplc_type = field_type.get_uplc_type();
|
||||
|
||||
match uplc_type {
|
||||
Some(UplcType::Integer) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
|
@ -1044,9 +1052,8 @@ pub fn unknown_data_to_type_debug(
|
|||
error_term.clone(),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_bytearray() {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
Some(UplcType::ByteString) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
|
@ -1055,28 +1062,19 @@ pub fn unknown_data_to_type_debug(
|
|||
Term::un_b_data().apply(Term::var("__val")),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_void() {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
Some(UplcType::String) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
Term::equals_integer()
|
||||
.apply(Term::integer(0.into()))
|
||||
.apply(Term::fst_pair().apply(Term::unconstr_data().apply(Term::var("__val"))))
|
||||
.delayed_if_then_else(
|
||||
Term::snd_pair()
|
||||
.apply(Term::unconstr_data().apply(Term::var("__val")))
|
||||
.delayed_choose_list(Term::unit(), error_term.clone()),
|
||||
error_term.clone(),
|
||||
),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
Term::decode_utf8().apply(Term::un_b_data().apply(Term::var("__val"))),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_map() {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
|
||||
Some(UplcType::List(_)) if field_type.is_map() => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
Term::unmap_data().apply(Term::var("__val")),
|
||||
|
@ -1085,21 +1083,40 @@ pub fn unknown_data_to_type_debug(
|
|||
error_term.clone(),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_string() {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
Some(UplcType::List(_)) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
Term::unlist_data().apply(Term::var("__val")),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term),
|
||||
|
||||
Some(UplcType::Bls12_381G1Element) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
Term::Builtin(DefaultFunction::DecodeUtf8)
|
||||
.apply(Term::un_b_data().apply(Term::var("__val"))),
|
||||
Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
Some(UplcType::Bls12_381G2Element) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term),
|
||||
Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"),
|
||||
Some(UplcType::Pair(_, _)) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
|
@ -1129,20 +1146,8 @@ pub fn unknown_data_to_type_debug(
|
|||
error_term.clone(),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_list() || field_type.is_tuple() {
|
||||
Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
Term::unlist_data().apply(Term::var("__val")),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_bool() {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
Some(UplcType::Bool) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
Term::snd_pair()
|
||||
.apply(Term::var("__pair__"))
|
||||
|
@ -1167,35 +1172,29 @@ pub fn unknown_data_to_type_debug(
|
|||
error_term.clone(),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_bls381_12_g1() {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
Some(UplcType::Unit) => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
Term::equals_integer()
|
||||
.apply(Term::integer(0.into()))
|
||||
.apply(Term::fst_pair().apply(Term::unconstr_data().apply(Term::var("__val"))))
|
||||
.delayed_if_then_else(
|
||||
Term::snd_pair()
|
||||
.apply(Term::unconstr_data().apply(Term::var("__val")))
|
||||
.delayed_choose_list(Term::unit(), error_term.clone()),
|
||||
error_term.clone(),
|
||||
),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_bls381_12_g2() {
|
||||
Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
error_term.clone(),
|
||||
Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
} else if field_type.is_ml_result() {
|
||||
panic!("ML Result not supported")
|
||||
} else if field_type.is_data() {
|
||||
term
|
||||
} else {
|
||||
Term::var("__val")
|
||||
.apply(term),
|
||||
|
||||
Some(UplcType::Data) => term,
|
||||
// constr type
|
||||
None => Term::var("__val")
|
||||
.delayed_choose_data(
|
||||
Term::var("__val"),
|
||||
error_term.clone(),
|
||||
|
@ -1204,7 +1203,7 @@ pub fn unknown_data_to_type_debug(
|
|||
error_term.clone(),
|
||||
)
|
||||
.lambda("__val")
|
||||
.apply(term)
|
||||
.apply(term),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1296,25 +1295,23 @@ pub fn convert_constants_to_data(constants: Vec<Rc<UplcConstant>>) -> Vec<UplcCo
|
|||
}
|
||||
|
||||
pub fn convert_type_to_data(term: Term<Name>, field_type: &Rc<Type>) -> Term<Name> {
|
||||
if field_type.is_bytearray() {
|
||||
Term::b_data().apply(term)
|
||||
} else if field_type.is_int() {
|
||||
Term::i_data().apply(term)
|
||||
} else if field_type.is_void() {
|
||||
term.choose_unit(Term::Constant(
|
||||
UplcConstant::Data(PlutusData::Constr(Constr {
|
||||
tag: convert_constr_to_tag(0).unwrap(),
|
||||
any_constructor: None,
|
||||
fields: vec![],
|
||||
}))
|
||||
.into(),
|
||||
))
|
||||
} else if field_type.is_map() {
|
||||
Term::map_data().apply(term)
|
||||
} else if field_type.is_string() {
|
||||
Term::b_data().apply(Term::Builtin(DefaultFunction::EncodeUtf8).apply(term))
|
||||
} else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) {
|
||||
Term::list_data()
|
||||
let uplc_type = field_type.get_uplc_type();
|
||||
|
||||
match uplc_type {
|
||||
Some(UplcType::Integer) => Term::i_data().apply(term),
|
||||
Some(UplcType::String) => Term::b_data().apply(Term::encode_utf8().apply(term)),
|
||||
Some(UplcType::ByteString) => Term::b_data().apply(term),
|
||||
Some(UplcType::List(_)) if field_type.is_map() => Term::map_data().apply(term),
|
||||
Some(UplcType::List(_)) => Term::list_data().apply(term),
|
||||
|
||||
Some(UplcType::Bls12_381G1Element) => {
|
||||
Term::b_data().apply(Term::bls12_381_g1_compress().apply(term))
|
||||
}
|
||||
Some(UplcType::Bls12_381G2Element) => {
|
||||
Term::b_data().apply(Term::bls12_381_g2_compress().apply(term))
|
||||
}
|
||||
Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"),
|
||||
Some(UplcType::Pair(_, _)) => Term::list_data()
|
||||
.apply(
|
||||
Term::mk_cons()
|
||||
.apply(Term::fst_pair().apply(Term::var("__pair")))
|
||||
|
@ -1325,11 +1322,18 @@ pub fn convert_type_to_data(term: Term<Name>, field_type: &Rc<Type>) -> Term<Nam
|
|||
),
|
||||
)
|
||||
.lambda("__pair")
|
||||
.apply(term)
|
||||
} else if field_type.is_list() || field_type.is_tuple() {
|
||||
Term::list_data().apply(term)
|
||||
} else if field_type.is_bool() {
|
||||
term.if_then_else(
|
||||
.apply(term),
|
||||
Some(UplcType::Unit) => Term::Constant(
|
||||
UplcConstant::Data(PlutusData::Constr(Constr {
|
||||
tag: convert_constr_to_tag(0).unwrap(),
|
||||
any_constructor: None,
|
||||
fields: vec![],
|
||||
}))
|
||||
.into(),
|
||||
)
|
||||
.lambda("_")
|
||||
.apply(term),
|
||||
Some(UplcType::Bool) => term.if_then_else(
|
||||
Term::Constant(
|
||||
UplcConstant::Data(PlutusData::Constr(Constr {
|
||||
tag: convert_constr_to_tag(1).unwrap(),
|
||||
|
@ -1346,15 +1350,9 @@ pub fn convert_type_to_data(term: Term<Name>, field_type: &Rc<Type>) -> Term<Nam
|
|||
}))
|
||||
.into(),
|
||||
),
|
||||
)
|
||||
} else if field_type.is_bls381_12_g1() {
|
||||
Term::b_data().apply(Term::bls12_381_g1_compress().apply(term))
|
||||
} else if field_type.is_bls381_12_g2() {
|
||||
Term::b_data().apply(Term::bls12_381_g2_compress().apply(term))
|
||||
} else if field_type.is_ml_result() {
|
||||
panic!("ML Result not supported")
|
||||
} else {
|
||||
term
|
||||
),
|
||||
|
||||
Some(UplcType::Data) | None => term,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1410,7 +1408,7 @@ pub fn list_access_to_uplc(
|
|||
let head_item = |name, tipo: &Rc<Type>, tail_name: &str| {
|
||||
if name == "_" {
|
||||
Term::unit()
|
||||
} else if matches!(tipo.get_uplc_type(), UplcType::Pair(_, _)) && is_list_accessor {
|
||||
} else if tipo.is_pair() && is_list_accessor {
|
||||
Term::head_list().apply(Term::var(tail_name.to_string()))
|
||||
} else if matches!(expect_level, ExpectLevel::Full) {
|
||||
// Expect level is full so we have an unknown piece of data to cast
|
||||
|
@ -1765,7 +1763,7 @@ pub fn get_list_elements_len_and_tail(
|
|||
pub fn cast_validator_args(term: Term<Name>, arguments: &[TypedArg]) -> Term<Name> {
|
||||
let mut term = term;
|
||||
for arg in arguments.iter().rev() {
|
||||
if !matches!(arg.tipo.get_uplc_type(), UplcType::Data) {
|
||||
if !matches!(arg.tipo.get_uplc_type(), Some(UplcType::Data) | None) {
|
||||
term = term
|
||||
.lambda(arg.arg_name.get_variable_name().unwrap_or("_"))
|
||||
.apply(known_data_to_type(
|
||||
|
@ -1852,6 +1850,7 @@ pub fn air_holds_msg(air: &Air) -> bool {
|
|||
|
||||
Air::FieldsExpose { is_expect, .. }
|
||||
| Air::TupleAccessor { is_expect, .. }
|
||||
| Air::PairAccessor { is_expect, .. }
|
||||
| Air::CastFromData { is_expect, .. } => *is_expect,
|
||||
|
||||
Air::ListAccessor { expect_level, .. } => {
|
||||
|
|
|
@@ -162,6 +162,13 @@ pub enum AirTree {
        subject_name: String,
        then: Box<AirTree>,
    },
    PairGuard {
        subject_tipo: Rc<Type>,
        subject_name: String,
        fst_name: Option<String>,
        snd_name: Option<String>,
        then: Box<AirTree>,
    },
    // Field Access
    FieldsExpose {
        indices: Vec<(usize, String, Rc<Type>)>,
@@ -195,6 +202,16 @@ pub enum AirTree {
        msg: Option<AirMsg>,
        then: Box<AirTree>,
    },
    // Pair Access
    PairAccessor {
        fst: Option<String>,
        snd: Option<String>,
        tipo: Rc<Type>,
        is_expect: bool,
        msg: Option<AirMsg>,
        pair: Box<AirTree>,
        then: Box<AirTree>,
    },
    // Misc.
    FieldsEmpty {
        constr: Box<AirTree>,
@@ -237,6 +254,11 @@ pub enum AirTree {
        tipo: Rc<Type>,
        items: Vec<AirTree>,
    },
    Pair {
        tipo: Rc<Type>,
        fst: Box<AirTree>,
        snd: Box<AirTree>,
    },
    Void,
    Var {
        constructor: ValueConstructor,
@@ -320,6 +342,16 @@ pub enum AirTree {
        otherwise: Box<AirTree>,
    },

    PairClause {
        subject_tipo: Rc<Type>,
        subject_name: String,
        fst_name: Option<String>,
        snd_name: Option<String>,
        complex_clause: bool,
        then: Box<AirTree>,
        otherwise: Box<AirTree>,
    },

    Finally {
        pattern: Box<AirTree>,
        then: Box<AirTree>,
@ -363,20 +395,25 @@ impl AirTree {
|
|||
value: value.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string(value: impl ToString) -> AirTree {
|
||||
AirTree::String {
|
||||
value: value.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn byte_array(bytes: Vec<u8>) -> AirTree {
|
||||
AirTree::ByteArray { bytes }
|
||||
}
|
||||
|
||||
pub fn curve(point: Curve) -> AirTree {
|
||||
AirTree::CurvePoint { point }
|
||||
}
|
||||
|
||||
pub fn bool(value: bool) -> AirTree {
|
||||
AirTree::Bool { value }
|
||||
}
|
||||
|
||||
pub fn list(mut items: Vec<AirTree>, tipo: Rc<Type>, tail: Option<AirTree>) -> AirTree {
|
||||
if let Some(tail) = tail {
|
||||
items.push(tail);
|
||||
|
@ -394,12 +431,23 @@ impl AirTree {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tuple(items: Vec<AirTree>, tipo: Rc<Type>) -> AirTree {
|
||||
AirTree::Tuple { tipo, items }
|
||||
}
|
||||
|
||||
pub fn pair(fst: AirTree, snd: AirTree, tipo: Rc<Type>) -> AirTree {
|
||||
AirTree::Pair {
|
||||
tipo,
|
||||
fst: fst.into(),
|
||||
snd: snd.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn void() -> AirTree {
|
||||
AirTree::Void
|
||||
}
|
||||
|
||||
pub fn var(
|
||||
constructor: ValueConstructor,
|
||||
name: impl ToString,
|
||||
|
@ -411,6 +459,7 @@ impl AirTree {
|
|||
variant_name: variant_name.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn local_var(name: impl ToString, tipo: Rc<Type>) -> AirTree {
|
||||
AirTree::Var {
|
||||
constructor: ValueConstructor::public(
|
||||
|
@ -423,6 +472,7 @@ impl AirTree {
|
|||
variant_name: "".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn call(func: AirTree, tipo: Rc<Type>, args: Vec<AirTree>) -> AirTree {
|
||||
AirTree::Call {
|
||||
tipo,
|
||||
|
@ -453,6 +503,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn define_cyclic_func(
|
||||
func_name: impl ToString,
|
||||
module_name: impl ToString,
|
||||
|
@ -468,15 +519,18 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn anon_func(params: Vec<String>, func_body: AirTree) -> AirTree {
|
||||
AirTree::Fn {
|
||||
params,
|
||||
func_body: func_body.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn builtin(func: DefaultFunction, tipo: Rc<Type>, args: Vec<AirTree>) -> AirTree {
|
||||
AirTree::Builtin { func, tipo, args }
|
||||
}
|
||||
|
||||
pub fn binop(
|
||||
op: BinOp,
|
||||
tipo: Rc<Type>,
|
||||
|
@ -492,12 +546,14 @@ impl AirTree {
|
|||
argument_tipo,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unop(op: UnOp, arg: AirTree) -> AirTree {
|
||||
AirTree::UnOp {
|
||||
op,
|
||||
arg: arg.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn let_assignment(name: impl ToString, value: AirTree, then: AirTree) -> AirTree {
|
||||
AirTree::Let {
|
||||
name: name.to_string(),
|
||||
|
@ -505,6 +561,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cast_from_data(value: AirTree, tipo: Rc<Type>, msg: Option<AirMsg>) -> AirTree {
|
||||
AirTree::CastFromData {
|
||||
tipo,
|
||||
|
@ -512,12 +569,14 @@ impl AirTree {
|
|||
msg,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cast_to_data(value: AirTree, tipo: Rc<Type>) -> AirTree {
|
||||
AirTree::CastToData {
|
||||
tipo,
|
||||
value: value.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn assert_constr_index(
|
||||
constr_index: usize,
|
||||
constr: AirTree,
|
||||
|
@ -531,6 +590,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn assert_bool(
|
||||
is_true: bool,
|
||||
value: AirTree,
|
||||
|
@ -544,6 +604,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn when(
|
||||
subject_name: impl ToString,
|
||||
tipo: Rc<Type>,
|
||||
|
@ -559,6 +620,7 @@ impl AirTree {
|
|||
clauses: clauses.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clause(
|
||||
subject_name: impl ToString,
|
||||
pattern: AirTree,
|
||||
|
@ -576,6 +638,7 @@ impl AirTree {
|
|||
otherwise: otherwise.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn list_clause(
|
||||
tail_name: impl ToString,
|
||||
subject_tipo: Rc<Type>,
|
||||
|
@ -593,6 +656,7 @@ impl AirTree {
|
|||
otherwise: otherwise.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tuple_clause(
|
||||
subject_name: impl ToString,
|
||||
subject_tipo: Rc<Type>,
|
||||
|
@ -612,12 +676,34 @@ impl AirTree {
|
|||
otherwise: otherwise.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pair_clause(
|
||||
subject_name: impl ToString,
|
||||
subject_tipo: Rc<Type>,
|
||||
fst_name: Option<String>,
|
||||
snd_name: Option<String>,
|
||||
then: AirTree,
|
||||
otherwise: AirTree,
|
||||
complex_clause: bool,
|
||||
) -> AirTree {
|
||||
AirTree::PairClause {
|
||||
subject_tipo,
|
||||
subject_name: subject_name.to_string(),
|
||||
fst_name,
|
||||
snd_name,
|
||||
complex_clause,
|
||||
then: then.into(),
|
||||
otherwise: otherwise.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn wrap_clause(then: AirTree, otherwise: AirTree) -> AirTree {
|
||||
AirTree::WrapClause {
|
||||
then: then.into(),
|
||||
otherwise: otherwise.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clause_guard(
|
||||
subject_name: impl ToString,
|
||||
pattern: AirTree,
|
||||
|
@ -631,6 +717,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn list_clause_guard(
|
||||
tail_name: impl ToString,
|
||||
subject_tipo: Rc<Type>,
|
||||
|
@ -646,6 +733,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tuple_clause_guard(
|
||||
subject_name: impl ToString,
|
||||
subject_tipo: Rc<Type>,
|
||||
|
@ -659,12 +747,30 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pair_clause_guard(
|
||||
subject_name: impl ToString,
|
||||
subject_tipo: Rc<Type>,
|
||||
fst_name: Option<String>,
|
||||
snd_name: Option<String>,
|
||||
then: AirTree,
|
||||
) -> AirTree {
|
||||
AirTree::PairGuard {
|
||||
subject_name: subject_name.to_string(),
|
||||
subject_tipo,
|
||||
fst_name,
|
||||
snd_name,
|
||||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn finally(pattern: AirTree, then: AirTree) -> AirTree {
|
||||
AirTree::Finally {
|
||||
pattern: pattern.into(),
|
||||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn if_branches(
|
||||
mut branches: Vec<(AirTree, AirTree)>,
|
||||
tipo: Rc<Type>,
|
||||
|
@ -691,6 +797,7 @@ impl AirTree {
|
|||
|
||||
final_if
|
||||
}
|
||||
|
||||
pub fn create_constr(tag: usize, tipo: Rc<Type>, args: Vec<AirTree>) -> AirTree {
|
||||
AirTree::Constr { tag, tipo, args }
|
||||
}
|
||||
|
@ -710,6 +817,7 @@ impl AirTree {
|
|||
args,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn index_access(function_name: String, tipo: Rc<Type>, list_of_fields: AirTree) -> AirTree {
|
||||
AirTree::cast_from_data(
|
||||
AirTree::call(
|
||||
|
@ -740,6 +848,7 @@ impl AirTree {
|
|||
None,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn fields_expose(
|
||||
indices: Vec<(usize, String, Rc<Type>)>,
|
||||
record: AirTree,
|
||||
|
@ -775,6 +884,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn list_expose(
|
||||
tail_head_names: Vec<(String, String)>,
|
||||
tail: Option<(String, String)>,
|
||||
|
@ -788,6 +898,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tuple_access(
|
||||
names: Vec<String>,
|
||||
tipo: Rc<Type>,
|
||||
|
@@ -805,6 +916,27 @@ impl AirTree {
            then: then.into(),
        }
    }

    pub fn pair_access(
        fst: Option<String>,
        snd: Option<String>,
        tipo: Rc<Type>,
        pair: AirTree,
        msg: Option<AirMsg>,
        is_expect: bool,
        then: AirTree,
    ) -> AirTree {
        AirTree::PairAccessor {
            fst,
            snd,
            tipo,
            is_expect,
            msg,
            pair: pair.into(),
            then: then.into(),
        }
    }

    pub fn pair_index(index: usize, tipo: Rc<Type>, tuple: AirTree) -> AirTree {
        AirTree::cast_from_data(
            AirTree::builtin(
@ -820,9 +952,11 @@ impl AirTree {
|
|||
None,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn error(tipo: Rc<Type>, validator: bool) -> AirTree {
|
||||
AirTree::ErrorTerm { tipo, validator }
|
||||
}
|
||||
|
||||
pub fn trace(msg: AirTree, tipo: Rc<Type>, then: AirTree) -> AirTree {
|
||||
AirTree::Trace {
|
||||
tipo,
|
||||
|
@ -840,6 +974,7 @@ impl AirTree {
|
|||
pub fn no_op(then: AirTree) -> AirTree {
|
||||
AirTree::NoOp { then: then.into() }
|
||||
}
|
||||
|
||||
pub fn fields_empty(constr: AirTree, msg: Option<AirMsg>, then: AirTree) -> AirTree {
|
||||
AirTree::FieldsEmpty {
|
||||
constr: constr.into(),
|
||||
|
@ -847,6 +982,7 @@ impl AirTree {
|
|||
then: then.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn list_empty(list: AirTree, msg: Option<AirMsg>, then: AirTree) -> AirTree {
|
||||
AirTree::ListEmpty {
|
||||
list: list.into(),
|
||||
|
@ -1058,6 +1194,21 @@ impl AirTree {
|
|||
});
|
||||
then.create_air_vec(air_vec);
|
||||
}
|
||||
AirTree::PairGuard {
|
||||
subject_tipo,
|
||||
subject_name,
|
||||
fst_name,
|
||||
snd_name,
|
||||
then,
|
||||
} => {
|
||||
air_vec.push(Air::PairGuard {
|
||||
subject_tipo: subject_tipo.clone(),
|
||||
subject_name: subject_name.clone(),
|
||||
fst_name: fst_name.clone(),
|
||||
snd_name: snd_name.clone(),
|
||||
});
|
||||
then.create_air_vec(air_vec);
|
||||
}
|
||||
AirTree::FieldsExpose {
|
||||
indices,
|
||||
record,
|
||||
|
@ -1134,6 +1285,29 @@ impl AirTree {
|
|||
tuple.create_air_vec(air_vec);
|
||||
then.create_air_vec(air_vec);
|
||||
}
|
||||
AirTree::PairAccessor {
|
||||
fst,
|
||||
snd,
|
||||
tipo,
|
||||
is_expect,
|
||||
msg,
|
||||
pair,
|
||||
then,
|
||||
} => {
|
||||
air_vec.push(Air::PairAccessor {
|
||||
fst: fst.clone(),
|
||||
snd: snd.clone(),
|
||||
tipo: tipo.clone(),
|
||||
is_expect: *is_expect,
|
||||
});
|
||||
|
||||
if let Some(msg) = msg {
|
||||
msg.to_air_tree().create_air_vec(air_vec);
|
||||
}
|
||||
|
||||
pair.create_air_vec(air_vec);
|
||||
then.create_air_vec(air_vec);
|
||||
}
|
||||
AirTree::FieldsEmpty { constr, msg, then } => {
|
||||
air_vec.push(Air::FieldsEmpty);
|
||||
|
||||
|
@ -1189,6 +1363,11 @@ impl AirTree {
|
|||
item.create_air_vec(air_vec);
|
||||
}
|
||||
}
|
||||
AirTree::Pair { tipo, fst, snd } => {
|
||||
air_vec.push(Air::Pair { tipo: tipo.clone() });
|
||||
fst.create_air_vec(air_vec);
|
||||
snd.create_air_vec(air_vec);
|
||||
}
|
||||
AirTree::Void => air_vec.push(Air::Void),
|
||||
AirTree::Var {
|
||||
constructor,
|
||||
|
@ -1334,6 +1513,25 @@ impl AirTree {
|
|||
then.create_air_vec(air_vec);
|
||||
otherwise.create_air_vec(air_vec);
|
||||
}
|
||||
AirTree::PairClause {
|
||||
subject_tipo,
|
||||
subject_name,
|
||||
fst_name,
|
||||
snd_name,
|
||||
complex_clause,
|
||||
then,
|
||||
otherwise,
|
||||
} => {
|
||||
air_vec.push(Air::PairClause {
|
||||
subject_tipo: subject_tipo.clone(),
|
||||
subject_name: subject_name.clone(),
|
||||
fst_name: fst_name.clone(),
|
||||
snd_name: snd_name.clone(),
|
||||
complex_clause: *complex_clause,
|
||||
});
|
||||
then.create_air_vec(air_vec);
|
||||
otherwise.create_air_vec(air_vec);
|
||||
}
|
||||
AirTree::Finally { pattern, then } => {
|
||||
air_vec.push(Air::Finally);
|
||||
pattern.create_air_vec(air_vec);
|
||||
|
@ -1398,6 +1596,7 @@ impl AirTree {
|
|||
AirTree::CurvePoint { point } => point.tipo(),
|
||||
AirTree::List { tipo, .. }
|
||||
| AirTree::Tuple { tipo, .. }
|
||||
| AirTree::Pair { tipo, .. }
|
||||
| AirTree::Call { tipo, .. }
|
||||
| AirTree::Builtin { tipo, .. }
|
||||
| AirTree::BinOp { tipo, .. }
|
||||
|
@ -1420,6 +1619,7 @@ impl AirTree {
|
|||
| AirTree::ListClause { then, .. }
|
||||
| AirTree::WrapClause { then, .. }
|
||||
| AirTree::TupleClause { then, .. }
|
||||
| AirTree::PairClause { then, .. }
|
||||
| AirTree::Finally { then, .. }
|
||||
| AirTree::Let { then, .. }
|
||||
| AirTree::DefineFunc { then, .. }
|
||||
|
@ -1429,10 +1629,12 @@ impl AirTree {
|
|||
| AirTree::ClauseGuard { then, .. }
|
||||
| AirTree::ListClauseGuard { then, .. }
|
||||
| AirTree::TupleGuard { then, .. }
|
||||
| AirTree::PairGuard { then, .. }
|
||||
| AirTree::FieldsExpose { then, .. }
|
||||
| AirTree::ListAccessor { then, .. }
|
||||
| AirTree::ListExpose { then, .. }
|
||||
| AirTree::TupleAccessor { then, .. }
|
||||
| AirTree::PairAccessor { then, .. }
|
||||
| AirTree::FieldsEmpty { then, .. }
|
||||
| AirTree::ListEmpty { then, .. }
|
||||
| AirTree::NoOp { then } => then.return_type(),
|
||||
|
@ -1443,18 +1645,18 @@ impl AirTree {
|
|||
match self {
|
||||
AirTree::ClauseGuard { subject_tipo, .. }
|
||||
| AirTree::ListClauseGuard { subject_tipo, .. }
|
||||
| AirTree::TupleGuard { subject_tipo, .. } => vec![subject_tipo],
|
||||
| AirTree::PairGuard { subject_tipo, .. }
|
||||
| AirTree::TupleGuard { subject_tipo, .. }
|
||||
| AirTree::Clause { subject_tipo, .. }
|
||||
| AirTree::ListClause { subject_tipo, .. }
|
||||
| AirTree::TupleClause { subject_tipo, .. }
|
||||
| AirTree::PairClause { subject_tipo, .. } => vec![subject_tipo],
|
||||
|
||||
AirTree::ListAccessor { tipo, .. }
|
||||
| AirTree::ListExpose { tipo, .. }
|
||||
| AirTree::TupleAccessor { tipo, .. } => vec![tipo],
|
||||
AirTree::FieldsExpose { indices, .. } => {
|
||||
let mut types = vec![];
|
||||
for (_, _, tipo) in indices {
|
||||
types.push(tipo);
|
||||
}
|
||||
types
|
||||
}
|
||||
AirTree::List { tipo, .. }
|
||||
| AirTree::TupleAccessor { tipo, .. }
|
||||
| AirTree::PairAccessor { tipo, .. }
|
||||
| AirTree::List { tipo, .. }
|
||||
| AirTree::Tuple { tipo, .. }
|
||||
| AirTree::Call { tipo, .. }
|
||||
| AirTree::Builtin { tipo, .. }
|
||||
|
@ -1463,7 +1665,17 @@ impl AirTree {
|
|||
| AirTree::If { tipo, .. }
|
||||
| AirTree::Constr { tipo, .. }
|
||||
| AirTree::ErrorTerm { tipo, .. }
|
||||
| AirTree::Trace { tipo, .. } => vec![tipo],
|
||||
| AirTree::Trace { tipo, .. }
|
||||
| AirTree::Pair { tipo, .. } => vec![tipo],
|
||||
|
||||
AirTree::FieldsExpose { indices, .. } => {
|
||||
let mut types = vec![];
|
||||
for (_, _, tipo) in indices {
|
||||
types.push(tipo);
|
||||
}
|
||||
types
|
||||
}
|
||||
|
||||
AirTree::Var { constructor, .. } => {
|
||||
vec![constructor.tipo.borrow_mut()]
|
||||
}
|
||||
|
@ -1477,9 +1689,7 @@ impl AirTree {
|
|||
AirTree::When {
|
||||
tipo, subject_tipo, ..
|
||||
} => vec![tipo, subject_tipo],
|
||||
AirTree::Clause { subject_tipo, .. }
|
||||
| AirTree::ListClause { subject_tipo, .. }
|
||||
| AirTree::TupleClause { subject_tipo, .. } => vec![subject_tipo],
|
||||
|
||||
AirTree::RecordUpdate { tipo, indices, .. } => {
|
||||
let mut types = vec![tipo];
|
||||
for (_, tipo) in indices {
|
||||
|
@ -1487,9 +1697,24 @@ impl AirTree {
|
|||
}
|
||||
types
|
||||
}
|
||||
_ => {
|
||||
vec![]
|
||||
}
|
||||
AirTree::Let { .. }
|
||||
| AirTree::DefineFunc { .. }
|
||||
| AirTree::DefineCyclicFuncs { .. }
|
||||
| AirTree::AssertConstr { .. }
|
||||
| AirTree::AssertBool { .. }
|
||||
| AirTree::FieldsEmpty { .. }
|
||||
| AirTree::ListEmpty { .. }
|
||||
| AirTree::NoOp { .. }
|
||||
| AirTree::Int { .. }
|
||||
| AirTree::String { .. }
|
||||
| AirTree::ByteArray { .. }
|
||||
| AirTree::CurvePoint { .. }
|
||||
| AirTree::Bool { .. }
|
||||
| AirTree::Void
|
||||
| AirTree::Fn { .. }
|
||||
| AirTree::UnOp { .. }
|
||||
| AirTree::WrapClause { .. }
|
||||
| AirTree::Finally { .. } => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1525,7 +1750,8 @@ impl AirTree {
|
|||
tree_path.push(current_depth, depth_index);
|
||||
let mut tuple_then_index = None;
|
||||
|
||||
// Assignments/Statements get traversed here
|
||||
// Assignments'/Statements' values get traversed here
|
||||
// Then the body under these assignments/statements get traversed later on
|
||||
match self {
|
||||
AirTree::Let { value, .. } => {
|
||||
value.do_traverse_tree_with(
|
||||
|
@ -1592,6 +1818,15 @@ impl AirTree {
|
|||
apply_with_func_last,
|
||||
);
|
||||
}
|
||||
AirTree::PairAccessor { pair, .. } => {
|
||||
pair.do_traverse_tree_with(
|
||||
tree_path,
|
||||
current_depth + 1,
|
||||
index_count.next_number(),
|
||||
with,
|
||||
apply_with_func_last,
|
||||
);
|
||||
}
|
||||
AirTree::FieldsEmpty { constr, .. } => {
|
||||
constr.do_traverse_tree_with(
|
||||
tree_path,
|
||||
|
@ -1629,7 +1864,49 @@ impl AirTree {
|
|||
apply_with_func_last,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
AirTree::PairClause { otherwise, .. } => {
|
||||
tuple_then_index = Some(index_count.next_number());
|
||||
otherwise.do_traverse_tree_with(
|
||||
tree_path,
|
||||
current_depth + 1,
|
||||
index_count.next_number(),
|
||||
with,
|
||||
apply_with_func_last,
|
||||
);
|
||||
}
|
||||
AirTree::DefineFunc { .. }
|
||||
| AirTree::DefineCyclicFuncs { .. }
|
||||
| AirTree::ListClauseGuard { .. }
|
||||
| AirTree::TupleGuard { .. }
|
||||
| AirTree::PairGuard { .. }
|
||||
| AirTree::ListExpose { .. }
|
||||
| AirTree::NoOp { .. }
|
||||
| AirTree::Int { .. }
|
||||
| AirTree::String { .. }
|
||||
| AirTree::ByteArray { .. }
|
||||
| AirTree::CurvePoint { .. }
|
||||
| AirTree::Bool { .. }
|
||||
| AirTree::List { .. }
|
||||
| AirTree::Tuple { .. }
|
||||
| AirTree::Pair { .. }
|
||||
| AirTree::Void
|
||||
| AirTree::Var { .. }
|
||||
| AirTree::Call { .. }
|
||||
| AirTree::Fn { .. }
|
||||
| AirTree::Builtin { .. }
|
||||
| AirTree::BinOp { .. }
|
||||
| AirTree::UnOp { .. }
|
||||
| AirTree::CastFromData { .. }
|
||||
| AirTree::CastToData { .. }
|
||||
| AirTree::Clause { .. }
|
||||
| AirTree::ListClause { .. }
|
||||
| AirTree::WrapClause { .. }
|
||||
| AirTree::Finally { .. }
|
||||
| AirTree::If { .. }
|
||||
| AirTree::Constr { .. }
|
||||
| AirTree::RecordUpdate { .. }
|
||||
| AirTree::ErrorTerm { .. }
|
||||
| AirTree::Trace { .. } => {}
|
||||
}
|
||||
|
||||
if !apply_with_func_last {
|
||||
|
@ -1645,11 +1922,13 @@ impl AirTree {
|
|||
| AirTree::FieldsExpose { then, .. }
|
||||
| AirTree::ListAccessor { then, .. }
|
||||
| AirTree::TupleAccessor { then, .. }
|
||||
| AirTree::PairAccessor { then, .. }
|
||||
| AirTree::FieldsEmpty { then, .. }
|
||||
| AirTree::ListEmpty { then, .. }
|
||||
| AirTree::ListExpose { then, .. }
|
||||
| AirTree::ListClauseGuard { then, .. }
|
||||
| AirTree::TupleGuard { then, .. }
|
||||
| AirTree::PairGuard { then, .. }
|
||||
| AirTree::NoOp { then } => {
|
||||
then.do_traverse_tree_with(
|
||||
tree_path,
|
||||
|
@ -1681,6 +1960,19 @@ impl AirTree {
|
|||
apply_with_func_last,
|
||||
);
|
||||
}
|
||||
AirTree::PairClause { then, .. } => {
|
||||
let Some(index) = tuple_then_index else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
then.do_traverse_tree_with(
|
||||
tree_path,
|
||||
current_depth + 1,
|
||||
index,
|
||||
with,
|
||||
apply_with_func_last,
|
||||
);
|
||||
}
|
||||
AirTree::List { items, .. } => {
|
||||
for item in items {
|
||||
item.do_traverse_tree_with(
|
||||
|
@ -1703,6 +1995,23 @@ impl AirTree {
|
|||
);
|
||||
}
|
||||
}
|
||||
AirTree::Pair { fst, snd, .. } => {
|
||||
fst.do_traverse_tree_with(
|
||||
tree_path,
|
||||
current_depth + 1,
|
||||
index_count.next_number(),
|
||||
with,
|
||||
apply_with_func_last,
|
||||
);
|
||||
|
||||
snd.do_traverse_tree_with(
|
||||
tree_path,
|
||||
current_depth + 1,
|
||||
index_count.next_number(),
|
||||
with,
|
||||
apply_with_func_last,
|
||||
);
|
||||
}
|
||||
AirTree::Call { func, args, .. } => {
|
||||
func.do_traverse_tree_with(
|
||||
tree_path,
|
||||
|
@ -2071,6 +2380,13 @@ impl AirTree {
|
|||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::PairGuard { then, .. } => {
|
||||
if *index == 0 {
|
||||
then.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
} else {
|
||||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::FieldsExpose { record, then, .. } => {
|
||||
if *index == 0 {
|
||||
record.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
|
@ -2105,6 +2421,15 @@ impl AirTree {
|
|||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::PairAccessor { pair, then, .. } => {
|
||||
if *index == 0 {
|
||||
pair.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
} else if *index == 1 {
|
||||
then.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
} else {
|
||||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::NoOp { then } => {
|
||||
if *index == 0 {
|
||||
then.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
|
@ -2112,8 +2437,7 @@ impl AirTree {
|
|||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::DefineFunc { .. } => unreachable!(),
|
||||
AirTree::DefineCyclicFuncs { .. } => unreachable!(),
|
||||
AirTree::DefineFunc { .. } | AirTree::DefineCyclicFuncs { .. } => unreachable!(),
|
||||
AirTree::FieldsEmpty { constr, then, .. } => {
|
||||
if *index == 0 {
|
||||
constr.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
|
@ -2140,6 +2464,15 @@ impl AirTree {
|
|||
.expect("Tree Path index outside tree children nodes");
|
||||
item.do_find_air_tree_node(tree_path_iter)
|
||||
}
|
||||
AirTree::Pair { fst, snd, .. } => {
|
||||
if *index == 0 {
|
||||
fst.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
} else if *index == 1 {
|
||||
snd.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
} else {
|
||||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::Call { func, args, .. } => {
|
||||
children_nodes.push(func.as_mut());
|
||||
children_nodes.extend(args.iter_mut());
|
||||
|
@ -2243,6 +2576,17 @@ impl AirTree {
|
|||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::PairClause {
|
||||
then, otherwise, ..
|
||||
} => {
|
||||
if *index == 0 {
|
||||
then.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
} else if *index == 1 {
|
||||
otherwise.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
} else {
|
||||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
AirTree::Finally { pattern, then } => {
|
||||
if *index == 0 {
|
||||
pattern.as_mut().do_find_air_tree_node(tree_path_iter)
|
||||
|
@ -2291,7 +2635,15 @@ impl AirTree {
|
|||
panic!("Tree Path index outside tree children nodes")
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
|
||||
AirTree::Int { .. }
|
||||
| AirTree::String { .. }
|
||||
| AirTree::ByteArray { .. }
|
||||
| AirTree::CurvePoint { .. }
|
||||
| AirTree::Bool { .. }
|
||||
| AirTree::Void
|
||||
| AirTree::Var { .. }
|
||||
| AirTree::ErrorTerm { .. } => {
|
||||
panic!("A tree node with no children was encountered with a longer tree path.")
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
use chumsky::prelude::*;
|
||||
|
||||
use crate::ast;
|
||||
|
||||
use super::{error::ParseError, token::Token};
|
||||
use crate::{
|
||||
ast,
|
||||
builtins::{PAIR, PRELUDE},
|
||||
};
|
||||
use chumsky::prelude::*;
|
||||
|
||||
pub fn parser() -> impl Parser<Token, ast::Annotation, Error = ParseError> {
|
||||
recursive(|expression| {
|
||||
|
@@ -14,6 +15,31 @@ pub fn parser() -> impl Parser<Token, ast::Annotation, Error = ParseError> {
                    name,
                }
            }),
            // Pair
            select! {Token::Name { name } if name == PRELUDE => name}
                .then_ignore(just(Token::Dot))
                .or_not()
                .then_ignore(select! {Token::UpName { name } if name == PAIR => name})
                .ignore_then(
                    expression
                        .clone()
                        .separated_by(just(Token::Comma))
                        .exactly(2)
                        .delimited_by(just(Token::Less), just(Token::Greater)),
                )
                .map_with_span(|elems: Vec<ast::Annotation>, span| ast::Annotation::Pair {
                    location: span,
                    fst: elems
                        .first()
                        .expect("Pair should have exactly 2 elements")
                        .to_owned()
                        .into(),
                    snd: elems
                        .last()
                        .expect("Pair should have exactly 2 elements")
                        .to_owned()
                        .into(),
                }),
            // Tuple
            expression
                .clone()
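For orientation, a minimal Aiken sketch (hypothetical function name, not part of this diff) of the annotation syntax the branch above accepts; both the bare and the prelude-qualified spelling parse into ast::Annotation::Pair:

// Sketch only: `keys` is an illustrative name.
fn keys(kvs: List<Pair<ByteArray, Int>>) -> List<ByteArray> {
  todo
}

// The prelude-qualified form, e.g. `aiken.Pair<Int, Int>`, is accepted as well.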
@ -1,20 +1,11 @@
|
|||
use chumsky::prelude::*;
|
||||
|
||||
use super::anonymous_function::parser as anonymous_function;
|
||||
use super::assignment;
|
||||
use super::block::parser as block;
|
||||
use super::bytearray::parser as bytearray;
|
||||
use super::if_else::parser as if_else;
|
||||
use super::int::parser as int;
|
||||
use super::list::parser as list;
|
||||
use super::record::parser as record;
|
||||
use super::record_update::parser as record_update;
|
||||
use super::string::parser as string;
|
||||
use super::tuple::parser as tuple;
|
||||
use super::var::parser as var;
|
||||
use super::when::parser as when;
|
||||
use super::{and_or_chain, anonymous_binop::parser as anonymous_binop};
|
||||
|
||||
use super::{
|
||||
and_or_chain, anonymous_binop::parser as anonymous_binop,
|
||||
anonymous_function::parser as anonymous_function, assignment, block::parser as block,
|
||||
bytearray::parser as bytearray, if_else::parser as if_else, int::parser as int,
|
||||
list::parser as list, pair::parser as pair, record::parser as record,
|
||||
record_update::parser as record_update, string::parser as string, tuple::parser as tuple,
|
||||
var::parser as var, when::parser as when,
|
||||
};
|
||||
use crate::{
|
||||
expr::UntypedExpr,
|
||||
parser::{
|
||||
|
@ -23,6 +14,7 @@ use crate::{
|
|||
token::Token,
|
||||
},
|
||||
};
|
||||
use chumsky::prelude::*;
|
||||
|
||||
pub fn parser<'a>(
|
||||
sequence: Recursive<'a, Token, UntypedExpr, ParseError>,
|
||||
|
@ -58,6 +50,7 @@ pub fn chain_start<'a>(
|
|||
choice((
|
||||
string(),
|
||||
int(),
|
||||
pair(expression.clone()),
|
||||
record_update(expression.clone()),
|
||||
record(expression.clone()),
|
||||
field_access::constructor(),
|
||||
|
|
|
@ -12,6 +12,7 @@ mod fail_todo_trace;
|
|||
mod if_else;
|
||||
mod int;
|
||||
mod list;
|
||||
mod pair;
|
||||
mod record;
|
||||
mod record_update;
|
||||
mod sequence;
|
||||
|
@ -31,6 +32,7 @@ pub use fail_todo_trace::parser as fail_todo_trace;
|
|||
pub use if_else::parser as if_else;
|
||||
pub use int::parser as int;
|
||||
pub use list::parser as list;
|
||||
pub use pair::parser as pair;
|
||||
pub use record::parser as record;
|
||||
pub use record_update::parser as record_update;
|
||||
pub use sequence::parser as sequence;
|
||||
|
|
|
@@ -0,0 +1,53 @@
use crate::{
    builtins::{PAIR, PRELUDE},
    expr::UntypedExpr,
    parser::{error::ParseError, token::Token},
};
use chumsky::prelude::*;

pub fn parser(
    r: Recursive<'_, Token, UntypedExpr, ParseError>,
) -> impl Parser<Token, UntypedExpr, Error = ParseError> + '_ {
    select! {Token::Name { name } if name == PRELUDE => name}
        .then_ignore(just(Token::Dot))
        .or_not()
        .then_ignore(select! {Token::UpName { name } if name == PAIR => name})
        .ignore_then(
            r.clone()
                .separated_by(just(Token::Comma))
                .exactly(2)
                .allow_trailing()
                .delimited_by(
                    choice((just(Token::LeftParen), just(Token::NewLineLeftParen))),
                    just(Token::RightParen),
                )
                .map_with_span(|elems, location| UntypedExpr::Pair {
                    location,
                    fst: elems
                        .first()
                        .expect("Pair should have exactly 2 elements")
                        .to_owned()
                        .into(),
                    snd: elems
                        .last()
                        .expect("Pair should have exactly 2 elements")
                        .to_owned()
                        .into(),
                }),
        )
}

#[cfg(test)]
mod tests {
    use crate::assert_expr;

    #[test]
    fn basic_pair() {
        assert_expr!(r#"Pair(1, 2)"#);
    }

    #[test]
    fn pair_from_prelude() {
        assert_expr!(r#"aiken.Pair(1, 2)"#);
    }
}
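As a usage sketch (hypothetical function name, assuming this parser is wired into the expression grammar as the chain_start change below suggests), a Pair value is written like any other constructor call and mirrors the two unit tests above:

// Sketch only: illustrative function.
fn make_pair() -> Pair<Int, Int> {
  Pair(1, 2)
}

// The prelude-qualified form `aiken.Pair(1, 2)` parses to the same UntypedExpr::Pair.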
@@ -0,0 +1,21 @@
---
source: crates/aiken-lang/src/parser/expr/pair.rs
description: "Code:\n\nPair(1, 2)"
---
Pair {
    location: 4..10,
    fst: UInt {
        location: 5..6,
        value: "1",
        base: Decimal {
            numeric_underscore: false,
        },
    },
    snd: UInt {
        location: 8..9,
        value: "2",
        base: Decimal {
            numeric_underscore: false,
        },
    },
}
@ -0,0 +1,21 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/expr/pair.rs
|
||||
description: "Code:\n\naiken.Pair(1, 2)"
|
||||
---
|
||||
Pair {
|
||||
location: 10..16,
|
||||
fst: UInt {
|
||||
location: 11..12,
|
||||
value: "1",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
snd: UInt {
|
||||
location: 14..15,
|
||||
value: "2",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
}
|
|
@ -4,30 +4,32 @@ mod constructor;
|
|||
mod discard;
|
||||
mod int;
|
||||
mod list;
|
||||
mod pair;
|
||||
mod tuple;
|
||||
mod var;
|
||||
|
||||
pub use constructor::parser as constructor;
|
||||
pub use discard::parser as discard;
|
||||
pub use int::parser as int;
|
||||
pub use list::parser as list;
|
||||
pub use tuple::parser as tuple;
|
||||
pub use var::parser as var;
|
||||
|
||||
use crate::{
|
||||
ast::UntypedPattern,
|
||||
parser::{error::ParseError, token::Token},
|
||||
};
|
||||
pub use constructor::parser as constructor;
|
||||
pub use discard::parser as discard;
|
||||
pub use int::parser as int;
|
||||
pub use list::parser as list;
|
||||
pub use pair::parser as pair;
|
||||
pub use tuple::parser as tuple;
|
||||
pub use var::parser as var;
|
||||
|
||||
pub fn parser() -> impl Parser<Token, UntypedPattern, Error = ParseError> {
|
||||
recursive(|expression| {
|
||||
recursive(|pattern| {
|
||||
choice((
|
||||
var(expression.clone()),
|
||||
constructor(expression.clone()),
|
||||
var(pattern.clone()),
|
||||
pair(pattern.clone()),
|
||||
constructor(pattern.clone()),
|
||||
discard(),
|
||||
int(),
|
||||
tuple(expression.clone()),
|
||||
list(expression),
|
||||
tuple(pattern.clone()),
|
||||
list(pattern),
|
||||
))
|
||||
.then(
|
||||
just(Token::As)
|
||||
|
@@ -47,3 +49,63 @@ pub fn parser() -> impl Parser<Token, UntypedPattern, Error = ParseError> {
        })
    })
}

#[cfg(test)]
mod tests {
    use crate::assert_pattern;

    #[test]
    fn pattern_var() {
        assert_pattern!("foo");
    }

    #[test]
    fn pattern_discard_unnamed() {
        assert_pattern!("_");
    }

    #[test]
    fn pattern_discard_named() {
        assert_pattern!("_foo");
    }

    #[test]
    fn pattern_pair_discards() {
        assert_pattern!("Pair(_, _)");
    }

    #[test]
    fn pattern_pair_explicit_depth_1() {
        assert_pattern!("Pair(14, True)");
    }

    #[test]
    fn pattern_pair_explicit_depth_2() {
        assert_pattern!("Pair([1,2,3], Pair((14, 42), _))");
    }

    #[test]
    fn pattern_constructor_no_labels() {
        assert_pattern!("Foo(a, b)");
    }

    #[test]
    fn pattern_constructor_labels() {
        assert_pattern!("Foo { a, b }");
    }

    #[test]
    fn pattern_constructor_spread() {
        assert_pattern!("Foo { a, .. }");
    }

    #[test]
    fn pattern_constructor_pair_interleaved() {
        assert_pattern!("Foo(a, Pair(1, 2))");
    }

    #[test]
    fn pattern_list_spread() {
        assert_pattern!("[head, ..]");
    }
}
@@ -0,0 +1,25 @@
use crate::{
    ast::UntypedPattern,
    builtins::PAIR,
    parser::{error::ParseError, token::Token},
};
use chumsky::prelude::*;

pub fn parser(
    pattern: Recursive<'_, Token, UntypedPattern, ParseError>,
) -> impl Parser<Token, UntypedPattern, Error = ParseError> + '_ {
    select! {Token::UpName { name } if name == PAIR => name}
        .ignore_then(choice((
            just(Token::LeftParen),
            just(Token::NewLineLeftParen),
        )))
        .then(pattern.clone())
        .then_ignore(just(Token::Comma))
        .then(pattern.clone())
        .then_ignore(just(Token::RightParen))
        .map_with_span(|((_name, fst), snd), location| UntypedPattern::Pair {
            fst: Box::new(fst),
            snd: Box::new(snd),
            location,
        })
}
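A minimal Aiken sketch (hypothetical function, not part of this diff) of the pattern form this parser enables, in the same style as the `Pair(14, True)` case exercised by the pattern tests above:

// Sketch only: destructuring a Pair inside a when clause.
fn sum(p: Pair<Int, Int>) -> Int {
  when p is {
    Pair(a, b) -> a + b
  }
}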
@ -0,0 +1,35 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nFoo { a, b }"
|
||||
---
|
||||
Constructor {
|
||||
is_record: true,
|
||||
location: 0..12,
|
||||
name: "Foo",
|
||||
arguments: [
|
||||
CallArg {
|
||||
label: Some(
|
||||
"a",
|
||||
),
|
||||
location: 6..7,
|
||||
value: Var {
|
||||
location: 6..7,
|
||||
name: "a",
|
||||
},
|
||||
},
|
||||
CallArg {
|
||||
label: Some(
|
||||
"b",
|
||||
),
|
||||
location: 9..10,
|
||||
value: Var {
|
||||
location: 9..10,
|
||||
name: "b",
|
||||
},
|
||||
},
|
||||
],
|
||||
module: None,
|
||||
constructor: (),
|
||||
with_spread: false,
|
||||
tipo: (),
|
||||
}
|
|
@ -0,0 +1,31 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nFoo(a, b)"
|
||||
---
|
||||
Constructor {
|
||||
is_record: false,
|
||||
location: 0..9,
|
||||
name: "Foo",
|
||||
arguments: [
|
||||
CallArg {
|
||||
label: None,
|
||||
location: 4..5,
|
||||
value: Var {
|
||||
location: 4..5,
|
||||
name: "a",
|
||||
},
|
||||
},
|
||||
CallArg {
|
||||
label: None,
|
||||
location: 7..8,
|
||||
value: Var {
|
||||
location: 7..8,
|
||||
name: "b",
|
||||
},
|
||||
},
|
||||
],
|
||||
module: None,
|
||||
constructor: (),
|
||||
with_spread: false,
|
||||
tipo: (),
|
||||
}
|
|
@ -0,0 +1,44 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nFoo(a, Pair(1, 2))"
|
||||
---
|
||||
Constructor {
|
||||
is_record: false,
|
||||
location: 0..18,
|
||||
name: "Foo",
|
||||
arguments: [
|
||||
CallArg {
|
||||
label: None,
|
||||
location: 4..5,
|
||||
value: Var {
|
||||
location: 4..5,
|
||||
name: "a",
|
||||
},
|
||||
},
|
||||
CallArg {
|
||||
label: None,
|
||||
location: 7..17,
|
||||
value: Pair {
|
||||
location: 7..17,
|
||||
fst: Int {
|
||||
location: 12..13,
|
||||
value: "1",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
snd: Int {
|
||||
location: 15..16,
|
||||
value: "2",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
module: None,
|
||||
constructor: (),
|
||||
with_spread: false,
|
||||
tipo: (),
|
||||
}
|
|
@ -0,0 +1,25 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nFoo { a, .. }"
|
||||
---
|
||||
Constructor {
|
||||
is_record: true,
|
||||
location: 0..13,
|
||||
name: "Foo",
|
||||
arguments: [
|
||||
CallArg {
|
||||
label: Some(
|
||||
"a",
|
||||
),
|
||||
location: 6..7,
|
||||
value: Var {
|
||||
location: 6..7,
|
||||
name: "a",
|
||||
},
|
||||
},
|
||||
],
|
||||
module: None,
|
||||
constructor: (),
|
||||
with_spread: true,
|
||||
tipo: (),
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\n_foo"
|
||||
---
|
||||
Discard {
|
||||
name: "_foo",
|
||||
location: 0..4,
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\n_"
|
||||
---
|
||||
Discard {
|
||||
name: "_",
|
||||
location: 0..1,
|
||||
}
|
|
@ -0,0 +1,19 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\n[head, ..]"
|
||||
---
|
||||
List {
|
||||
location: 0..10,
|
||||
elements: [
|
||||
Var {
|
||||
location: 1..5,
|
||||
name: "head",
|
||||
},
|
||||
],
|
||||
tail: Some(
|
||||
Discard {
|
||||
name: "_",
|
||||
location: 9..10,
|
||||
},
|
||||
),
|
||||
}
|
|
@ -0,0 +1,15 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nPair(_, _)"
|
||||
---
|
||||
Pair {
|
||||
location: 0..10,
|
||||
fst: Discard {
|
||||
name: "_",
|
||||
location: 5..6,
|
||||
},
|
||||
snd: Discard {
|
||||
name: "_",
|
||||
location: 8..9,
|
||||
},
|
||||
}
|
|
@ -0,0 +1,24 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nPair(14, True)"
|
||||
---
|
||||
Pair {
|
||||
location: 0..14,
|
||||
fst: Int {
|
||||
location: 5..7,
|
||||
value: "14",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
snd: Constructor {
|
||||
is_record: false,
|
||||
location: 9..13,
|
||||
name: "True",
|
||||
arguments: [],
|
||||
module: None,
|
||||
constructor: (),
|
||||
with_spread: false,
|
||||
tipo: (),
|
||||
},
|
||||
}
|
|
@ -0,0 +1,60 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nPair([1,2,3], Pair((14, 42), _))"
|
||||
---
|
||||
Pair {
|
||||
location: 0..32,
|
||||
fst: List {
|
||||
location: 5..12,
|
||||
elements: [
|
||||
Int {
|
||||
location: 6..7,
|
||||
value: "1",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
Int {
|
||||
location: 8..9,
|
||||
value: "2",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
Int {
|
||||
location: 10..11,
|
||||
value: "3",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
tail: None,
|
||||
},
|
||||
snd: Pair {
|
||||
location: 14..31,
|
||||
fst: Tuple {
|
||||
location: 19..27,
|
||||
elems: [
|
||||
Int {
|
||||
location: 20..22,
|
||||
value: "14",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
Int {
|
||||
location: 24..26,
|
||||
value: "42",
|
||||
base: Decimal {
|
||||
numeric_underscore: false,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
snd: Discard {
|
||||
name: "_",
|
||||
location: 29..30,
|
||||
},
|
||||
},
|
||||
}
|
|
@ -0,0 +1,8 @@
|
|||
---
|
||||
source: crates/aiken-lang/src/parser/pattern/mod.rs
|
||||
description: "Code:\n\nfoo"
|
||||
---
|
||||
Var {
|
||||
location: 0..3,
|
||||
name: "foo",
|
||||
}
|
|
@@ -2295,3 +2295,106 @@ fn tuple_access_on_call() {

    assert!(check(parse(source_code)).is_ok())
}

#[test]
fn partial_eq_call_args() {
    let source_code = r#"
        fn foo(a: Int, b: Int, c: Bool) -> Int {
          todo
        }

        fn main() -> Int {
          foo(14, 42)
        }
    "#;

    assert!(matches!(
        dbg!(check(parse(source_code))),
        Err((_, Error::IncorrectFieldsArity { .. }))
    ));
}

#[test]
fn partial_eq_callback_args() {
    let source_code = r#"
        fn foo(cb: fn(Int, Int, Bool) -> Int) -> Int {
          todo
        }

        fn main() -> Int {
          foo(fn(a, b) { a + b })
        }
    "#;

    assert!(matches!(
        dbg!(check(parse(source_code))),
        Err((_, Error::CouldNotUnify { .. }))
    ));
}

#[test]
fn partial_eq_callback_return() {
    let source_code = r#"
        fn foo(cb: fn(Int, Int) -> (Int, Int, Bool)) -> Int {
          todo
        }

        fn main() -> Int {
          foo(fn(a, b) { (a, b) })
        }
    "#;

    assert!(matches!(
        dbg!(check(parse(source_code))),
        Err((_, Error::CouldNotUnify { .. }))
    ));
}

#[test]
fn pair_access_on_call() {
    let source_code = r#"
        use aiken/builtin

        pub fn list_at(xs: List<a>, index: Int) -> a {
          if index == 0 {
            builtin.head_list(xs)
          } else {
            list_at(builtin.tail_list(xs), index - 1)
          }
        }

        fn foo() {
          [list_at([Pair(1, 2)], 0).2nd, ..[1, 2]]
        }
    "#;

    assert!(check(parse(source_code)).is_ok())
}

#[test]
fn pair_index_out_of_bound() {
    let source_code = r#"
        pub fn foo() {
          Pair(1, 2).3rd
        }
    "#;

    assert!(matches!(
        dbg!(check_validator(parse(source_code))),
        Err((_, Error::PairIndexOutOfBound { .. }))
    ))
}

#[test]
fn not_indexable() {
    let source_code = r#"
        pub fn foo() {
          "foo".1st
        }
    "#;

    assert!(matches!(
        dbg!(check_validator(parse(source_code))),
        Err((_, Error::NotIndexable { .. }))
    ))
}
@@ -792,3 +792,13 @@ fn superfluous_parens_in_binop() {
        "#
    );
}

#[test]
fn format_pairs() {
    assert_format!(
        r#"
        pub fn foo(x: Pair<Int, Int>) {
          Pair(x.1st, x.2nd)
        }"#
    );
}
@@ -0,0 +1,7 @@
---
source: crates/aiken-lang/src/tests/format.rs
description: "Code:\n\npub fn foo(x: Pair<Int, Int>) {\n Pair(x.1st, x.2nd)\n}"
---
pub fn foo(x: Pair<Int, Int>) {
  Pair(x.1st, x.2nd)
}
@@ -71,6 +71,12 @@ pub enum Type {
        elems: Vec<Rc<Type>>,
        alias: Option<Rc<TypeAliasAnnotation>>,
    },

    Pair {
        fst: Rc<Type>,
        snd: Rc<Type>,
        alias: Option<Rc<TypeAliasAnnotation>>,
    },
}

impl PartialEq for Type {
@ -96,6 +102,7 @@ impl PartialEq for Type {
|
|||
name == name2
|
||||
&& module == module2
|
||||
&& public == public2
|
||||
&& args.len() == args2.len()
|
||||
&& args.iter().zip(args2).all(|(left, right)| left == right)
|
||||
} else {
|
||||
false
|
||||
|
@ -109,7 +116,9 @@ impl PartialEq for Type {
|
|||
alias: _,
|
||||
} = other
|
||||
{
|
||||
ret == ret2 && args.iter().zip(args2).all(|(left, right)| left == right)
|
||||
ret == ret2
|
||||
&& args.len() == args2.len()
|
||||
&& args.iter().zip(args2).all(|(left, right)| left == right)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
@ -117,7 +126,8 @@ impl PartialEq for Type {
|
|||
|
||||
Type::Tuple { elems, alias: _ } => {
|
||||
if let Type::Tuple { elems: elems2, .. } = other {
|
||||
elems.iter().zip(elems2).all(|(left, right)| left == right)
|
||||
elems.len() == elems2.len()
|
||||
&& elems.iter().zip(elems2).all(|(left, right)| left == right)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
|
@@ -134,6 +144,18 @@ impl PartialEq for Type {
                    false
                }
            }
            Type::Pair { fst, snd, .. } => {
                if let Type::Pair {
                    fst: fst2,
                    snd: snd2,
                    ..
                } = other
                {
                    fst == fst2 && snd == snd2
                } else {
                    false
                }
            }
        }
    }
}
@ -144,7 +166,8 @@ impl Type {
|
|||
Type::App { alias, .. }
|
||||
| Type::Fn { alias, .. }
|
||||
| Type::Var { alias, .. }
|
||||
| Type::Tuple { alias, .. } => alias.clone(),
|
||||
| Type::Tuple { alias, .. }
|
||||
| Type::Pair { alias, .. } => alias.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -179,6 +202,7 @@ impl Type {
|
|||
} => Type::Fn { args, ret, alias },
|
||||
Type::Var { tipo, alias: _ } => Type::Var { tipo, alias },
|
||||
Type::Tuple { elems, alias: _ } => Type::Tuple { elems, alias },
|
||||
Type::Pair { fst, snd, alias: _ } => Type::Pair { fst, snd, alias },
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -191,6 +215,7 @@ impl Type {
|
|||
_ => None,
|
||||
},
|
||||
Type::Tuple { .. } => Some((String::new(), "Tuple".to_string())),
|
||||
Type::Pair { .. } => Some((String::new(), "Pair".to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -204,6 +229,7 @@ impl Type {
|
|||
} => *opaque || args.iter().any(|arg| arg.contains_opaque()),
|
||||
Type::Tuple { elems, .. } => elems.iter().any(|elem| elem.contains_opaque()),
|
||||
Type::Fn { .. } => false,
|
||||
Type::Pair { fst, snd, .. } => fst.contains_opaque() || snd.contains_opaque(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -214,7 +240,7 @@ impl Type {
|
|||
} => {
|
||||
*contains_opaque = opaque;
|
||||
}
|
||||
Type::Fn { .. } | Type::Var { .. } | Type::Tuple { .. } => (),
|
||||
Type::Fn { .. } | Type::Var { .. } | Type::Tuple { .. } | Type::Pair { .. } => (),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -241,12 +267,23 @@ impl Type {
|
|||
}
|
||||
|
||||
pub fn is_primitive(&self) -> bool {
|
||||
self.is_bool()
|
||||
|| self.is_bytearray()
|
||||
|| self.is_int()
|
||||
|| self.is_string()
|
||||
|| self.is_void()
|
||||
|| self.is_data()
|
||||
let uplc_type = self.get_uplc_type();
|
||||
match uplc_type {
|
||||
Some(
|
||||
UplcType::Bool
|
||||
| UplcType::Integer
|
||||
| UplcType::String
|
||||
| UplcType::ByteString
|
||||
| UplcType::Unit
|
||||
| UplcType::Bls12_381G1Element
|
||||
| UplcType::Bls12_381G2Element
|
||||
| UplcType::Bls12_381MlResult
|
||||
| UplcType::Data,
|
||||
) => true,
|
||||
|
||||
None => false,
|
||||
Some(UplcType::List(_) | UplcType::Pair(_, _)) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_void(&self) -> bool {
|
||||
|
@ -339,7 +376,7 @@ impl Type {
|
|||
} if "List" == name && module.is_empty() => args
|
||||
.first()
|
||||
.expect("unreachable: List should have an inner type")
|
||||
.is_2_tuple(),
|
||||
.is_pair(),
|
||||
Self::Var { tipo, .. } => tipo.borrow().is_map(),
|
||||
_ => false,
|
||||
}
|
||||
|
@ -347,16 +384,16 @@ impl Type {
|
|||
|
||||
pub fn is_tuple(&self) -> bool {
|
||||
match self {
|
||||
Type::Var { tipo, .. } => tipo.borrow().is_tuple(),
|
||||
Type::Tuple { .. } => true,
|
||||
Self::Var { tipo, .. } => tipo.borrow().is_tuple(),
|
||||
Self::Tuple { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_2_tuple(&self) -> bool {
|
||||
pub fn is_pair(&self) -> bool {
|
||||
match self {
|
||||
Type::Var { tipo, .. } => tipo.borrow().is_2_tuple(),
|
||||
Type::Tuple { elems, .. } => elems.len() == 2,
|
||||
Self::Var { tipo, .. } => tipo.borrow().is_pair(),
|
||||
Self::Pair { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -371,7 +408,7 @@ impl Type {
|
|||
|
||||
pub fn is_generic(&self) -> bool {
|
||||
match self {
|
||||
Type::App { args, .. } => {
|
||||
Self::App { args, .. } => {
|
||||
let mut is_a_generic = false;
|
||||
for arg in args {
|
||||
is_a_generic = is_a_generic || arg.is_generic();
|
||||
|
@ -379,24 +416,29 @@ impl Type {
|
|||
is_a_generic
|
||||
}
|
||||
|
||||
Type::Var { tipo, .. } => tipo.borrow().is_generic(),
|
||||
Type::Tuple { elems, .. } => {
|
||||
Self::Var { tipo, .. } => tipo.borrow().is_generic(),
|
||||
Self::Tuple { elems, .. } => {
|
||||
let mut is_a_generic = false;
|
||||
for elem in elems {
|
||||
is_a_generic = is_a_generic || elem.is_generic();
|
||||
}
|
||||
is_a_generic
|
||||
}
|
||||
Type::Fn { args, ret, .. } => {
|
||||
Self::Fn { args, ret, .. } => {
|
||||
let mut is_a_generic = false;
|
||||
for arg in args {
|
||||
is_a_generic = is_a_generic || arg.is_generic();
|
||||
}
|
||||
is_a_generic || ret.is_generic()
|
||||
}
|
||||
Self::Pair { fst, snd, .. } => fst.is_generic() || snd.is_generic(),
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Self::App { args, ..} looks fishy, because App's args are referring
|
||||
// to _type parameters_ not to value types unlike Fn's args. So this function
|
||||
// definition is probably wrong. Luckily, we likely never hit the `Self::App`
|
||||
// case at all.
|
||||
pub fn arg_types(&self) -> Option<Vec<Rc<Self>>> {
|
||||
match self {
|
||||
Self::Fn { args, .. } => Some(args.clone()),
|
||||
|
@ -408,7 +450,7 @@ impl Type {
|
|||
|
||||
pub fn get_generic(&self) -> Option<u64> {
|
||||
match self {
|
||||
Type::Var { tipo, .. } => tipo.borrow().get_generic(),
|
||||
Self::Var { tipo, .. } => tipo.borrow().get_generic(),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
@ -426,7 +468,13 @@ impl Type {
|
|||
Self::Var { tipo, .. } => tipo.borrow().get_inner_types(),
|
||||
_ => vec![],
|
||||
}
|
||||
} else if matches!(self.get_uplc_type(), UplcType::Data) {
|
||||
} else if self.is_pair() {
|
||||
match self {
|
||||
Self::Pair { fst, snd, .. } => vec![fst.clone(), snd.clone()],
|
||||
Self::Var { tipo, .. } => tipo.borrow().get_inner_types(),
|
||||
_ => vec![],
|
||||
}
|
||||
} else if self.get_uplc_type().is_none() {
|
||||
match self {
|
||||
Type::App { args, .. } => args.clone(),
|
||||
Type::Fn { args, ret, .. } => {
|
||||
|
@@ -442,39 +490,35 @@ impl Type {
        }
    }

    pub fn get_uplc_type(&self) -> UplcType {
    pub fn get_uplc_type(&self) -> Option<UplcType> {
        if self.is_int() {
            UplcType::Integer
            Some(UplcType::Integer)
        } else if self.is_bytearray() {
            UplcType::ByteString
            Some(UplcType::ByteString)
        } else if self.is_string() {
            UplcType::String
            Some(UplcType::String)
        } else if self.is_bool() {
            UplcType::Bool
            Some(UplcType::Bool)
        } else if self.is_void() {
            Some(UplcType::Unit)
        } else if self.is_map() {
            UplcType::List(UplcType::Pair(UplcType::Data.into(), UplcType::Data.into()).into())
        } else if self.is_list() {
            UplcType::List(UplcType::Data.into())
        } else if self.is_tuple() {
            match self {
                Self::Tuple { elems, .. } => {
                    if elems.len() == 2 {
                        UplcType::Pair(UplcType::Data.into(), UplcType::Data.into())
                    } else {
                        UplcType::List(UplcType::Data.into())
                    }
                }
                Self::Var { tipo, .. } => tipo.borrow().get_uplc_type().unwrap(),
                _ => unreachable!(),
            }
            Some(UplcType::List(
                UplcType::Pair(UplcType::Data.into(), UplcType::Data.into()).into(),
            ))
        } else if self.is_list() || self.is_tuple() {
            Some(UplcType::List(UplcType::Data.into()))
        } else if self.is_pair() {
            Some(UplcType::Pair(UplcType::Data.into(), UplcType::Data.into()))
        } else if self.is_bls381_12_g1() {
            UplcType::Bls12_381G1Element
            Some(UplcType::Bls12_381G1Element)
        } else if self.is_bls381_12_g2() {
            UplcType::Bls12_381G2Element
            Some(UplcType::Bls12_381G2Element)
        } else if self.is_ml_result() {
            UplcType::Bls12_381MlResult
            Some(UplcType::Bls12_381MlResult)
        } else if self.is_data() {
            Some(UplcType::Data)
        } else {
            UplcType::Data
            None
        }
    }
||||
|
||||
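With the signature change above, `get_uplc_type` now returns `None` for anything that only exists as `Data` at runtime, instead of silently falling back to `UplcType::Data`. A hedged sketch of the fallback a caller might now apply explicitly; the stand-in enum below is illustrative, not the uplc crate's actual type:

// Stand-in enum; the real `UplcType` lives in the uplc crate.
#[derive(Debug, Clone, PartialEq)]
enum UplcTypeSketch {
    Integer,
    ByteString,
    Data,
}

// Callers that previously relied on the implicit `Data` fallback now make
// that choice explicitly when `get_uplc_type` yields `None`.
fn uplc_type_or_data(t: Option<UplcTypeSketch>) -> UplcTypeSketch {
    t.unwrap_or(UplcTypeSketch::Data)
}

fn main() {
    assert_eq!(
        uplc_type_or_data(Some(UplcTypeSketch::Integer)),
        UplcTypeSketch::Integer
    );
    // A user-defined / opaque type has no dedicated UPLC representation.
    assert_eq!(uplc_type_or_data(None), UplcTypeSketch::Data);
}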
|
@ -555,6 +599,13 @@ impl Type {
|
|||
|
||||
TypeVar::Link { tipo, .. } => tipo.find_private_type(),
|
||||
},
|
||||
Self::Pair { fst, snd, .. } => {
|
||||
if let Some(private_type) = fst.find_private_type() {
|
||||
Some(private_type)
|
||||
} else {
|
||||
snd.find_private_type()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -734,6 +785,16 @@ pub fn convert_opaque_type(
|
|||
}
|
||||
.into()
|
||||
}
|
||||
Type::Pair { fst, snd, alias } => {
|
||||
let fst = convert_opaque_type(fst, data_types, deep);
|
||||
let snd = convert_opaque_type(snd, data_types, deep);
|
||||
Type::Pair {
|
||||
fst,
|
||||
snd,
|
||||
alias: alias.clone(),
|
||||
}
|
||||
.into()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
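As in the other traversals touched by this change (`find_and_replace_generics`, `unify_unbound_type`, `generalise`, `ensure_serialisable`), `convert_opaque_type` gains a `Pair` arm that recurses into both components and rebuilds the node while keeping the alias. A small stand-alone sketch of that recursion shape, with an illustrative stand-in type:

use std::rc::Rc;

// Stand-in for the crate's Type::Pair; only the recursion shape matters here.
enum PairTy {
    Int,
    Pair {
        fst: Rc<PairTy>,
        snd: Rc<PairTy>,
        alias: Option<String>,
    },
}

// Rebuild a Pair by transforming both components, carrying the alias along.
fn map_ty(t: &Rc<PairTy>, f: &impl Fn(&Rc<PairTy>) -> Rc<PairTy>) -> Rc<PairTy> {
    match &**t {
        PairTy::Pair { fst, snd, alias } => Rc::new(PairTy::Pair {
            fst: map_ty(fst, f),
            snd: map_ty(snd, f),
            alias: alias.clone(),
        }),
        _ => f(t),
    }
}

fn main() {
    let t = Rc::new(PairTy::Pair {
        fst: Rc::new(PairTy::Int),
        snd: Rc::new(PairTy::Int),
        alias: Some("Entry".to_string()),
    });
    let mapped = map_ty(&t, &|leaf| leaf.clone());
    assert!(matches!(&*mapped, PairTy::Pair { alias: Some(a), .. } if a == "Entry"));
}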
|
@ -758,9 +819,12 @@ pub fn find_and_replace_generics(
|
|||
mono_types: &IndexMap<u64, Rc<Type>>,
|
||||
) -> Rc<Type> {
|
||||
if let Some(id) = tipo.get_generic() {
|
||||
// If a generic does not have a type we know of
|
||||
// like a None in option then just use same type
|
||||
mono_types.get(&id).unwrap_or(tipo).clone()
|
||||
mono_types
|
||||
.get(&id)
|
||||
.unwrap_or_else(|| {
|
||||
panic!("Unknown generic id {id:?} for type {tipo:?} in mono_types {mono_types:#?}");
|
||||
})
|
||||
.clone()
|
||||
} else if tipo.is_generic() {
|
||||
match &**tipo {
|
||||
Type::App {
|
||||
|
@ -823,6 +887,16 @@ pub fn find_and_replace_generics(
|
|||
TypeVar::Generic { .. } | TypeVar::Unbound { .. } => unreachable!(),
|
||||
}
|
||||
}
|
||||
Type::Pair { fst, snd, alias } => {
|
||||
let fst = find_and_replace_generics(fst, mono_types);
|
||||
let snd = find_and_replace_generics(snd, mono_types);
|
||||
Type::Pair {
|
||||
fst,
|
||||
snd,
|
||||
alias: alias.clone(),
|
||||
}
|
||||
.into()
|
||||
}
|
||||
}
|
||||
} else {
|
||||
tipo.clone()
|
||||
|
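The change to `find_and_replace_generics` above replaces the old silent `unwrap_or(tipo)` fallback with a panic, on the premise that every generic id must already be bound in `mono_types` by the time substitution runs. A minimal sketch of that substitution step under the same premise; the names and types below are illustrative only:

use std::collections::HashMap;

// Illustrative stand-ins: generic ids map to concrete types during
// monomorphisation, mirroring `mono_types: IndexMap<u64, Rc<Type>>`.
#[derive(Debug, Clone, PartialEq)]
enum MonoTy {
    Int,
    Generic(u64),
    List(Box<MonoTy>),
}

fn substitute(ty: &MonoTy, mono_types: &HashMap<u64, MonoTy>) -> MonoTy {
    match ty {
        MonoTy::Generic(id) => mono_types
            .get(id)
            .cloned()
            // As in the new code: a missing binding is a compiler bug, so fail
            // loudly instead of silently keeping the generic around.
            .unwrap_or_else(|| panic!("Unknown generic id {id:?}")),
        MonoTy::List(inner) => MonoTy::List(Box::new(substitute(inner, mono_types))),
        MonoTy::Int => MonoTy::Int,
    }
}

fn main() {
    let mut mono_types = HashMap::new();
    mono_types.insert(0_u64, MonoTy::Int);
    assert_eq!(
        substitute(&MonoTy::List(Box::new(MonoTy::Generic(0))), &mono_types),
        MonoTy::List(Box::new(MonoTy::Int))
    );
}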
@ -951,9 +1025,9 @@ impl TypeVar {
|
|||
}
|
||||
}
|
||||
|
||||
pub fn is_2_tuple(&self) -> bool {
|
||||
pub fn is_pair(&self) -> bool {
|
||||
match self {
|
||||
Self::Link { tipo } => tipo.is_2_tuple(),
|
||||
Self::Link { tipo } => tipo.is_pair(),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
@ -1001,13 +1075,6 @@ impl TypeVar {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_uplc_type(&self) -> Option<UplcType> {
|
||||
match self {
|
||||
Self::Link { tipo } => Some(tipo.get_uplc_type()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
|
||||
|
|
|
@ -8,10 +8,11 @@ use super::{
|
|||
use crate::{
|
||||
ast::{
|
||||
Annotation, CallArg, DataType, Definition, Function, ModuleConstant, ModuleKind,
|
||||
RecordConstructor, RecordConstructorArg, Span, TypeAlias, TypedDefinition, TypedPattern,
|
||||
UnqualifiedImport, UntypedArg, UntypedDefinition, Use, Validator, PIPE_VARIABLE,
|
||||
RecordConstructor, RecordConstructorArg, Span, TypeAlias, TypedDefinition, TypedFunction,
|
||||
TypedPattern, UnqualifiedImport, UntypedArg, UntypedDefinition, UntypedFunction, Use,
|
||||
Validator, PIPE_VARIABLE,
|
||||
},
|
||||
builtins::{function, generic_var, tuple, unbound_var},
|
||||
builtins::{function, generic_var, pair, tuple, unbound_var},
|
||||
tipo::{fields::FieldMap, TypeAliasAnnotation},
|
||||
IdGenerator,
|
||||
};
|
||||
|
@ -54,6 +55,12 @@ pub struct Environment<'a> {
|
|||
/// Values defined in the current module (or the prelude)
|
||||
pub module_values: HashMap<String, ValueConstructor>,
|
||||
|
||||
/// Top-level function definitions from the module
|
||||
pub module_functions: HashMap<String, &'a UntypedFunction>,
|
||||
|
||||
/// Top-level functions that have been inferred
|
||||
pub inferred_functions: HashMap<String, TypedFunction>,
|
||||
|
||||
previous_id: u64,
|
||||
|
||||
/// Values defined in the current function (or the prelude)
|
||||
|
@ -644,6 +651,13 @@ impl<'a> Environment<'a> {
|
|||
),
|
||||
alias.clone(),
|
||||
),
|
||||
Type::Pair { fst, snd, alias } => Type::with_alias(
|
||||
pair(
|
||||
self.instantiate(fst.clone(), ids, hydrator),
|
||||
self.instantiate(snd.clone(), ids, hydrator),
|
||||
),
|
||||
alias.clone(),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -700,9 +714,11 @@ impl<'a> Environment<'a> {
|
|||
previous_id: id_gen.next(),
|
||||
id_gen,
|
||||
ungeneralised_functions: HashSet::new(),
|
||||
inferred_functions: HashMap::new(),
|
||||
module_types: prelude.types.clone(),
|
||||
module_types_constructors: prelude.types_constructors.clone(),
|
||||
module_values: HashMap::new(),
|
||||
module_functions: HashMap::new(),
|
||||
imported_modules: HashMap::new(),
|
||||
unused_modules: HashMap::new(),
|
||||
unqualified_imported_names: HashMap::new(),
|
||||
|
@ -1194,6 +1210,8 @@ impl<'a> Environment<'a> {
|
|||
&fun.location,
|
||||
)?;
|
||||
|
||||
self.module_functions.insert(fun.name.clone(), fun);
|
||||
|
||||
if !fun.public {
|
||||
self.init_usage(fun.name.clone(), EntityKind::PrivateFunction, fun.location);
|
||||
}
|
||||
|
@ -1537,6 +1555,28 @@ impl<'a> Environment<'a> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
(
|
||||
Type::Pair {
|
||||
fst: lhs_fst,
|
||||
snd: lhs_snd,
|
||||
alias: _,
|
||||
},
|
||||
Type::Pair {
|
||||
fst: rhs_fst,
|
||||
snd: rhs_snd,
|
||||
alias: _,
|
||||
},
|
||||
) => {
|
||||
for (a, b) in [lhs_fst, lhs_snd].into_iter().zip([rhs_fst, rhs_snd]) {
|
||||
unify_enclosed_type(
|
||||
lhs.clone(),
|
||||
rhs.clone(),
|
||||
self.unify(a.clone(), b.clone(), location, false),
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
(
|
||||
Type::Fn {
|
||||
args: args1,
|
||||
|
@ -1794,6 +1834,12 @@ fn unify_unbound_type(tipo: Rc<Type>, own_id: u64, location: Span) -> Result<(),
|
|||
|
||||
Ok(())
|
||||
}
|
||||
Type::Pair { fst, snd, alias: _ } => {
|
||||
unify_unbound_type(fst.clone(), own_id, location)?;
|
||||
unify_unbound_type(snd.clone(), own_id, location)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
Type::Var { .. } => unreachable!(),
|
||||
}
|
||||
|
@ -1970,5 +2016,12 @@ pub(crate) fn generalise(t: Rc<Type>, ctx_level: usize) -> Rc<Type> {
|
|||
),
|
||||
alias.clone(),
|
||||
),
|
||||
Type::Pair { fst, snd, alias } => Type::with_alias(
|
||||
pair(
|
||||
generalise(fst.clone(), ctx_level),
|
||||
generalise(snd.clone(), ctx_level),
|
||||
),
|
||||
alias.clone(),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -508,18 +508,16 @@ If you really meant to return that last expression, try to replace it with the f
|
|||
name: String,
|
||||
},
|
||||
|
||||
#[error(
|
||||
"I tripped over an attempt to access tuple elements on something else than a tuple.\n"
|
||||
)]
|
||||
#[error("I tripped over an attempt to access elements on something that isn't indexable.\n")]
|
||||
#[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#tuples"))]
|
||||
#[diagnostic(code("illegal::tuple_index"))]
|
||||
#[diagnostic(code("illegal::indexable"))]
|
||||
#[diagnostic(help(
|
||||
r#"Because you used a tuple-index on an element, I assumed it had to be a tuple but instead I found something of type:
|
||||
r#"Because you used an ordinal index on an element, I assumed it had to be a tuple or a pair but instead I found something of type:
|
||||
|
||||
╰─▶ {type_info}"#,
|
||||
type_info = tipo.to_pretty(0).if_supports_color(Stdout, |s| s.red())
|
||||
))]
|
||||
NotATuple {
|
||||
NotIndexable {
|
||||
#[label]
|
||||
location: Span,
|
||||
tipo: Rc<Type>,
|
||||
|
@ -675,12 +673,25 @@ You can help me by providing a type-annotation for 'x', as such:
|
|||
#[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#tuples"))]
|
||||
#[diagnostic(code("invalid::tuple_index"))]
|
||||
TupleIndexOutOfBound {
|
||||
#[label]
|
||||
#[label("out of bounds")]
|
||||
location: Span,
|
||||
index: usize,
|
||||
size: usize,
|
||||
},
|
||||
|
||||
#[error(
|
||||
"I discovered an attempt to access the {} element of a {}.\n",
|
||||
Ordinal(*index + 1).to_string().if_supports_color(Stdout, |s| s.purple()),
|
||||
"Pair".if_supports_color(Stdout, |s| s.bright_blue()).if_supports_color(Stdout, |s| s.bold()),
|
||||
)]
|
||||
#[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#pairs"))]
|
||||
#[diagnostic(code("invalid::pair_index"))]
|
||||
PairIndexOutOfBound {
|
||||
#[label("out of bounds")]
|
||||
location: Span,
|
||||
index: usize,
|
||||
},
|
||||
|
||||
#[error(
|
||||
"I tripped over the following labeled argument: {}.\n",
|
||||
label.if_supports_color(Stdout, |s| s.purple())
|
||||
|
@ -1035,7 +1046,7 @@ impl ExtraData for Error {
|
|||
| Error::MissingVarInAlternativePattern { .. }
|
||||
| Error::MultiValidatorEqualArgs { .. }
|
||||
| Error::NonLocalClauseGuardVariable { .. }
|
||||
| Error::NotATuple { .. }
|
||||
| Error::NotIndexable { .. }
|
||||
| Error::NotExhaustivePatternMatch { .. }
|
||||
| Error::NotFn { .. }
|
||||
| Error::PositionalArgumentAfterLabeled { .. }
|
||||
|
@ -1045,6 +1056,7 @@ impl ExtraData for Error {
|
|||
| Error::RecursiveType { .. }
|
||||
| Error::RedundantMatchClause { .. }
|
||||
| Error::TupleIndexOutOfBound { .. }
|
||||
| Error::PairIndexOutOfBound { .. }
|
||||
| Error::UnexpectedLabeledArg { .. }
|
||||
| Error::UnexpectedLabeledArgInPattern { .. }
|
||||
| Error::UnknownLabels { .. }
|
||||
|
|
|
@ -1,11 +1,10 @@
|
|||
use std::{collections::BTreeMap, iter, ops::Deref};
|
||||
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::{
|
||||
ast, builtins,
|
||||
ast,
|
||||
builtins::{self},
|
||||
tipo::{self, environment::Environment, error::Error},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use std::{collections::BTreeMap, iter, ops::Deref};
|
||||
|
||||
const NIL_NAME: &str = "[]";
|
||||
const CONS_NAME: &str = "::";
|
||||
|
@ -87,8 +86,8 @@ impl PatternStack {
|
|||
Some(self.chain_tail_into_iter(vec![Pattern::Wildcard; arity].into_iter()))
|
||||
}
|
||||
Pattern::Literal(_) => unreachable!(
|
||||
"constructors and literals should never align in pattern match exhaustiveness checks."
|
||||
),
|
||||
"constructors and literals should never align in pattern match exhaustiveness checks."
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -598,6 +597,13 @@ pub(super) fn simplify(
|
|||
|
||||
Ok(Pattern::Constructor(name.to_string(), alts, args))
|
||||
}
|
||||
ast::Pattern::Pair { fst, snd, location } => simplify(
|
||||
environment,
|
||||
&ast::Pattern::Tuple {
|
||||
elems: vec![*fst.clone(), *snd.clone()],
|
||||
location: *location,
|
||||
},
|
||||
),
|
||||
ast::Pattern::Tuple { elems, .. } => {
|
||||
let mut args = vec![];
|
||||
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
use super::{
|
||||
environment::{assert_no_labeled_arguments, collapse_links, EntityKind, Environment},
|
||||
environment::{
|
||||
assert_no_labeled_arguments, collapse_links, generalise, EntityKind, Environment,
|
||||
},
|
||||
error::{Error, Warning},
|
||||
hydrator::Hydrator,
|
||||
pattern::PatternTyper,
|
||||
|
@ -9,14 +11,15 @@ use super::{
|
|||
use crate::{
|
||||
ast::{
|
||||
self, Annotation, Arg, ArgName, AssignmentKind, AssignmentPattern, BinOp, Bls12_381Point,
|
||||
ByteArrayFormatPreference, CallArg, ClauseGuard, Constant, Curve, IfBranch,
|
||||
ByteArrayFormatPreference, CallArg, ClauseGuard, Constant, Curve, Function, IfBranch,
|
||||
LogicalOpChainKind, Pattern, RecordUpdateSpread, Span, TraceKind, TraceLevel, Tracing,
|
||||
TypedArg, TypedCallArg, TypedClause, TypedClauseGuard, TypedIfBranch, TypedPattern,
|
||||
TypedRecordUpdateArg, UnOp, UntypedArg, UntypedAssignmentKind, UntypedClause,
|
||||
UntypedClauseGuard, UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg,
|
||||
UntypedClauseGuard, UntypedFunction, UntypedIfBranch, UntypedPattern,
|
||||
UntypedRecordUpdateArg,
|
||||
},
|
||||
builtins::{
|
||||
bool, byte_array, function, g1_element, g2_element, int, list, string, tuple, void,
|
||||
bool, byte_array, function, g1_element, g2_element, int, list, pair, string, tuple, void,
|
||||
},
|
||||
expr::{FnStyle, TypedExpr, UntypedExpr},
|
||||
format,
|
||||
|
@ -26,12 +29,126 @@ use crate::{
|
|||
use std::{cmp::Ordering, collections::HashMap, ops::Deref, rc::Rc};
|
||||
use vec1::Vec1;
|
||||
|
||||
pub(crate) fn infer_function(
|
||||
fun: &UntypedFunction,
|
||||
module_name: &str,
|
||||
hydrators: &mut HashMap<String, Hydrator>,
|
||||
environment: &mut Environment<'_>,
|
||||
lines: &LineNumbers,
|
||||
tracing: Tracing,
|
||||
) -> Result<Function<Rc<Type>, TypedExpr, TypedArg>, Error> {
|
||||
if let Some(typed_fun) = environment.inferred_functions.get(&fun.name) {
|
||||
return Ok(typed_fun.clone());
|
||||
};
|
||||
|
||||
let Function {
|
||||
doc,
|
||||
location,
|
||||
name,
|
||||
public,
|
||||
arguments,
|
||||
body,
|
||||
return_annotation,
|
||||
end_position,
|
||||
can_error,
|
||||
return_type: _,
|
||||
} = fun;
|
||||
|
||||
let preregistered_fn = environment
|
||||
.get_variable(name)
|
||||
.expect("Could not find preregistered type for function");
|
||||
|
||||
let field_map = preregistered_fn.field_map().cloned();
|
||||
|
||||
let preregistered_type = preregistered_fn.tipo.clone();
|
||||
|
||||
let (args_types, return_type) = preregistered_type
|
||||
.function_types()
|
||||
.unwrap_or_else(|| panic!("Preregistered type for fn {name} was not a fn"));
|
||||
|
||||
// Infer the type using the preregistered args + return types as a starting point
|
||||
let (tipo, arguments, body, safe_to_generalise) = environment.in_new_scope(|environment| {
|
||||
let args = arguments
|
||||
.iter()
|
||||
.zip(&args_types)
|
||||
.map(|(arg_name, tipo)| arg_name.to_owned().set_type(tipo.clone()))
|
||||
.collect();
|
||||
|
||||
let hydrator = hydrators
|
||||
.remove(name)
|
||||
.unwrap_or_else(|| panic!("Could not find hydrator for fn {name}"));
|
||||
|
||||
let mut expr_typer = ExprTyper::new(environment, hydrators, lines, tracing);
|
||||
|
||||
expr_typer.hydrator = hydrator;
|
||||
|
||||
let (args, body, return_type) =
|
||||
expr_typer.infer_fn_with_known_types(args, body.to_owned(), Some(return_type))?;
|
||||
|
||||
let args_types = args.iter().map(|a| a.tipo.clone()).collect();
|
||||
|
||||
let tipo = function(args_types, return_type);
|
||||
|
||||
let safe_to_generalise = !expr_typer.ungeneralised_function_used;
|
||||
|
||||
Ok::<_, Error>((tipo, args, body, safe_to_generalise))
|
||||
})?;
|
||||
|
||||
// Assert that the inferred type matches the type of any recursive call
|
||||
environment.unify(preregistered_type, tipo.clone(), *location, false)?;
|
||||
|
||||
// Generalise the function if safe to do so
|
||||
let tipo = if safe_to_generalise {
|
||||
environment.ungeneralised_functions.remove(name);
|
||||
|
||||
let tipo = generalise(tipo, 0);
|
||||
|
||||
let module_fn = ValueConstructorVariant::ModuleFn {
|
||||
name: name.clone(),
|
||||
field_map,
|
||||
module: module_name.to_owned(),
|
||||
arity: arguments.len(),
|
||||
location: *location,
|
||||
builtin: None,
|
||||
};
|
||||
|
||||
environment.insert_variable(name.clone(), module_fn, tipo.clone());
|
||||
|
||||
tipo
|
||||
} else {
|
||||
tipo
|
||||
};
|
||||
|
||||
let inferred_fn = Function {
|
||||
doc: doc.clone(),
|
||||
location: *location,
|
||||
name: name.clone(),
|
||||
public: *public,
|
||||
arguments,
|
||||
return_annotation: return_annotation.clone(),
|
||||
return_type: tipo
|
||||
.return_type()
|
||||
.expect("Could not find return type for fn"),
|
||||
body,
|
||||
can_error: *can_error,
|
||||
end_position: *end_position,
|
||||
};
|
||||
|
||||
environment
|
||||
.inferred_functions
|
||||
.insert(name.to_string(), inferred_fn.clone());
|
||||
|
||||
Ok(inferred_fn)
|
||||
}
|
||||
|
||||
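The relocated `infer_function` above now checks `environment.inferred_functions` first and records its result at the end, so a function pulled in on demand from another definition is only inferred once. A small sketch of that memoisation pattern, with placeholder names standing in for the real inference call:

use std::collections::HashMap;

// Illustrative memoisation of an expensive per-function analysis, keyed by
// the function name, in the spirit of `inferred_functions`.
fn infer_once<'a>(
    name: &str,
    cache: &'a mut HashMap<String, String>,
    run_inference: impl FnOnce() -> String,
) -> &'a String {
    if !cache.contains_key(name) {
        let inferred = run_inference();
        cache.insert(name.to_string(), inferred);
    }
    &cache[name]
}

fn main() {
    let mut cache = HashMap::new();
    let mut runs = 0;

    let first = infer_once("add", &mut cache, || {
        runs += 1;
        "fn(Int, Int) -> Int".to_string()
    })
    .clone();

    // The second request is served from the cache; inference does not run again.
    let second = infer_once("add", &mut cache, || {
        runs += 1;
        String::new()
    })
    .clone();

    assert_eq!(runs, 1);
    assert_eq!(first, second);
}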
#[derive(Debug)]
|
||||
pub(crate) struct ExprTyper<'a, 'b> {
|
||||
pub(crate) lines: &'a LineNumbers,
|
||||
|
||||
pub(crate) environment: &'a mut Environment<'b>,
|
||||
|
||||
pub(crate) hydrators: &'a mut HashMap<String, Hydrator>,
|
||||
|
||||
// We tweak the tracing behavior during type-check. Traces are either kept or left out of the
|
||||
// typed AST depending on this setting.
|
||||
pub(crate) tracing: Tracing,
|
||||
|
@ -46,6 +163,22 @@ pub(crate) struct ExprTyper<'a, 'b> {
|
|||
}
|
||||
|
||||
impl<'a, 'b> ExprTyper<'a, 'b> {
|
||||
pub fn new(
|
||||
environment: &'a mut Environment<'b>,
|
||||
hydrators: &'a mut HashMap<String, Hydrator>,
|
||||
lines: &'a LineNumbers,
|
||||
tracing: Tracing,
|
||||
) -> Self {
|
||||
Self {
|
||||
hydrator: Hydrator::new(),
|
||||
environment,
|
||||
hydrators,
|
||||
tracing,
|
||||
ungeneralised_function_used: false,
|
||||
lines,
|
||||
}
|
||||
}
|
||||
|
||||
fn check_when_exhaustiveness(
|
||||
&mut self,
|
||||
typed_clauses: &[TypedClause],
|
||||
|
@ -226,6 +359,8 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
|
||||
UntypedExpr::Tuple { location, elems } => self.infer_tuple(elems, location),
|
||||
|
||||
UntypedExpr::Pair { location, fst, snd } => self.infer_pair(*fst, *snd, location),
|
||||
|
||||
UntypedExpr::String { location, value } => Ok(self.infer_string(value, location)),
|
||||
|
||||
UntypedExpr::LogicalOpChain {
|
||||
|
@ -837,7 +972,11 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
.get(module)
|
||||
.and_then(|module| module.accessors.get(name)),
|
||||
|
||||
_something_without_fields => return Err(unknown_field(vec![])),
|
||||
Type::Pair { .. } => self.environment.accessors.get("Pair"),
|
||||
|
||||
_something_without_fields => {
|
||||
return Err(unknown_field(vec![]));
|
||||
}
|
||||
}
|
||||
.ok_or_else(|| unknown_field(vec![]))?;
|
||||
|
||||
|
@ -2016,6 +2155,26 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
}
|
||||
}
|
||||
|
||||
fn infer_pair(
|
||||
&mut self,
|
||||
fst: UntypedExpr,
|
||||
snd: UntypedExpr,
|
||||
location: Span,
|
||||
) -> Result<TypedExpr, Error> {
|
||||
let typed_fst = self.infer(fst)?;
|
||||
ensure_serialisable(false, typed_fst.tipo(), location)?;
|
||||
|
||||
let typed_snd = self.infer(snd)?;
|
||||
ensure_serialisable(false, typed_snd.tipo(), location)?;
|
||||
|
||||
Ok(TypedExpr::Pair {
|
||||
location,
|
||||
tipo: pair(typed_fst.tipo(), typed_snd.tipo()),
|
||||
fst: typed_fst.into(),
|
||||
snd: typed_snd.into(),
|
||||
})
|
||||
}
|
||||
|
||||
fn infer_tuple(&mut self, elems: Vec<UntypedExpr>, location: Span) -> Result<TypedExpr, Error> {
|
||||
let mut typed_elems = vec![];
|
||||
|
||||
|
@ -2039,13 +2198,13 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
|
||||
fn infer_tuple_index(
|
||||
&mut self,
|
||||
tuple: UntypedExpr,
|
||||
tuple_or_pair: UntypedExpr,
|
||||
index: usize,
|
||||
location: Span,
|
||||
) -> Result<TypedExpr, Error> {
|
||||
let tuple = self.infer(tuple)?;
|
||||
let tuple_or_pair = self.infer(tuple_or_pair)?;
|
||||
|
||||
let tipo = match *collapse_links(tuple.tipo()) {
|
||||
let tipo = match *collapse_links(tuple_or_pair.tipo()) {
|
||||
Type::Tuple {
|
||||
ref elems,
|
||||
alias: _,
|
||||
|
@ -2061,9 +2220,22 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
Ok(elems[index].clone())
|
||||
}
|
||||
}
|
||||
_ => Err(Error::NotATuple {
|
||||
Type::Pair {
|
||||
ref fst,
|
||||
ref snd,
|
||||
alias: _,
|
||||
} => {
|
||||
if index == 0 {
|
||||
Ok(fst.clone())
|
||||
} else if index == 1 {
|
||||
Ok(snd.clone())
|
||||
} else {
|
||||
Err(Error::PairIndexOutOfBound { location, index })
|
||||
}
|
||||
}
|
||||
_ => Err(Error::NotIndexable {
|
||||
location,
|
||||
tipo: tuple.tipo(),
|
||||
tipo: tuple_or_pair.tipo(),
|
||||
}),
|
||||
}?;
|
||||
|
||||
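The ordinal-index logic above dispatches on the subject's type: tuples are indexed by position, pairs only accept index 0 (`fst`) or 1 (`snd`), and anything else raises `NotIndexable`. A reduced sketch of that dispatch with stand-in types; the real typer works on `Rc<Type>` and produces a `TypedExpr`:

// Stand-in types; the real typer works on Rc<Type> and returns TypedExpr.
#[derive(Debug, Clone, PartialEq)]
enum ElemTy {
    Int,
    ByteArray,
}

enum Indexable {
    Tuple(Vec<ElemTy>),
    Pair(ElemTy, ElemTy),
}

#[derive(Debug, PartialEq)]
enum IndexError {
    TupleIndexOutOfBound { index: usize, size: usize },
    PairIndexOutOfBound { index: usize },
}

fn index_type(subject: &Indexable, index: usize) -> Result<ElemTy, IndexError> {
    match subject {
        Indexable::Tuple(elems) => elems.get(index).cloned().ok_or(
            IndexError::TupleIndexOutOfBound {
                index,
                size: elems.len(),
            },
        ),
        // A pair only ever has a first and a second element.
        Indexable::Pair(fst, snd) => match index {
            0 => Ok(fst.clone()),
            1 => Ok(snd.clone()),
            _ => Err(IndexError::PairIndexOutOfBound { index }),
        },
    }
}

fn main() {
    let p = Indexable::Pair(ElemTy::Int, ElemTy::ByteArray);
    assert_eq!(index_type(&p, 1), Ok(ElemTy::ByteArray));
    assert_eq!(
        index_type(&p, 2),
        Err(IndexError::PairIndexOutOfBound { index: 2 })
    );
}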
|
@ -2071,7 +2243,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
location,
|
||||
tipo,
|
||||
index,
|
||||
tuple: Box::new(tuple),
|
||||
tuple: Box::new(tuple_or_pair),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -2145,17 +2317,40 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
variables: self.environment.local_value_names(),
|
||||
})?;
|
||||
|
||||
// Note whether we are using an ungeneralised function so that we can
|
||||
// tell if it is safe to generalise this function after inference has
|
||||
// completed.
|
||||
if matches!(
|
||||
&constructor.variant,
|
||||
ValueConstructorVariant::ModuleFn { .. }
|
||||
) {
|
||||
if let ValueConstructorVariant::ModuleFn { name: fn_name, .. } =
|
||||
&constructor.variant
|
||||
{
|
||||
// Note whether we are using an ungeneralised function so that we can
|
||||
// tell if it is safe to generalise this function after inference has
|
||||
// completed.
|
||||
let is_ungeneralised = self.environment.ungeneralised_functions.contains(name);
|
||||
|
||||
self.ungeneralised_function_used =
|
||||
self.ungeneralised_function_used || is_ungeneralised;
|
||||
|
||||
// In case we use another function, infer it first before going further.
|
||||
// This ensures we have as much information possible about the function
|
||||
// when we start inferring expressions using it (i.e. calls).
|
||||
//
|
||||
// In a way, this achieves a cheap topological processing of definitions
|
||||
// where we infer used definitions first. And as a consequence, it solves
|
||||
// issues where expressions would be wrongly assigned generic variables
|
||||
// from other definitions.
|
||||
if let Some(fun) = self.environment.module_functions.remove(fn_name) {
|
||||
// NOTE: Recursive functions should not run into this multiple times.
|
||||
// If we have no hydrator for this function, it means that we have already
|
||||
// encountered it.
|
||||
if self.hydrators.get(&fun.name).is_some() {
|
||||
infer_function(
|
||||
fun,
|
||||
self.environment.current_module,
|
||||
self.hydrators,
|
||||
self.environment,
|
||||
self.lines,
|
||||
self.tracing,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Register the value as seen for detection of unused values
|
||||
|
@ -2284,20 +2479,6 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
|
|||
self.environment.instantiate(t, ids, &self.hydrator)
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
environment: &'a mut Environment<'b>,
|
||||
lines: &'a LineNumbers,
|
||||
tracing: Tracing,
|
||||
) -> Self {
|
||||
Self {
|
||||
hydrator: Hydrator::new(),
|
||||
environment,
|
||||
tracing,
|
||||
ungeneralised_function_used: false,
|
||||
lines,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_unbound_var(&mut self) -> Rc<Type> {
|
||||
self.environment.new_unbound_var()
|
||||
}
|
||||
|
@ -2339,6 +2520,7 @@ fn assert_no_assignment(expr: &UntypedExpr) -> Result<(), Error> {
|
|||
| UntypedExpr::Sequence { .. }
|
||||
| UntypedExpr::String { .. }
|
||||
| UntypedExpr::Tuple { .. }
|
||||
| UntypedExpr::Pair { .. }
|
||||
| UntypedExpr::TupleIndex { .. }
|
||||
| UntypedExpr::UnOp { .. }
|
||||
| UntypedExpr::Var { .. }
|
||||
|
@ -2442,5 +2624,9 @@ pub fn ensure_serialisable(allow_fn: bool, t: Rc<Type>, location: Span) -> Resul
|
|||
location,
|
||||
),
|
||||
},
|
||||
Type::Pair { fst, snd, .. } => {
|
||||
ensure_serialisable(false, fst.clone(), location)?;
|
||||
ensure_serialisable(false, snd.clone(), location)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,7 +5,7 @@ use super::{
|
|||
};
|
||||
use crate::{
|
||||
ast::Annotation,
|
||||
builtins::{function, tuple},
|
||||
builtins::{function, pair, tuple},
|
||||
tipo::Span,
|
||||
};
|
||||
use std::{collections::HashMap, rc::Rc};
|
||||
|
@ -246,6 +246,12 @@ impl Hydrator {
|
|||
|
||||
Ok(tuple(typed_elems))
|
||||
}
|
||||
Annotation::Pair { fst, snd, .. } => {
|
||||
let fst = self.do_type_from_annotation(fst, environment, unbounds)?;
|
||||
let snd = self.do_type_from_annotation(snd, environment, unbounds)?;
|
||||
|
||||
Ok(pair(fst, snd))
|
||||
}
|
||||
}?;
|
||||
|
||||
Ok(environment.annotate(return_type, annotation))
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use super::{
|
||||
environment::{generalise, EntityKind, Environment},
|
||||
environment::{EntityKind, Environment},
|
||||
error::{Error, UnifyErrorSituation, Warning},
|
||||
expr::ExprTyper,
|
||||
hydrator::Hydrator,
|
||||
|
@ -8,15 +8,13 @@ use super::{
|
|||
use crate::{
|
||||
ast::{
|
||||
Annotation, Arg, ArgName, ArgVia, DataType, Definition, Function, ModuleConstant,
|
||||
ModuleKind, RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedArg,
|
||||
TypedDefinition, TypedFunction, TypedModule, UntypedArg, UntypedDefinition, UntypedModule,
|
||||
Use, Validator,
|
||||
ModuleKind, RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedDefinition,
|
||||
TypedFunction, TypedModule, UntypedDefinition, UntypedModule, Use, Validator,
|
||||
},
|
||||
builtins,
|
||||
builtins::{function, fuzzer, generic_var},
|
||||
expr::{TypedExpr, UntypedExpr},
|
||||
builtins::{fuzzer, generic_var},
|
||||
line_numbers::LineNumbers,
|
||||
tipo::{Span, Type, TypeVar},
|
||||
tipo::{expr::infer_function, Span, Type, TypeVar},
|
||||
IdGenerator,
|
||||
};
|
||||
use std::{borrow::Borrow, collections::HashMap, ops::Deref, rc::Rc};
|
||||
|
@ -31,9 +29,10 @@ impl UntypedModule {
|
|||
tracing: Tracing,
|
||||
warnings: &mut Vec<Warning>,
|
||||
) -> Result<TypedModule, Error> {
|
||||
let name = self.name.clone();
|
||||
let module_name = self.name.clone();
|
||||
let docs = std::mem::take(&mut self.docs);
|
||||
let mut environment = Environment::new(id_gen.clone(), &name, &kind, modules, warnings);
|
||||
let mut environment =
|
||||
Environment::new(id_gen.clone(), &module_name, &kind, modules, warnings);
|
||||
|
||||
let mut type_names = HashMap::with_capacity(self.definitions.len());
|
||||
let mut value_names = HashMap::with_capacity(self.definitions.len());
|
||||
|
@ -50,14 +49,20 @@ impl UntypedModule {
|
|||
// earlier in the module.
|
||||
environment.register_types(
|
||||
self.definitions.iter().collect(),
|
||||
&name,
|
||||
&module_name,
|
||||
&mut hydrators,
|
||||
&mut type_names,
|
||||
)?;
|
||||
|
||||
// Register values so they can be used in functions earlier in the module.
|
||||
for def in self.definitions() {
|
||||
environment.register_values(def, &name, &mut hydrators, &mut value_names, kind)?;
|
||||
environment.register_values(
|
||||
def,
|
||||
&module_name,
|
||||
&mut hydrators,
|
||||
&mut value_names,
|
||||
kind,
|
||||
)?;
|
||||
}
|
||||
|
||||
// Infer the types of each definition in the module
|
||||
|
@ -83,7 +88,7 @@ impl UntypedModule {
|
|||
for def in consts.into_iter().chain(not_consts) {
|
||||
let definition = infer_definition(
|
||||
def,
|
||||
&name,
|
||||
&module_name,
|
||||
&mut hydrators,
|
||||
&mut environment,
|
||||
&self.lines,
|
||||
|
@ -96,7 +101,7 @@ impl UntypedModule {
|
|||
// Generalise functions now that the entire module has been inferred
|
||||
let definitions = definitions
|
||||
.into_iter()
|
||||
.map(|def| environment.generalise_definition(def, &name))
|
||||
.map(|def| environment.generalise_definition(def, &module_name))
|
||||
.collect();
|
||||
|
||||
// Generate warnings for unused items
|
||||
|
@ -105,7 +110,7 @@ impl UntypedModule {
|
|||
// Remove private and imported types and values to create the public interface
|
||||
environment
|
||||
.module_types
|
||||
.retain(|_, info| info.public && info.module == name);
|
||||
.retain(|_, info| info.public && info.module == module_name);
|
||||
|
||||
environment.module_values.retain(|_, info| info.public);
|
||||
|
||||
|
@ -134,12 +139,12 @@ impl UntypedModule {
|
|||
|
||||
Ok(TypedModule {
|
||||
docs,
|
||||
name: name.clone(),
|
||||
name: module_name.clone(),
|
||||
definitions,
|
||||
kind,
|
||||
lines: self.lines,
|
||||
type_info: TypeInfo {
|
||||
name,
|
||||
name: module_name,
|
||||
types,
|
||||
types_constructors,
|
||||
values,
|
||||
|
@ -162,7 +167,7 @@ fn infer_definition(
|
|||
) -> Result<TypedDefinition, Error> {
|
||||
match def {
|
||||
Definition::Fn(f) => Ok(Definition::Fn(infer_function(
|
||||
f,
|
||||
&f,
|
||||
module_name,
|
||||
hydrators,
|
||||
environment,
|
||||
|
@ -219,19 +224,8 @@ fn infer_definition(
|
|||
};
|
||||
}
|
||||
|
||||
let Definition::Fn(mut typed_fun) = infer_definition(
|
||||
Definition::Fn(fun),
|
||||
module_name,
|
||||
hydrators,
|
||||
environment,
|
||||
lines,
|
||||
tracing,
|
||||
)?
|
||||
else {
|
||||
unreachable!(
|
||||
"validator definition inferred as something other than a function?"
|
||||
)
|
||||
};
|
||||
let mut typed_fun =
|
||||
infer_function(&fun, module_name, hydrators, environment, lines, tracing)?;
|
||||
|
||||
if !typed_fun.return_type.is_bool() {
|
||||
return Err(Error::ValidatorMustReturnBool {
|
||||
|
@ -270,19 +264,14 @@ fn infer_definition(
|
|||
let params = params.into_iter().chain(other.arguments);
|
||||
other.arguments = params.collect();
|
||||
|
||||
let Definition::Fn(mut other_typed_fun) = infer_definition(
|
||||
Definition::Fn(other),
|
||||
let mut other_typed_fun = infer_function(
|
||||
&other,
|
||||
module_name,
|
||||
hydrators,
|
||||
environment,
|
||||
lines,
|
||||
tracing,
|
||||
)?
|
||||
else {
|
||||
unreachable!(
|
||||
"validator definition inferred as something other than a function?"
|
||||
)
|
||||
};
|
||||
)?;
|
||||
|
||||
if !other_typed_fun.return_type.is_bool() {
|
||||
return Err(Error::ValidatorMustReturnBool {
|
||||
|
@ -341,8 +330,8 @@ fn infer_definition(
|
|||
});
|
||||
}
|
||||
|
||||
let typed_via =
|
||||
ExprTyper::new(environment, lines, tracing).infer(arg.via.clone())?;
|
||||
let typed_via = ExprTyper::new(environment, hydrators, lines, tracing)
|
||||
.infer(arg.via.clone())?;
|
||||
|
||||
let hydrator: &mut Hydrator = hydrators.get_mut(&f.name).unwrap();
|
||||
|
||||
|
@ -406,7 +395,7 @@ fn infer_definition(
|
|||
}?;
|
||||
|
||||
let typed_f = infer_function(
|
||||
f.into(),
|
||||
&f.into(),
|
||||
module_name,
|
||||
hydrators,
|
||||
environment,
|
||||
|
@ -635,8 +624,8 @@ fn infer_definition(
|
|||
value,
|
||||
tipo: _,
|
||||
}) => {
|
||||
let typed_expr =
|
||||
ExprTyper::new(environment, lines, tracing).infer_const(&annotation, *value)?;
|
||||
let typed_expr = ExprTyper::new(environment, hydrators, lines, tracing)
|
||||
.infer_const(&annotation, *value)?;
|
||||
|
||||
let tipo = typed_expr.tipo();
|
||||
|
||||
|
@ -671,106 +660,6 @@ fn infer_definition(
|
|||
}
|
||||
}
|
||||
|
||||
fn infer_function(
|
||||
f: Function<(), UntypedExpr, UntypedArg>,
|
||||
module_name: &String,
|
||||
hydrators: &mut HashMap<String, Hydrator>,
|
||||
environment: &mut Environment<'_>,
|
||||
lines: &LineNumbers,
|
||||
tracing: Tracing,
|
||||
) -> Result<Function<Rc<Type>, TypedExpr, TypedArg>, Error> {
|
||||
let Function {
|
||||
doc,
|
||||
location,
|
||||
name,
|
||||
public,
|
||||
arguments,
|
||||
body,
|
||||
return_annotation,
|
||||
end_position,
|
||||
can_error,
|
||||
return_type: _,
|
||||
} = f;
|
||||
|
||||
let preregistered_fn = environment
|
||||
.get_variable(&name)
|
||||
.expect("Could not find preregistered type for function");
|
||||
|
||||
let field_map = preregistered_fn.field_map().cloned();
|
||||
|
||||
let preregistered_type = preregistered_fn.tipo.clone();
|
||||
|
||||
let (args_types, return_type) = preregistered_type
|
||||
.function_types()
|
||||
.expect("Preregistered type for fn was not a fn");
|
||||
|
||||
// Infer the type using the preregistered args + return types as a starting point
|
||||
let (tipo, arguments, body, safe_to_generalise) = environment.in_new_scope(|environment| {
|
||||
let args = arguments
|
||||
.into_iter()
|
||||
.zip(&args_types)
|
||||
.map(|(arg_name, tipo)| arg_name.set_type(tipo.clone()))
|
||||
.collect();
|
||||
|
||||
let mut expr_typer = ExprTyper::new(environment, lines, tracing);
|
||||
|
||||
expr_typer.hydrator = hydrators
|
||||
.remove(&name)
|
||||
.expect("Could not find hydrator for fn");
|
||||
|
||||
let (args, body, return_type) =
|
||||
expr_typer.infer_fn_with_known_types(args, body, Some(return_type))?;
|
||||
|
||||
let args_types = args.iter().map(|a| a.tipo.clone()).collect();
|
||||
|
||||
let tipo = function(args_types, return_type);
|
||||
|
||||
let safe_to_generalise = !expr_typer.ungeneralised_function_used;
|
||||
|
||||
Ok::<_, Error>((tipo, args, body, safe_to_generalise))
|
||||
})?;
|
||||
|
||||
// Assert that the inferred type matches the type of any recursive call
|
||||
environment.unify(preregistered_type, tipo.clone(), location, false)?;
|
||||
|
||||
// Generalise the function if safe to do so
|
||||
let tipo = if safe_to_generalise {
|
||||
environment.ungeneralised_functions.remove(&name);
|
||||
|
||||
let tipo = generalise(tipo, 0);
|
||||
|
||||
let module_fn = ValueConstructorVariant::ModuleFn {
|
||||
name: name.clone(),
|
||||
field_map,
|
||||
module: module_name.to_owned(),
|
||||
arity: arguments.len(),
|
||||
location,
|
||||
builtin: None,
|
||||
};
|
||||
|
||||
environment.insert_variable(name.clone(), module_fn, tipo.clone());
|
||||
|
||||
tipo
|
||||
} else {
|
||||
tipo
|
||||
};
|
||||
|
||||
Ok(Function {
|
||||
doc,
|
||||
location,
|
||||
name,
|
||||
public,
|
||||
arguments,
|
||||
return_annotation,
|
||||
return_type: tipo
|
||||
.return_type()
|
||||
.expect("Could not find return type for fn"),
|
||||
body,
|
||||
can_error,
|
||||
end_position,
|
||||
})
|
||||
}
|
||||
|
||||
fn infer_fuzzer(
|
||||
environment: &mut Environment<'_>,
|
||||
expected_inner_type: Option<Rc<Type>>,
|
||||
|
@ -843,7 +732,7 @@ fn infer_fuzzer(
|
|||
}),
|
||||
},
|
||||
|
||||
Type::App { .. } | Type::Tuple { .. } => Err(could_not_unify()),
|
||||
Type::App { .. } | Type::Tuple { .. } | Type::Pair { .. } => Err(could_not_unify()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -894,5 +783,14 @@ fn annotate_fuzzer(tipo: &Type, location: &Span) -> Result<Annotation, Error> {
|
|||
location: *location,
|
||||
tipo: Rc::new(tipo.clone()),
|
||||
}),
|
||||
Type::Pair { fst, snd, .. } => {
|
||||
let fst = annotate_fuzzer(fst, location)?;
|
||||
let snd = annotate_fuzzer(snd, location)?;
|
||||
Ok(Annotation::Pair {
|
||||
fst: Box::new(fst),
|
||||
snd: Box::new(snd),
|
||||
location: *location,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,7 +8,7 @@ use super::{
|
|||
};
|
||||
use crate::{
|
||||
ast::{CallArg, Pattern, Span, TypedPattern, UntypedPattern},
|
||||
builtins::{int, list, tuple},
|
||||
builtins::{int, list, pair, tuple},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use std::{
|
||||
|
@ -236,6 +236,46 @@ impl<'a, 'b> PatternTyper<'a, 'b> {
|
|||
}),
|
||||
},
|
||||
|
||||
Pattern::Pair { fst, snd, location } => match collapse_links(tipo.clone()).deref() {
|
||||
Type::Pair {
|
||||
fst: t_fst,
|
||||
snd: t_snd,
|
||||
..
|
||||
} => {
|
||||
let fst = Box::new(self.unify(*fst, t_fst.clone(), None, false)?);
|
||||
let snd = Box::new(self.unify(*snd, t_snd.clone(), None, false)?);
|
||||
Ok(Pattern::Pair { fst, snd, location })
|
||||
}
|
||||
|
||||
Type::Var { .. } => {
|
||||
let t_fst = self.environment.new_unbound_var();
|
||||
let t_snd = self.environment.new_unbound_var();
|
||||
|
||||
self.environment.unify(
|
||||
pair(t_fst.clone(), t_snd.clone()),
|
||||
tipo,
|
||||
location,
|
||||
false,
|
||||
)?;
|
||||
|
||||
let fst = Box::new(self.unify(*fst, t_fst, None, false)?);
|
||||
let snd = Box::new(self.unify(*snd, t_snd, None, false)?);
|
||||
|
||||
Ok(Pattern::Pair { fst, snd, location })
|
||||
}
|
||||
|
||||
_ => Err(Error::CouldNotUnify {
|
||||
given: pair(
|
||||
self.environment.new_unbound_var(),
|
||||
self.environment.new_unbound_var(),
|
||||
),
|
||||
expected: tipo,
|
||||
situation: None,
|
||||
location,
|
||||
rigid_type_names: HashMap::new(),
|
||||
}),
|
||||
},
|
||||
|
||||
Pattern::Tuple { elems, location } => match collapse_links(tipo.clone()).deref() {
|
||||
Type::Tuple {
|
||||
elems: type_elems, ..
|
||||
|
|
|
@ -86,6 +86,9 @@ impl Printer {
|
|||
Type::Var { tipo: typ, .. } => self.type_var_doc(&typ.borrow()),
|
||||
|
||||
Type::Tuple { elems, .. } => self.args_to_aiken_doc(elems).surround("(", ")"),
|
||||
Type::Pair { fst, snd, .. } => self
|
||||
.args_to_aiken_doc(&[fst.clone(), snd.clone()])
|
||||
.surround("Pair<", ">"),
|
||||
}
|
||||
}
|
||||
|
||||
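The printer change above renders the new variant as `Pair<fst, snd>` (and, just below, unbound type variables as `?`). A tiny sketch of the expected textual shape only; the real printer builds a pretty-printing `Document` and re-uses `args_to_aiken_doc` for the two components:

// Sketch of the rendered shape only; not the crate's actual printer code.
fn render_pair(fst: &str, snd: &str) -> String {
    format!("Pair<{fst}, {snd}>")
}

fn main() {
    assert_eq!(render_pair("Int", "ByteArray"), "Pair<Int, ByteArray>");
}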
|
@ -120,7 +123,8 @@ impl Printer {
|
|||
fn type_var_doc<'a>(&mut self, typ: &TypeVar) -> Document<'a> {
|
||||
match typ {
|
||||
TypeVar::Link { tipo: ref typ, .. } => self.print(typ),
|
||||
TypeVar::Unbound { id, .. } | TypeVar::Generic { id, .. } => self.generic_type_var(*id),
|
||||
TypeVar::Generic { id, .. } => self.generic_type_var(*id),
|
||||
TypeVar::Unbound { .. } => "?".to_doc(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -246,6 +250,21 @@ fn resolve_alias(
|
|||
result
|
||||
}
|
||||
|
||||
(
|
||||
Annotation::Pair { fst, snd, .. },
|
||||
Type::Pair {
|
||||
fst: t_fst,
|
||||
snd: t_snd,
|
||||
..
|
||||
},
|
||||
) => {
|
||||
let mut result = None;
|
||||
for (ann, t) in [fst, snd].into_iter().zip([t_fst, t_snd]) {
|
||||
result = result.or_else(|| resolve_one(parameter, ann, t.clone()));
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
(Annotation::Var { name, .. }, ..) if name == parameter => Some(typ),
|
||||
|
||||
_ => None,
|
||||
|
@ -455,7 +474,7 @@ mod tests {
|
|||
tipo: Rc::new(RefCell::new(TypeVar::Unbound { id: 2231 })),
|
||||
alias: None,
|
||||
},
|
||||
"a",
|
||||
"?",
|
||||
);
|
||||
assert_string!(
|
||||
function(
|
||||
|
@ -468,7 +487,7 @@ mod tests {
|
|||
alias: None,
|
||||
}),
|
||||
),
|
||||
"fn(a) -> b",
|
||||
"fn(?) -> ?",
|
||||
);
|
||||
assert_string!(
|
||||
function(
|
||||
|
|
|
@ -3,7 +3,7 @@ name = "aiken-project"
|
|||
description = "Aiken project utilities"
|
||||
version = "1.0.26-alpha"
|
||||
edition = "2021"
|
||||
repository = "https://github.com/aiken-lang/aiken/crates/project"
|
||||
repository = "https://github.com/aiken-lang/aiken"
|
||||
homepage = "https://github.com/aiken-lang/aiken"
|
||||
license = "Apache-2.0"
|
||||
authors = [
|
||||
|
|
|
@ -145,6 +145,13 @@ impl Reference {
|
|||
elems = Self::from_types(elems, type_parameters)
|
||||
),
|
||||
},
|
||||
Type::Pair { fst, snd, .. } => Self {
|
||||
inner: format!(
|
||||
"Pair{fst}{snd}",
|
||||
fst = Self::from_type(fst, type_parameters),
|
||||
snd = Self::from_type(snd, type_parameters)
|
||||
),
|
||||
},
|
||||
|
||||
// NOTE:
|
||||
//
|
||||
|
|
|
@ -384,6 +384,7 @@ impl Annotated<Schema> {
|
|||
}
|
||||
},
|
||||
Type::Fn { .. } => unreachable!(),
|
||||
Type::Pair { .. } => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -9,7 +9,7 @@ Schema {
|
|||
Var {
|
||||
tipo: RefCell {
|
||||
value: Generic {
|
||||
id: 33,
|
||||
id: 35,
|
||||
},
|
||||
},
|
||||
alias: None,
|
||||
|
|
|
@ -10,8 +10,8 @@ description: "Code:\n\ntype Dict<key, value> {\n inner: List<(ByteArray, valu
|
|||
"$ref": "#/definitions/test_module~1Dict$test_module~1UUID_Int"
|
||||
}
|
||||
},
|
||||
"compiledCode": "59014e010000323232323232323232232253330054a22930a9980324811856616c696461746f722072657475726e65642066616c736500136563253330043370e900018031baa0011325333009001153300600416132533300a300c002132498c8cc004004008894ccc03000452613233003003300f0023232325333333012001153300c00a16153300c00a16153300c00a161375a0022a660180142c601a00464a6666660220022a660160122c2a660160122c2a660160122c2a660160122c26eb8004c02c004c03400454cc01c01458c94cccccc03400454cc01c014584dd58008a998038028b0a998038028b0a998038028b180500098039baa001153300500316533333300a001100115330040021615330040021615330040021615330040021649011972656465656d65723a20446963743c555549442c20496e743e005734ae7155ceaab9e5573eae855d12ba41",
|
||||
"hash": "de6d51e2a272ec0ab73566bbb32700ad5864fdd01290dd925e35ebb4",
|
||||
"compiledCode": "59019c010000323232323232323232232253330054a22930a9980324811856616c696461746f722072657475726e65642066616c736500136563253330043370e900018031baa0011325333009001153300600416132533300a300c002132498c8cc004004008894ccc03000452613233003003300f0023232533300e001153300b00916132325333010001153300d00b1613253330113013002149854cc03803058c94cccccc05000454cc0380305854cc0380305854cc038030584dd68008a998070060b180880098088011929999998090008a998060050b0a998060050b0a998060050b0a998060050b09bae001300f0015333333010001153300a00816153300a00816137580022a660140102c2a660140102c601a0022a6600e00a2c64a66666601a0022a6600e00a2c2a6600e00a2c26eb000454cc01c0145854cc01c01458c028004c01cdd50008a998028018b299999980500088008a998020010b0a998020010b0a998020010b0a998020010b2491972656465656d65723a20446963743c555549442c20496e743e005734ae7155ceaab9e5573eae855d12ba41",
|
||||
"hash": "6027685dde99d967b45333852fe9f59531237d85fcb6b6feb2890672",
|
||||
"definitions": {
|
||||
"ByteArray": {
|
||||
"dataType": "bytes"
|
||||
|
|
|
@ -16,8 +16,8 @@ description: "Code:\n\npub type LinkedList<a> {\n Cons(a, LinkedList<a>)\n Nil
|
|||
"$ref": "#/definitions/test_module~1LinkedList$Int"
|
||||
}
|
||||
},
|
||||
"compiledCode": "590409010000323232323232323232322323232322322533300b4a22930a998062491856616c696461746f722072657475726e65642066616c736500136563300353333330100011001153300a00716153300a00716153300a00716153300a00716007323253330093005001132533300e001153300b00916132533300f3011002132498c8cc00400400888c94ccc038c0280044c94ccc04c00454cc040038584c8c94ccc05400454cc048040584c94ccc058c0600084c926330070070011533013011163253333330190011001153301301116153301301116153301301116153301301116301600130160023253333330170011325333015301400115333011300c301300114a22a666022601a6026002294054cc0480405854cc04804058dd50008a998088078b0a998088078b0a998088078b0a998088078b180a00098089baa0021533300e300900115333012301137540042930a998078068b0a998078068b18079baa001153300c00a163253333330120011001153300c00a16153300c00a16153300c00a16153300c00a16300f001300c37540042a6660126008002264a66601c0022a660160122c26464a6660200022a6601a0162c264a66602260260042649319191980080099299999980b8008a998088078b0a998088078b09bac001153301100f16153301100f16301200322533301400114984c8cc00c00cc05c008c8cc02d4cccccc060004400454cc0480405854cc0480405854cc0480405854cc04804058040c054004c94cccccc05400454cc03c0345854cc03c0345854cc03c0345854cc03c034584dd700098078008a998070060b19299999980a0008a998070060b0a998070060b09929998090008a998078068b09929998098008a998080070b0a999809980a800899bb030140023014001153301000e16301400137580022a6601c0182c2a6601c0182c6022002602200464a6666660240022a660180142c2a660180142c2a660180142c26eb400454cc03002858c03c004c030dd50010a998050040b18051baa001533333300e00410041533008006161533008006161533008006161533008006162232330010010032232533300b30070011325333010001153300d00516132325333012001153300f0071613253330133015002132498cc01c01c00454cc04002058c94cccccc058004400454cc0400205854cc0400205854cc0400205854cc04002058c04c004c04c008c94cccccc05000454cc0380185854cc0380185854cc038018584dd68008a998070030b180880098071baa0021533300b30060011533300f300e37540042930a998060020b0a998060020b18061baa001370e90011b87480012411972656465656d65723a204c696e6b65644c6973743c496e743e0049010a646174756d3a20466f6f005734ae7155ceaab9e5573eae855d12ba41",
|
||||
"hash": "451dccdc86f334c88e491fba78784300aa8f5523298f83b17b258d7d",
|
||||
"compiledCode": "590403010000323232323232323232322323232322322533300b4a22930a998062491856616c696461746f722072657475726e65642066616c736500136563300353333330100011001153300a00716153300a00716153300a00716153300a00716007323253330093005001132533300e001153300b00916132533300f3011002132498c8cc00400400888c94ccc038c0280044c94ccc04c00454cc040038584c8c94ccc05400454cc048040584c94ccc058c0600084c926330070070011533013011163253333330190011001153301301116153301301116153301301116153301301116301600130160023253333330170011325333015301400115333011300c301300114a22a666022601a6026002294054cc0480405854cc04804058dd50008a998088078b0a998088078b0a998088078b0a998088078b180a00098089baa0021533300e300900115333012301137540042930a998078068b0a998078068b18079baa001153300c00a163253333330120011001153300c00a16153300c00a16153300c00a16153300c00a16300f001300c37540042a6660126008002264a66601c0022a660160122c26464a6660200022a6601a0162c264a66602260260042649329998088008a998070060b0991929998098008a998080070b099299980a180b00109924c646600200200444a66602c00229309919801801980c8011919806a99999980d00088008a9980a0090b0a9980a0090b0a9980a0090b0a9980a0090b009180b8008a998088078b19299999980b8008a998088078b0a998088078b09bac001153301100f16153301100f1630140013014002325333333015001153300f00d16153300f00d16153300f00d16153300f00d161375c00260240022a6601c0182c64a6666660280022a6601c0182c2a6601c0182c26eb000454cc0380305854cc03803058c044004c044008c94cccccc04800454cc0300285854cc0300285854cc030028584dd68008a998060050b180780098061baa002153300a00816300a3754002a66666601c00820082a6601000c2c2a6601000c2c2a6601000c2c2a6601000c2c4464660020020064464a666016600e002264a6660200022a6601a00a2c26464a6660240022a6601e00e2c264a666026602a00426493198038038008a998080040b19299999980b00088008a998080040b0a998080040b0a998080040b0a998080040b1809800980980119299999980a0008a998070030b0a998070030b0a998070030b09bad001153300e006163011001300e37540042a666016600c0022a66601e601c6ea8008526153300c00416153300c00416300c37540026e1d2002370e90002491972656465656d65723a204c696e6b65644c6973743c496e743e0049010a646174756d3a20466f6f005734ae7155ceaab9e5573eae855d12ba41",
|
||||
"hash": "ff1413d8a35753076ff26df84e7829fde430f9920208fe0ba8ae3c52",
|
||||
"definitions": {
|
||||
"Bool": {
|
||||
"title": "Bool",
|
||||
|
|
|
@ -16,8 +16,8 @@ description: "Code:\n\nvalidator {\n fn tuples(datum: (Int, ByteArray), redeeme
|
|||
"$ref": "#/definitions/Tuple$Int_Int_Int"
|
||||
}
|
||||
},
|
||||
"compiledCode": "5901fc0100003232323232323232323223232232322533300a4a22930a99805a491856616c696461746f722072657475726e65642066616c73650013656533300c00115330090061613232533300e001153300b00816132325333010001153300d00a1613253330113013002149854cc03802c58c94cccccc05000454cc03802c5854cc03802c5854cc03802c584dd68008a998070058b180880098088011929999998090008a998060048b0a998060048b0a998060048b09bad001153300c00916300f001300f002325333333010001153300a00716153300a00716153300a007161375a0022a6601400e2c601a002a66666601c0022a6601000a2c2a6601000a2c26eb000454cc0200145854cc02001458c8c94cccccc03800454cc0200185854cc0200185854cc0200185854cc020018584dd700098048011929999998068008a998038028b0a998038028b0a998038028b09bad0011533007005163007001533333300b0011533005003161533005003161325333009001153300600416132533300a0011533007005161533300a300c001133760601600460160022a6600e00a2c60160026eb000454cc01400c5854cc01400c592411972656465656d65723a2028496e742c20496e742c20496e742900490117646174756d3a2028496e742c2042797465417272617929005734ae7155ceaab9e5573eae855d12ba41",
|
||||
"hash": "9f4b38854cc56274f9baee929c3de458a0375d56fd5b47e8fe36f063",
|
||||
"compiledCode": "5901ed010000323232323232323223232232322533300a4a22930a99805a491856616c696461746f722072657475726e65642066616c73650013656533300a00115330090061613232533300c001153300b0081613232533300e001153300d00a16132533300f3011002149854cc03802c58c94cccccc04800454cc03802c5854cc03802c5854cc03802c584dd68008a998070058b180780098078011929999998080008a998060048b0a998060048b0a998060048b09bad001153300c00916300d001300d00232533333300e001153300a00716153300a00716153300a007161375a0022a6601400e2c6016002a6666660180022a6601000a2c2a6601000a2c26eb000454cc0200145854cc020014594ccc01c00454cc018010584c8c94ccc02400454cc020018584c94ccc028c03000852615330090071632533333300d0011533009007161533009007161533009007161533009007161375c0026014002601400464a6666660160022a6600e00a2c2a6600e00a2c2a6600e00a2c26eb400454cc01c01458c0200054cccccc02400454cc01400c5854cc01400c584dd60008a998028018b0a998028018b24811972656465656d65723a2028496e742c20496e742c20496e742900490117646174756d3a2028496e742c2042797465417272617929005734ae7155cfaba15744ae91",
|
||||
"hash": "9d5de9c290eb221450ae3f039b1ce3b9057f06cff7498d761e223eee",
|
||||
"definitions": {
|
||||
"ByteArray": {
|
||||
"dataType": "bytes"
|
||||
|
|
|
@ -9,7 +9,7 @@ Schema {
|
|||
Var {
|
||||
tipo: RefCell {
|
||||
value: Generic {
|
||||
id: 33,
|
||||
id: 35,
|
||||
},
|
||||
},
|
||||
alias: None,
|
||||
|
|
|
@ -18,8 +18,8 @@ description: "Code:\n\npub type Foo<a> {\n Empty\n Bar(a, Foo<a>)\n}\n\npub fn
|
|||
}
|
||||
}
|
||||
],
|
||||
"compiledCode": "5901c3010000323232323222323232323253330083002300937540062a666010600460126ea801052000001001132323232533300b3004300c375400c2646464a66601c600e601e6ea80284c8cdc019b80003375a60240026600c0046024602600260206ea8028010c040c044008dd6980780098069baa006001132533300b3005300c375400c2a666016600860186ea801c4c8cdc01bad300f001330034c103d8798000300f3010001300d375400e00200226466e00dd698070009980118071807800a60103d8798000300c375400a600200244464646464a66601e601260206ea800854ccc03cc024c040dd50018a4000002002264a66601e601060206ea80084c8c8c94ccc048c02cc04cdd500309919b80337000066eb4c058004ccc02c02c008c058c05c004c050dd5003002180a180a8011bad301300130113754004002264a66601e601260206ea800854ccc03cc020c040dd500189919b80375a6026002666010010980103d8798000301330140013011375400600200226466e00dd6980900099980380398091809800a60103d879800030103754002601c004601c00266ec0008004dc3a40046e1d20003006002300600133760004002ae6955ceaab9e5742ae89",
|
||||
"hash": "4ce96c928b3be798496fca0ec3666d15d09004115df638801715b5e8",
|
||||
"compiledCode": "5901d501000032323232323222323232323253330083002300937540062a666010600460126ea801052000001001132323232533300b3004300c375400c2646464a66601c600e601e6ea80284c8cdc019b80003375a60260026600c0046026602800260206ea8028010c044c048008dd6980800098069baa006001132533300b3005300c375400c2a666016600860186ea801c4c8cdc01bad3010001330034c103d879800030103011001300d375400e00200226466e00dd698078009980118079808000a60103d8798000300c375400a600200244464646464a66601e601260206ea800854ccc03cc024c040dd50018a4000002002264a66601e601060206ea80084c8c8c94ccc048c02cc04cdd500309919b80337000066eb4c05c004ccc02c02c008c05cc060004c050dd5003002180a980b0011bad301400130113754004002264a66601e601260206ea800854ccc03cc020c040dd500189919b80375a6028002666010010980103d8798000301430150013011375400600200226466e00dd698098009998038039809980a000a60103d8798000301037540026022004602060220026601c0046601c00297ae0370e90011b8748000c024008c020c024004cc018008cc0180052f5c0ae6955ceaab9e5740ae855d101",
|
||||
"hash": "dca86b6e092019b67ef310ba8360682d7bf8284cc728c6b525fb0b0d",
|
||||
"definitions": {
|
||||
"Int": {
|
||||
"dataType": "integer"
|
||||
|
|
|
@ -70,6 +70,7 @@ impl Test {
|
|||
.map(|cst| (cst, side.tipo()))
|
||||
};
|
||||
|
||||
// The assertion has already been evaluated at this point, so it's not just a normal assertion
|
||||
Some(Assertion {
|
||||
bin_op,
|
||||
head: as_constant(generator, head.expect("cannot be Err at this point")),
|
||||
|
@ -1128,10 +1129,16 @@ impl Assertion<UntypedExpr> {
|
|||
.to_string()
|
||||
};
|
||||
|
||||
// head did not map to a constant
|
||||
if self.head.is_err() {
|
||||
return red("program failed");
|
||||
}
|
||||
|
||||
// any value in tail did not map to a constant
|
||||
if self.tail.is_err() {
|
||||
return red("program failed");
|
||||
}
|
||||
|
||||
fn fmt_side(side: &UntypedExpr, stream: Stream) -> String {
|
||||
let __ = "│".if_supports_color(stream, |s| s.red());
|
||||
|
||||
|
|
File diff suppressed because it is too large
|
@ -13,6 +13,12 @@ authors = [
|
|||
]
|
||||
rust-version = "1.66.1"
|
||||
|
||||
[package.metadata.wix]
|
||||
upgrade-guid = "288B160D-418A-4558-91B9-7C38CFD789C7"
|
||||
path-guid = "4EB8FCD6-261B-4F6C-B7DB-CFA67B4E6960"
|
||||
license = false
|
||||
eula = false
|
||||
|
||||
[dependencies]
|
||||
clap = { version = "4.1.8", features = [
|
||||
"derive",
|
||||
|
|
|
@ -0,0 +1,228 @@
|
|||
<?xml version='1.0' encoding='windows-1252'?>
|
||||
<!--
|
||||
Copyright (C) 2017 Christopher R. Field.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
|
||||
<!--
|
||||
The "cargo wix" subcommand provides a variety of predefined variables available
|
||||
for customization of this template. The values for each variable are set at
|
||||
installer creation time. The following variables are available:
|
||||
|
||||
TargetTriple = The rustc target triple name.
|
||||
TargetEnv = The rustc target environment. This is typically either
|
||||
"msvc" or "gnu" depending on the toolchain downloaded and
|
||||
installed.
|
||||
TargetVendor = The rustc target vendor. This is typically "pc", but Rust
|
||||
does support other vendors, like "uwp".
|
||||
CargoTargetBinDir = The complete path to the directory containing the
|
||||
binaries (exes) to include. The default would be
|
||||
"target\release\". If an explicit rustc target triple is
|
||||
used, i.e. cross-compiling, then the default path would
|
||||
be "target\<CARGO_TARGET>\<CARGO_PROFILE>",
|
||||
where "<CARGO_TARGET>" is replaced with the "CargoTarget"
|
||||
variable value and "<CARGO_PROFILE>" is replaced with the
|
||||
value from the "CargoProfile" variable. This can also
|
||||
be overridden manually with the "target-bin-dir" flag.
|
||||
CargoTargetDir = The path to the directory for the build artifacts, i.e.
|
||||
"target".
|
||||
CargoProfile = The cargo profile used to build the binaries
|
||||
(usually "debug" or "release").
|
||||
Version = The version for the installer. The default is the
|
||||
"Major.Minor.Fix" semantic versioning number of the Rust
|
||||
package.
|
||||
-->

<!--
  Please do not remove these pre-processor If-Else blocks. These are used with
  the `cargo wix` subcommand to automatically determine the installation
  destination for 32-bit versus 64-bit installers. Removal of these lines will
  cause installation errors.
-->
<?if $(sys.BUILDARCH) = x64 or $(sys.BUILDARCH) = arm64 ?>
    <?define PlatformProgramFilesFolder = "ProgramFiles64Folder" ?>
<?else ?>
    <?define PlatformProgramFilesFolder = "ProgramFilesFolder" ?>
<?endif ?>

<Wix xmlns='http://schemas.microsoft.com/wix/2006/wi'>

    <Product
        Id='*'
        Name='aiken'
        UpgradeCode='288B160D-418A-4558-91B9-7C38CFD789C7'
        Manufacturer='Lucas Rosa; Kasey White; KtorZ'
        Language='1033'
        Codepage='1252'
        Version='$(var.Version)'>

        <Package Id='*'
            Keywords='Installer'
            Description='Cardano smart contract language and toolchain'
            Manufacturer='Lucas Rosa; Kasey White; KtorZ'
            InstallerVersion='450'
            Languages='1033'
            Compressed='yes'
            InstallScope='perMachine'
            SummaryCodepage='1252'
            />

        <MajorUpgrade
            Schedule='afterInstallInitialize'
            DowngradeErrorMessage='A newer version of [ProductName] is already installed. Setup will now exit.'/>

        <Media Id='1' Cabinet='media1.cab' EmbedCab='yes' DiskPrompt='CD-ROM #1'/>
        <Property Id='DiskPrompt' Value='aiken Installation'/>

        <Directory Id='TARGETDIR' Name='SourceDir'>
            <Directory Id='$(var.PlatformProgramFilesFolder)' Name='PFiles'>
                <Directory Id='APPLICATIONFOLDER' Name='aiken'>

                    <!--
                      Enabling the license sidecar file in the installer is a four step process:

                        1. Uncomment the `Component` tag and its contents.
                        2. Change the value for the `Source` attribute in the `File` tag to a path
                           to the file that should be included as the license sidecar file. The path
                           can, and probably should be, relative to this file.
                        3. Change the value for the `Name` attribute in the `File` tag to the
                           desired name for the file when it is installed alongside the `bin` folder
                           in the installation directory. This can be omitted if the desired name is
                           the same as the file name.
                        4. Uncomment the `ComponentRef` tag with the Id attribute value of "License"
                           further down in this file.
                    -->
                    <!--
                    <Component Id='License' Guid='*'>
                        <File Id='LicenseFile' Name='ChangeMe' DiskId='1' Source='C:\Path\To\File' KeyPath='yes'/>
                    </Component>
                    -->
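                    <!--
                      Illustrative only; the relative path and file name below are assumptions,
                      not values from this template. A filled-in version of the component above
                      could look like:

                      <Component Id='License' Guid='*'>
                          <File Id='LicenseFile' Name='LICENSE' DiskId='1' Source='..\..\LICENSE' KeyPath='yes'/>
                      </Component>
                    -->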

                    <Directory Id='Bin' Name='bin'>
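                        <!--
                          This component appends the installed `bin` directory to the
                          system-wide PATH (Part='last', System='yes'). Permanent='no'
                          means the entry is removed again when aiken is uninstalled, and
                          the optional "Environment" feature further down lets users skip
                          the PATH change entirely.
                        -->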
                        <Component Id='Path' Guid='4EB8FCD6-261B-4F6C-B7DB-CFA67B4E6960' KeyPath='yes'>
                            <Environment
                                Id='PATH'
                                Name='PATH'
                                Value='[Bin]'
                                Permanent='no'
                                Part='last'
                                Action='set'
                                System='yes'/>
                        </Component>
                        <Component Id='binary0' Guid='*'>
                            <File
                                Id='exe0'
                                Name='aiken.exe'
                                DiskId='1'
                                Source='$(var.CargoTargetBinDir)\aiken.exe'
                                KeyPath='yes'/>
                        </Component>
                    </Directory>
                </Directory>
            </Directory>
        </Directory>
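        <!--
          The "Binaries" feature below is required (Absent='disallow'); the nested
          "Environment" feature is optional (Absent='allow'), so the PATH change can
          be deselected at install time without skipping the executable itself.
        -->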

        <Feature
            Id='Binaries'
            Title='Application'
            Description='Installs all binaries and the license.'
            Level='1'
            ConfigurableDirectory='APPLICATIONFOLDER'
            AllowAdvertise='no'
            Display='expand'
            Absent='disallow'>

            <!--
              Uncomment the following `ComponentRef` tag to add the license
              sidecar file to the installer.
            -->
            <!--<ComponentRef Id='License'/>-->

            <ComponentRef Id='binary0'/>

            <Feature
                Id='Environment'
                Title='PATH Environment Variable'
                Description='Add the install location of the [ProductName] executable to the PATH system environment variable. This allows the [ProductName] executable to be called from any location.'
                Level='1'
                Absent='allow'>
                <ComponentRef Id='Path'/>
            </Feature>
        </Feature>

        <SetProperty Id='ARPINSTALLLOCATION' Value='[APPLICATIONFOLDER]' After='CostFinalize'/>


        <!--
          Uncomment the following `Icon` and `Property` tags to change the product icon.

          The product icon is the graphic that appears in the Add/Remove
          Programs control panel for the application.
        -->
        <!--<Icon Id='ProductICO' SourceFile='wix\Product.ico'/>-->
        <!--<Property Id='ARPPRODUCTICON' Value='ProductICO' />-->

        <Property Id='ARPHELPLINK' Value='https://github.com/aiken-lang/aiken'/>

        <UI>
            <UIRef Id='WixUI_FeatureTree'/>

            <!--
              Enabling the EULA dialog in the installer is a three step process:

                1. Comment out or remove the two `Publish` tags that follow the
                   `WixVariable` tag.
                2. Uncomment the `<WixVariable Id='WixUILicenseRtf' Value='Path\to\Eula.rtf'>` tag further down.
                3. Replace the `Value` attribute of the `WixVariable` tag with
                   the path to an RTF file that will be used as the EULA and
                   displayed in the license agreement dialog.
            -->
            <Publish Dialog='WelcomeDlg' Control='Next' Event='NewDialog' Value='CustomizeDlg' Order='99'>1</Publish>
            <Publish Dialog='CustomizeDlg' Control='Back' Event='NewDialog' Value='WelcomeDlg' Order='99'>1</Publish>

        </UI>


        <!--
          Enabling the EULA dialog in the installer requires uncommenting
          the following `WixUILicenseRtf` tag and changing the `Value`
          attribute.
        -->
        <!-- <WixVariable Id='WixUILicenseRtf' Value='Relative\Path\to\Eula.rtf'/> -->
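        <!--
          Hypothetical filled-in form, shown only as an example; the path is an
          assumption, not part of this template:
          <WixVariable Id='WixUILicenseRtf' Value='wix\Eula.rtf'/>
        -->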

        <!--
          Uncomment the next `WixVariable` tag to customize the installer's
          Graphical User Interface (GUI) and add a custom banner image across
          the top of each screen. See the WiX Toolset documentation for details
          about customization.

          The banner BMP dimensions are 493 x 58 pixels.
        -->
        <!--<WixVariable Id='WixUIBannerBmp' Value='wix\Banner.bmp'/>-->


        <!--
          Uncomment the next `WixVariable` tag to customize the installer's
          Graphical User Interface (GUI) and add a custom image to the first
          dialog, or screen. See the WiX Toolset documentation for details about
          customization.

          The dialog BMP dimensions are 493 x 312 pixels.
        -->
        <!--<WixVariable Id='WixUIDialogBmp' Value='wix\Dialog.bmp'/>-->

    </Product>

</Wix>

@@ -3,7 +3,7 @@ name = "uplc"
 description = "Utilities for working with Untyped Plutus Core"
 version = "1.0.26-alpha"
 edition = "2021"
-repository = "https://github.com/aiken-lang/aiken/crates/uplc"
+repository = "https://github.com/aiken-lang/aiken"
 homepage = "https://github.com/aiken-lang/aiken"
 license = "Apache-2.0"
 authors = ["Lucas Rosa <x@rvcas.dev>", "Kasey White <kwhitemsg@gmail.com>"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569180, nanos_since_epoch = 895108000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005298, nanos_since_epoch = 528741000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -42,8 +42,8 @@
           "$ref": "#/definitions/Data"
         }
       },
-      "compiledCode": "58d501000032323232323232222533300432330010013758601460166016600e6ea8c028c01cdd50011129998048008a501325333007333007533300a3253330083370e900118049baa00114bd6f7b63009bab300d300a375400264660020026eacc034c038c028dd518068019129998060008a60103d87a80001323232533300c3371e91105000000000000375c601a006266e95200033010374c00297ae01330050050023756601a0046020004601c00229445282504a229444cc00c00c004c030004526136565734aae7555cf2ab9f5740ae855d101",
-      "hash": "416db3eec35a0e94198a5123de948b773f98d4fcba87f581598a8068"
+      "compiledCode": "58ef01000032323232323232222533300432330010013758601460166016600e6ea8c028c01cdd50011129998048008a501325333007333007533300a3253330083370e900118049baa00114bd6f7b63009bab300d300a375400264660020026eacc034c038c028dd518068019129998060008a60103d87a8000132323232533300d33722911050000000000000021533300d3371e91010500000000000000213374a9000198089ba60014bd700a6103d87a80001330060060033756601c0066eb8c030008c040008c0380045288a504a094452889980180180098060008a4c26cacae6955ceaab9e5573eae815d0aba21",
+      "hash": "f56561e01063b11146809755d9907147e79d3166aa5c65fba4040fd1"
     }
   ],
   "definitions": {

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 784240000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005296, nanos_since_epoch = 102490000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569192, nanos_since_epoch = 806001000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005303, nanos_since_epoch = 72632000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -85,10 +85,10 @@ const bar = #"626172"

 fn fixture_1() {
   dict.new()
-    |> dict.insert(fooz, 42, bytearray.compare)
-    |> dict.insert(bar, 14, bytearray.compare)
+    |> dict.insert(fooz, 42)
+    |> dict.insert(bar, 14)
 }

 test union_1() {
-  dict.union(fixture_1(), dict.new(), bytearray.compare) == fixture_1()
+  dict.union(fixture_1(), dict.new()) == fixture_1()
 }

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569192, nanos_since_epoch = 805967000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005305, nanos_since_epoch = 39479000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -61,7 +61,7 @@ pub fn tx_1() -> Transaction {
     fee: value.zero(),
     mint: value.from_asset(#"000000", #"00", -1) |> value.to_minted_value,
     certificates: [],
-    withdrawals: dict.new(),
+    withdrawals: [],
     validity_range: Interval {
       lower_bound: IntervalBound {
         bound_type: PositiveInfinity,

@@ -73,7 +73,7 @@ pub fn tx_1() -> Transaction {
       },
     },
     extra_signatories: [keyhash],
-    redeemers: dict.new(),
+    redeemers: [],
     datums: dict.new(),
     id: TransactionId { hash: #"" },
   }

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 348000000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 773339000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569194, nanos_since_epoch = 888685000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005316, nanos_since_epoch = 645681000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569186, nanos_since_epoch = 741545000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005314, nanos_since_epoch = 209079000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -24,9 +24,9 @@ pub fn from_asset(
 ) -> Value {
   let asset =
     dict.new()
-      |> dict.insert(asset_name, quantity, bytearray.compare)
+      |> dict.insert(asset_name, quantity)
   dict.new()
-    |> dict.insert(policy_id, asset, bytearray.compare)
+    |> dict.insert(policy_id, asset)
     |> Value
 }

@@ -47,7 +47,6 @@ pub fn add(left v0: Value, right v1: Value) -> Value {
           Some(q)
         }
       },
-      bytearray.compare,
     )

   if dict.is_empty(result) {

@@ -56,7 +55,6 @@ pub fn add(left v0: Value, right v1: Value) -> Value {
             Some(result)
           }
         },
-        bytearray.compare,
       )
     |> Value
 }

@@ -2,7 +2,11 @@
   "preamble": {
     "title": "aiken-lang/acceptance_test_068",
     "version": "0.0.0",
-    "plutusVersion": "v2"
+    "plutusVersion": "v2",
+    "compiler": {
+      "name": "Aiken",
+      "version": "v1.0.26-alpha+a44ed4c"
+    }
   },
   "validators": []
 }

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 533207000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 708386000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569178, nanos_since_epoch = 711093000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005301, nanos_since_epoch = 544359000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569181, nanos_since_epoch = 605261000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005299, nanos_since_epoch = 594302000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569187, nanos_since_epoch = 164467000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 50660000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569187, nanos_since_epoch = 610647000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005309, nanos_since_epoch = 773612000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569191, nanos_since_epoch = 273641000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005298, nanos_since_epoch = 330541000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -0,0 +1,7 @@
+# This file was generated by Aiken
+# You typically do not need to edit this file
+
+requirements = []
+packages = []
+
+[etags]

@@ -0,0 +1,3 @@
+name = 'aiken-lang/acceptance_test_075'
+version = '0.0.0'
+description = ''

@@ -0,0 +1,37 @@
+pub opaque type Dict<key, value> {
+  inner: List<Pair<key, value>>,
+}
+
+pub fn constant(a: a) -> Fuzzer<a> {
+  fn(s0) { Some((s0, a)) }
+}
+
+pub fn map(fuzz_a: Fuzzer<a>, f: fn(a) -> b) -> Fuzzer<b> {
+  fn(s0) {
+    when fuzz_a(s0) is {
+      Some((s1, a)) -> Some((s1, f(a)))
+      None -> None
+    }
+  }
+}
+
+// NOTE: Inlining `do_list` fixes the problem. But the indirection here causes:
+//
+// --> Type mismatch expected 'pair data data' got 'data'
+pub fn list(fuzzer: Fuzzer<a>) -> Fuzzer<List<a>> {
+  do_list(fuzzer, [])
+}
+
+fn do_list(fuzzer, xs) -> Fuzzer<List<a>> {
+  let x <- map(fuzzer)
+  [x, ..xs]
+}
+
+pub fn dict() -> Fuzzer<Dict<Int, Bool>> {
+  list(constant(Pair(1, True)))
+    |> map(fn(inner) { Dict { inner } })
+}
+
+test prop_dict_between(_d via dict()) {
+  True
+}

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569186, nanos_since_epoch = 96782000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005297, nanos_since_epoch = 729130000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -31,8 +31,25 @@
         }
       }
     ],
-    "compiledCode": "5901ae010000323232323232322322232323225333009323232533300c3007300d3754002264646464a666026602c00426464a666024601a60266ea803854ccc048c8cc004004018894ccc05c004528099299980a99baf301a30173754603400402429444cc00c00c004c06800454ccc048c0300044cdc78010080a501616375a60260046eb8c04400458c050004cc88c94ccc044c02cc048dd50008a5eb7bdb1804dd5980b18099baa001323300100100322533301500114c0103d87a8000132323253330153371e00c6eb8c05800c4cdd2a4000660326e980052f5c026600a00a0046eacc058008c064008c05c004c8cc004004dd5980a180a980a980a980a8019129998098008a5eb7bdb1804c8c8c8c94ccc050cdc7a45000021003133018337606ea4008dd3000998030030019bab3015003375c6026004602e004602a0026eb8c04cc040dd50019bac3012001300e37540042c60206022004601e00260166ea80045261365632533300830030011533300b300a37540082930b0a99980418010008a99980598051baa00414985858c020dd50019b8748008dc3a40006eb80055cd2ab9d5573caae7d5d02ba15745",
-    "hash": "c537a66202fafb789b2e76c6a5430b53b0069e223ba8ad4b5b793d51"
+    "compiledCode": "5901cc010000323232323232322322232323225333009323232533300c3007300d3754002264646464a666026602c00426464a666024601a60266ea803854ccc048c8cc004004018894ccc05c004528099299980a99baf301a30173754603400402429444cc00c00c004c06800454ccc048c0300044cdc78010080a501616375a60260046eb8c04400458c050004c94ccc03cc024c040dd50008a5eb7bdb1804dd5980a18089baa00132323300100132330010013756602c602e602e602e602e00a44a66602a002297adef6c6013232323253330163372291100002153330163371e9101000021003100513301a337606ea4008dd3000998030030019bab3017003375c602a0046032004602e00244a666028002298103d87a800013232323253330153372200e0042a66602a66e3c01c0084cdd2a4000660326e980052f5c02980103d87a80001330060060033756602c0066eb8c050008c060008c058004dd7180998081baa00337586024002601c6ea800858c040c044008c03c004c02cdd50008a4c26cac64a66601060060022a66601660146ea8010526161533300830020011533300b300a37540082930b0b18041baa003370e90011b8748000dd7000ab9a5573aaae7955cfaba05742ae89",
+    "hash": "df50e06b40d42b2c399ebcec4c2a8e51a6b28ef99790d950cc251caa"
+    },
+    {
+      "title": "spend2.backtrace",
+      "datum": {
+        "title": "_datum",
+        "schema": {
+          "$ref": "#/definitions/Void"
+        }
+      },
+      "redeemer": {
+        "title": "_redeemer",
+        "schema": {
+          "$ref": "#/definitions/Void"
+        }
+      },
+      "compiledCode": "58ad010000323232323232322323223225333007533300730053008375464660020026eb0c034c038c038c028dd5180698051baa00222533300c00114c0103d87a800013232533300b4a2266e9520003300f0024bd70099802002000980800118070008a511614984d958c94ccc018c010c01c00454ccc024c0200045261616375400264a6660086004600a0022a66600e600c0022930b0b1baa002370e90002b9a5573aaae7955cfaba05742ae881",
+      "hash": "aae5a1fcf239d541c67a7efb006436be41c5ee7f6f4a8fd7b39b97a8"
+    }
   ],
   "definitions": {

@@ -42,6 +59,17 @@
     "Int": {
       "dataType": "integer"
     },
+    "Void": {
+      "title": "Unit",
+      "description": "The nullary constructor.",
+      "anyOf": [
+        {
+          "dataType": "constructor",
+          "index": 0,
+          "fields": []
+        }
+      ]
+    },
     "aiken/transaction/OutputReference": {
       "title": "OutputReference",
       "description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output",

@@ -14,11 +14,11 @@ validator(token_name: ByteArray, utxo_ref: OutputReference) {
     let ScriptContext { transaction, purpose } = ctx
     expect tx.Mint(policy_id) = purpose
     let Transaction { inputs, mint, .. } = transaction
-    expect [(asset_name, amount)] =
+    expect [Pair(asset_name, amount)] =
       mint
         |> value.from_minted_value
         |> value.tokens(policy_id)
-        |> dict.to_list()
+        |> dict.to_alist()
     when rdmr is {
       Mint -> {
         expect

@@ -1,18 +1,15 @@
-// use aiken/list
-// use aiken/transaction.{Output, ScriptContext}
+use aiken/list
+use aiken/transaction.{Output, ScriptContext}

-// validator {
-//   fn backtrace(_datum: Void, _redeemer: Void, context: ScriptContext) -> Bool {
-//     expect Some(_) =
-//       list.find(context.transaction.outputs, fn(_) { True })
-//     let _ =
-//       find_stuff(context)
-//     True
-//   }
-// }
+validator {
+  fn backtrace(_datum: Void, _redeemer: Void, context: ScriptContext) -> Bool {
+    expect Some(_) = list.find(context.transaction.outputs, fn(_) { True })
+    let _ = find_stuff(context)
+    True
+  }
+}

-// fn find_stuff(context) -> Output {
-//   expect Some(stuff) =
-//     list.find(context.transaction.outputs, fn(_) { True })
-//   stuff
-// }
+fn find_stuff(context: ScriptContext) -> Output {
+  expect Some(stuff) = list.find(context.transaction.outputs, fn(_) { True })
+  stuff
+}

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569178, nanos_since_epoch = 275562000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005301, nanos_since_epoch = 189023000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569180, nanos_since_epoch = 807185000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005307, nanos_since_epoch = 694173000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -41,16 +41,16 @@ test dict_test1() {

   let (ask_map, asize, offer_map, osize) =
     (
-      dict.from_list([(ask_input_ref, transaction.NoDatum)], compare_out_ref),
+      [Pair(ask_input_ref, transaction.NoDatum)],
       1,
-      dict.from_list([(offer_input_ref, transaction.NoDatum)], compare_out_ref),
+      [Pair(offer_input_ref, transaction.NoDatum)],
       1,
     )

   (ask_map, asize, offer_map, osize) == (
-    dict.from_list([(ask_input_ref, transaction.NoDatum)], compare_out_ref),
+    [Pair(ask_input_ref, transaction.NoDatum)],
     1,
-    dict.from_list([(offer_input_ref, transaction.NoDatum)], compare_out_ref),
+    [Pair(offer_input_ref, transaction.NoDatum)],
     1,
   )
 }

@@ -62,14 +62,9 @@ test dict_test2() {
     [(ask_input_ref, offer_input_ref)]

   let foo =
-    fn(pair: (OutputReference, OutputReference), acc: Dict<Value, Address>) {
+    fn(pair: (OutputReference, OutputReference), acc: Dict<ByteArray, Address>) {
       let new_pay_map =
-        dict.insert(
-          acc,
-          value.zero(),
-          Address(VerificationKeyCredential("00"), None),
-          compare_value("", "", _, _),
-        )
+        dict.insert(acc, "", Address(VerificationKeyCredential("00"), None))

       new_pay_map
     }

@@ -87,21 +82,17 @@ test dict_test3() {

   let (ask_map, asize, offer_map, osize) =
     (
-      dict.from_list([(ask_input_ref, transaction.NoDatum)], compare_out_ref),
+      [Pair(ask_input_ref, transaction.NoDatum)],
      1,
-      dict.from_list([(offer_input_ref, transaction.NoDatum)], compare_out_ref),
+      [Pair(offer_input_ref, transaction.NoDatum)],
       1,
     )

+  // TODO: Maybe passing Value to the key generic of dict shouldn't be possible
   let foo =
     fn(pair: (OutputReference, OutputReference), acc: Dict<Value, Address>) {
       let new_pay_map =
-        dict.insert(
-          acc,
-          value.zero(),
-          Address(VerificationKeyCredential("00"), None),
-          compare_value("", "", _, _),
-        )
+        dict.insert(acc, "", Address(VerificationKeyCredential("00"), None))

       new_pay_map
     }

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569180, nanos_since_epoch = 976274000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005296, nanos_since_epoch = 102522000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -24,6 +24,6 @@ test tuple_when() {
 }

 test t() {
-  trace cbor.diagnostic(list.map([(#"", 20)], snd_pair))
+  trace cbor.diagnostic(list.map([Pair(#"", 20)], snd_pair))
   True
 }

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 891129000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005304, nanos_since_epoch = 885730000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569182, nanos_since_epoch = 746568000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 646894000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -3,6 +3,6 @@ version = "0.0.0"
 description = ""

 [[dependencies]]
-name = 'aiken-lang/stdlib'
-version = 'main'
-source = 'github'
+name = "aiken-lang/stdlib"
+version = "main"
+source = "github"

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569188, nanos_since_epoch = 217936000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005301, nanos_since_epoch = 48539000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -13,4 +13,4 @@ requirements = []
 source = "github"

 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 408850000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005312, nanos_since_epoch = 176254000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"]

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -5,7 +5,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     }
   },
   "validators": [

@@ -6,7 +6,7 @@
     "plutusVersion": "v2",
     "compiler": {
       "name": "Aiken",
-      "version": "v1.0.26-alpha+075668b"
+      "version": "v1.0.26-alpha+c5227a2"
     },
     "license": "Apache-2.0"
   },

@@ -0,0 +1,7 @@
+# This file was generated by Aiken
+# You typically do not need to edit this file
+
+requirements = []
+packages = []
+
+[etags]
Some files were not shown because too many files have changed in this diff.