diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 14d2d1d1..4632022a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,148 +1,312 @@ +# Copyright 2022-2024, axodotdev +# SPDX-License-Identifier: MIT or Apache-2.0 +# +# CI that: +# +# * checks for a Git Tag that looks like a release +# * builds artifacts with cargo-dist (archives, installers, hashes) +# * uploads those artifacts to a temporary workflow zip +# * on success, uploads the artifacts to a GitHub Release +# +# Note that the GitHub Release will be created with a generated +# title/body based on your changelogs. + name: Release +permissions: + contents: write + +# This task will run whenever you push a git tag that looks like a version, +# such as "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. +# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where +# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION +# must be a Cargo-style SemVer Version (must have at least major.minor.patch). +# +# If PACKAGE_NAME is specified, then the announcement will be for that +# package (erroring out if it doesn't have the given version or isn't cargo-dist-able). +# +# If PACKAGE_NAME isn't specified, then the announcement will be for all +# (cargo-dist-able) packages in the workspace with that version (this mode is +# intended for workspaces with only one dist-able package, or with all dist-able +# packages versioned/released in lockstep). +# +# If you push multiple tags at once, separate instances of this workflow will +# spin up, creating an independent announcement for each one. However, GitHub +# will hard limit this to 3 tags per commit, as it will assume more tags are a +# mistake. +# +# If there's a prerelease-style suffix to the version, then the release(s) +# will be marked as a prerelease. 
on: push: - tags: ["v*.*.*"] - -env: - CARGO_TERM_COLOR: always + tags: + - '**[0-9]+.[0-9]+.[0-9]+*' + pull_request: jobs: - prepare: - name: Prepare release + # Run 'cargo dist plan' (or host) to determine what tasks we need to do + plan: runs-on: ubuntu-latest outputs: - tag_name: ${{ steps.release_info.outputs.tag_name }} - release_name: ${{ steps.release_info.outputs.release_name }} - # release_notes: ${{ steps.extract_release_notes.outputs.release_notes }} - + val: ${{ steps.plan.outputs.manifest }} + tag: ${{ !github.event.pull_request && github.ref_name || '' }} + tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} + publishing: ${{ !github.event.pull_request }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - - name: Checkout - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: - fetch-depth: 0 - - - name: Compute release name and tag - id: release_info - run: | - echo "tag_name=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT - echo "release_name=${GITHUB_REF_NAME}" >> $GITHUB_OUTPUT - - # - name: Extract release notes - # id: extract_release_notes - # run: echo "release_notes=\"$(sed -n '/^## .*$/,$p' CHANGELOG.md | sed '1d;/^## /,$d')\"" >> $GITHUB_OUTPUT - - release: - name: ${{ matrix.job.target }} (${{ matrix.job.os }}) - runs-on: ${{ matrix.job.os }} - needs: prepare - strategy: - matrix: - job: - # os: used for the runner - # platform: a generic platform name - # target: used by Cargo - # arch: either 386, arm64 or amd64 - - os: ubuntu-latest - platform: linux - target: x86_64-unknown-linux-gnu - arch: amd64 - # - os: ubuntu-latest - # platform: linux - # target: aarch64-unknown-linux-gnu - # arch: arm64 - - os: macos-latest - platform: darwin - target: x86_64-apple-darwin - arch: amd64 - - os: macos-latest - platform: darwin - target: aarch64-apple-darwin - arch: arm64 - - os: windows-latest - platform: win32 - target: x86_64-pc-windows-msvc - arch: amd64 - # - os: windows-latest - # platform: win32 - # target: aarch64-pc-windows-msvc - # arch: arm64 - - steps: - - name: Checkout - uses: actions/checkout@v2 - - - name: Install toolchain - uses: actions-rs/toolchain@v1 - with: - profile: minimal - toolchain: stable - target: ${{ matrix.job.target }} - override: true - - - uses: Swatinem/rust-cache@v1 - with: - cache-on-failure: true - - - name: Apple M1 setup - if: ${{ matrix.job.target == 'aarch64-apple-darwin' }} - run: | - echo "SDKROOT=$(xcrun -sdk macosx --show-sdk-path)" >> $GITHUB_ENV - echo "MACOSX_DEPLOYMENT_TARGET=$(xcrun -sdk macosx --show-sdk-platform-version)" >> $GITHUB_ENV - - - name: Linux AMD setup - if: ${{ matrix.job.target == 'x86_64-unknown-linux-gnu' }} - run: | - echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV - - - name: Linux ARM setup - if: ${{ matrix.job.target == 'aarch64-unknown-linux-gnu' }} - run: | - sudo apt-get update -y - sudo apt-get install -y gcc-aarch64-linux-gnu libssl-dev:armhf - echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc" >> $GITHUB_ENV - echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV - - - name: Windows setup - if: ${{ matrix.job.os == 'windows-latest' }} - run: | - echo "RUSTFLAGS=-C target-feature=+crt-static" >> $GITHUB_ENV - - - name: Build binaries - uses: actions-rs/cargo@v1 - with: - command: build - args: --release --bins --target ${{ matrix.job.target }} - - - name: Archive binaries - id: artifacts - env: - PLATFORM_NAME: ${{ matrix.job.platform }} - TARGET: ${{ matrix.job.target }} - ARCH: ${{ matrix.job.arch }} - 
VERSION_NAME: ${{ needs.prepare.outputs.tag_name }} - run: | - if [ "$PLATFORM_NAME" == "linux" ]; then - tar -czvf "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz" -C ./target/${TARGET}/release aiken - echo "::set-output name=file_name::aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz" - elif [ "$PLATFORM_NAME" == "darwin" ]; then - # We need to use gtar here otherwise the archive is corrupt. - # See: https://github.com/actions/virtual-environments/issues/2619 - gtar -czvf "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz" -C ./target/${TARGET}/release aiken - echo "::set-output name=file_name::aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.tar.gz" - else - cd ./target/${TARGET}/release - 7z a -tzip "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.zip" aiken.exe - mv "aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.zip" ../../../ - echo "::set-output name=file_name::aiken_${VERSION_NAME}_${PLATFORM_NAME}_${ARCH}.zip" - fi + submodules: recursive + - name: Install cargo-dist + # we specify bash to get pipefail; it guards against the `curl` command + # failing. otherwise `sh` won't catch that `curl` returned non-0 shell: bash - - # Creates the release for this specific version - - name: Create release - uses: softprops/action-gh-release@v1 + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh" + # sure would be cool if github gave us proper conditionals... + # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible + # functionality based on whether this is a pull_request, and whether it's from a fork. + # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* + # but also really annoying to build CI around when it needs secrets to work right.) + - id: plan + run: | + cargo dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json + echo "cargo dist ran successfully" + cat plan-dist-manifest.json + echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v4 with: - name: ${{ needs.prepare.outputs.release_name }} - tag_name: ${{ needs.prepare.outputs.tag_name }} - files: | - ${{ steps.artifacts.outputs.file_name }} + name: artifacts-plan-dist-manifest + path: plan-dist-manifest.json + + # Build and packages all the platform-specific things + build-local-artifacts: + name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) + # Let the initial task tell us to not run (currently very blunt) + needs: + - plan + if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} + strategy: + fail-fast: false + # Target platforms/runners are computed by cargo-dist in create-release. 
+ # Each member of the matrix has the following arguments: + # + # - runner: the github runner + # - dist-args: cli flags to pass to cargo dist + # - install-dist: expression to run to install cargo-dist on the runner + # + # Typically there will be: + # - 1 "global" task that builds universal installers + # - N "local" tasks that build each platform's binaries and platform-specific installers + matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} + runs-on: ${{ matrix.runner }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json + steps: + - name: enable windows longpaths + run: | + git config --global core.longpaths true + - uses: actions/checkout@v4 + with: + submodules: recursive + - uses: swatinem/rust-cache@v2 + with: + key: ${{ join(matrix.targets, '-') }} + - name: Install cargo-dist + run: ${{ matrix.install_dist }} + # Get the dist-manifest + - name: Fetch local artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + - name: Install dependencies + run: | + ${{ matrix.packages_install }} + - name: Build artifacts + run: | + # Actually do builds and make zips and whatnot + cargo dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json + echo "cargo dist ran successfully" + - id: cargo-dist + name: Post-build + # We force bash here just because github makes it really hard to get values up + # to "real" actions without writing to env-vars, and writing to env-vars has + # inconsistent syntax between shell and powershell. + shell: bash + run: | + # Parse out what we just built and upload it to scratch storage + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + + cp dist-manifest.json "$BUILD_MANIFEST_NAME" + - name: "Upload artifacts" + uses: actions/upload-artifact@v4 + with: + name: artifacts-build-local-${{ join(matrix.targets, '_') }} + path: | + ${{ steps.cargo-dist.outputs.paths }} + ${{ env.BUILD_MANIFEST_NAME }} + + # Build and package all the platform-agnostic(ish) things + build-global-artifacts: + needs: + - plan + - build-local-artifacts + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install cargo-dist + shell: bash + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh" + # Get all the local artifacts for the global tasks to use (for e.g. 
checksums) + - name: Fetch local artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + - id: cargo-dist + shell: bash + run: | + cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json + echo "cargo dist ran successfully" + + # Parse out what we just built and upload it to scratch storage + echo "paths<> "$GITHUB_OUTPUT" + jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" + echo "EOF" >> "$GITHUB_OUTPUT" + + cp dist-manifest.json "$BUILD_MANIFEST_NAME" + - name: "Upload artifacts" + uses: actions/upload-artifact@v4 + with: + name: artifacts-build-global + path: | + ${{ steps.cargo-dist.outputs.paths }} + ${{ env.BUILD_MANIFEST_NAME }} + # Determines if we should publish/announce + host: + needs: + - plan + - build-local-artifacts + - build-global-artifacts + # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) + if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + runs-on: "ubuntu-20.04" + outputs: + val: ${{ steps.host.outputs.manifest }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: Install cargo-dist + run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.13.3/cargo-dist-installer.sh | sh" + # Fetch artifacts from scratch-storage + - name: Fetch artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: target/distrib/ + merge-multiple: true + # This is a harmless no-op for GitHub Releases, hosting for that happens in "announce" + - id: host + shell: bash + run: | + cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json + echo "artifacts uploaded and released successfully" + cat dist-manifest.json + echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" + - name: "Upload dist-manifest.json" + uses: actions/upload-artifact@v4 + with: + # Overwrite the previous copy + name: artifacts-dist-manifest + path: dist-manifest.json + + publish-homebrew-formula: + needs: + - plan + - host + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PLAN: ${{ needs.plan.outputs.val }} + GITHUB_USER: "axo bot" + GITHUB_EMAIL: "admin+bot@axo.dev" + if: ${{ !fromJson(needs.plan.outputs.val).announcement_is_prerelease || fromJson(needs.plan.outputs.val).publish_prereleases }} + steps: + - uses: actions/checkout@v4 + with: + repository: "aiken-lang/homebrew-tap" + token: ${{ secrets.HOMEBREW_TAP_TOKEN }} + # So we have access to the formula + - name: Fetch local artifacts + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: Formula/ + merge-multiple: true + # This is extra complex because you can make your Formula name not match your app name + # so we need to find releases with a *.rb file, and publish with that filename. 
+ - name: Commit formula files + run: | + git config --global user.name "${GITHUB_USER}" + git config --global user.email "${GITHUB_EMAIL}" + + for release in $(echo "$PLAN" | jq --compact-output '.releases[] | select([.artifacts[] | endswith(".rb")] | any)'); do + filename=$(echo "$release" | jq '.artifacts[] | select(endswith(".rb"))' --raw-output) + name=$(echo "$filename" | sed "s/\.rb$//") + version=$(echo "$release" | jq .app_version --raw-output) + + git add "Formula/${filename}" + git commit -m "${name} ${version}" + done + git push + + # Create a GitHub Release while uploading all files to it + announce: + needs: + - plan + - host + - publish-homebrew-formula + # use "always() && ..." to allow us to wait for all publish jobs while + # still allowing individual publish jobs to skip themselves (for prereleases). + # "host" however must run to completion, no skipping allowed! + if: ${{ always() && needs.host.result == 'success' && (needs.publish-homebrew-formula.result == 'skipped' || needs.publish-homebrew-formula.result == 'success') }} + runs-on: "ubuntu-20.04" + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@v4 + with: + submodules: recursive + - name: "Download GitHub Artifacts" + uses: actions/download-artifact@v4 + with: + pattern: artifacts-* + path: artifacts + merge-multiple: true + - name: Cleanup + run: | + # Remove the granular manifests + rm -f artifacts/*-dist-manifest.json + - name: Create GitHub Release + uses: ncipollo/release-action@v1 + with: + tag: ${{ needs.plan.outputs.tag }} + name: ${{ fromJson(needs.host.outputs.val).announcement_title }} + body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }} + prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }} + artifacts: "artifacts/*" diff --git a/CHANGELOG.md b/CHANGELOG.md index 2485c59a..59c485fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,16 @@ - **aiken-lang**: formatter should not erase `pub` on validators. @rvcas - **aiken-lang**: error on using tuple index when a tuple is returned by a generic function. @rvcas +- **aiken-lang**: fix a regression in the Type-checker introduced in v1.0.25-alpha regarding type comparison. See #917. @KtorZ +- **aiken-lang**: Fix incongruous generics after type-checking, which caused `[]` to be treated as a list in cases where it needed to be an empty map primitive. See #922. @KtorZ +- **aiken-lang**: Fix for generic constrs being used as functions, causing type mismatch errors. @Microproofs +- **aiken-lang**: Fix for an error occurring when a field holds Data that is not a constr type while compiler traces are on. @Microproofs + +### Changed +- **aiken-lang**: **MAJOR CHANGE** 2-tuples are now treated the same as 3+ tuples. To replace the representation of pairs at the uplc level, we now have a new Prelude type called Pair with 2 generic arguments. The main place you will see its usage is in the script context. For existing contracts you can continue to use 2-tuples, just note that the off-chain representation is an array of 2 items in CBOR. @KtorZ @Microproofs +- **aiken-lang**: Some more code gen cleanup. @Microproofs +- **aiken-lang**: New optimization for wrapped builtins found in the stdlib. 
@Microproofs + ## v1.0.26-alpha - 2024-03-25 diff --git a/Cargo.toml b/Cargo.toml index 8219ff6f..918f9d5c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,13 +2,35 @@ members = ["crates/*"] resolver = "2" -[profile.release] -strip = true - [workspace.metadata.release] shared-version = true tag-name = "v{{version}}" +# Config for 'cargo dist' +[workspace.metadata.dist] +# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax) +cargo-dist-version = "0.13.3" +# CI backends to support +ci = ["github"] +# The installers to generate for each app +installers = ["shell", "powershell", "npm", "homebrew", "msi"] +# A GitHub repo to push Homebrew formulas to +tap = "aiken-lang/homebrew-tap" +# Target platforms to build apps for (Rust target-triple syntax) +targets = ["aarch64-apple-darwin", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] +# The archive format to use for windows builds (defaults .zip) +windows-archive = ".tar.gz" +# The archive format to use for non-windows builds (defaults .tar.xz) +unix-archive = ".tar.gz" +# A namespace to use when publishing this package to the npm registry +npm-scope = "@aiken-lang" +# Publish jobs to run in CI +publish-jobs = ["homebrew"] +# Publish jobs to run in CI +pr-run-mode = "plan" +# Whether to install an updater program +install-updater = false + [workspace.dependencies] walkdir = "2.3.2" pallas = "0.22.0" @@ -19,3 +41,8 @@ opt-level = 3 [profile.dev.package.similar] opt-level = 3 + +# The profile that 'cargo dist' will build with +[profile.dist] +inherits = "release" +lto = "thin" diff --git a/LICENSE b/LICENSE index 626d0981..6ee1ef24 100644 --- a/LICENSE +++ b/LICENSE @@ -187,7 +187,8 @@ identification within third-party archives. Copyright 2016-2022 Louis Pilfold (as Gleam) - Copyright 2022-Present TxPipe & Lucas Rosa (as Aiken) + Copyright 2022-2024 Cardano Foundation (as Aiken) + Copyright 2024-Present PRAGMA (as Aiken) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/crates/aiken-lang/src/ast.rs b/crates/aiken-lang/src/ast.rs index 0beff3a2..622c455f 100644 --- a/crates/aiken-lang/src/ast.rs +++ b/crates/aiken-lang/src/ast.rs @@ -970,6 +970,12 @@ pub enum Annotation { location: Span, elems: Vec, }, + + Pair { + location: Span, + fst: Box, + snd: Box, + }, } impl Annotation { @@ -979,7 +985,8 @@ impl Annotation { | Annotation::Tuple { location, .. } | Annotation::Var { location, .. } | Annotation::Hole { location, .. } - | Annotation::Constructor { location, .. } => *location, + | Annotation::Constructor { location, .. } + | Annotation::Pair { location, .. } => *location, } } @@ -1081,6 +1088,18 @@ impl Annotation { } => name == o_name, _ => false, }, + Annotation::Pair { fst, snd, .. } => { + if let Annotation::Pair { + fst: o_fst, + snd: o_snd, + .. + } = other + { + fst.is_logically_equal(o_fst) && snd.is_logically_equal(o_snd) + } else { + false + } + } } } @@ -1101,6 +1120,9 @@ impl Annotation { elems.iter().find_map(|arg| arg.find_node(byte_index)) } Annotation::Var { .. } | Annotation::Hole { .. } => None, + Annotation::Pair { fst, snd, .. } => fst + .find_node(byte_index) + .or_else(|| snd.find_node(byte_index)), }; located.or(Some(Located::Annotation(self))) @@ -1225,6 +1247,12 @@ pub enum Pattern { tipo: Type, }, + Pair { + location: Span, + fst: Box, + snd: Box, + }, + Tuple { location: Span, elems: Vec, @@ -1240,6 +1268,7 @@ impl Pattern { | Pattern::List { location, .. 
} | Pattern::Discard { location, .. } | Pattern::Tuple { location, .. } + | Pattern::Pair { location, .. } | Pattern::Constructor { location, .. } => *location, } } @@ -1309,6 +1338,19 @@ impl TypedPattern { _ => None, }, + Pattern::Pair { fst, snd, .. } => match &**value { + Type::Pair { + fst: fst_v, + snd: snd_v, + .. + } => [fst, snd] + .into_iter() + .zip([fst_v, snd_v].iter()) + .find_map(|(e, t)| e.find_node(byte_index, t)) + .or(Some(Located::Pattern(self, value.clone()))), + _ => None, + }, + Pattern::Constructor { arguments, tipo, .. } => match &**tipo { @@ -1322,6 +1364,7 @@ impl TypedPattern { } } + // TODO: This function definition is weird, see where this is used and how. pub fn tipo(&self, value: &TypedExpr) -> Option> { match self { Pattern::Int { .. } => Some(builtins::int()), @@ -1329,7 +1372,7 @@ impl TypedPattern { Pattern::Var { .. } | Pattern::Assign { .. } | Pattern::Discard { .. } => { Some(value.tipo()) } - Pattern::List { .. } | Pattern::Tuple { .. } => None, + Pattern::List { .. } | Pattern::Tuple { .. } | Pattern::Pair { .. } => None, } } } diff --git a/crates/aiken-lang/src/builtins.rs b/crates/aiken-lang/src/builtins.rs index 6b18f941..e7ea8a13 100644 --- a/crates/aiken-lang/src/builtins.rs +++ b/crates/aiken-lang/src/builtins.rs @@ -15,11 +15,16 @@ use std::{cell::RefCell, collections::HashMap, rc::Rc}; use strum::IntoEnumIterator; use uplc::builtins::DefaultFunction; +pub const PRELUDE: &str = "aiken"; +pub const BUILTIN: &str = "aiken/builtin"; + pub const BYTE_ARRAY: &str = "ByteArray"; pub const BOOL: &str = "Bool"; pub const INT: &str = "Int"; pub const DATA: &str = "Data"; pub const LIST: &str = "List"; +pub const ALIST: &str = "AList"; +pub const PAIR: &str = "Pair"; pub const VOID: &str = "Void"; pub const G1_ELEMENT: &str = "G1Element"; pub const G2_ELEMENT: &str = "G2Element"; @@ -35,7 +40,7 @@ pub const FUZZER: &str = "Fuzzer"; /// into a compiler pipeline pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { let mut prelude = TypeInfo { - name: "aiken".to_string(), + name: PRELUDE.to_string(), package: "".to_string(), kind: ModuleKind::Lib, types: HashMap::new(), @@ -320,6 +325,24 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { }, ); + // Pair(a, b) + let fst_parameter = generic_var(id_gen.next()); + let snd_parameter = generic_var(id_gen.next()); + prelude.types.insert( + PAIR.to_string(), + TypeConstructor { + location: Span::empty(), + parameters: vec![fst_parameter.clone(), snd_parameter.clone()], + tipo: pair(fst_parameter.clone(), snd_parameter.clone()), + module: "".to_string(), + public: true, + }, + ); + + prelude + .types_constructors + .insert(PAIR.to_string(), vec![PAIR.to_string()]); + // String prelude.types.insert( STRING.to_string(), @@ -371,7 +394,7 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { TypeConstructor { location: Span::empty(), parameters: vec![option_value.clone()], - tipo: option(option_value), + tipo: option(option_value.clone()), module: "".to_string(), public: true, }, @@ -382,12 +405,10 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { vec!["Some".to_string(), "None".to_string()], ); - let some = generic_var(id_gen.next()); - prelude.values.insert( "Some".to_string(), ValueConstructor::public( - function(vec![some.clone()], option(some)), + function(vec![option_value.clone()], option(option_value.clone())), ValueConstructorVariant::Record { module: "".into(), name: "Some".to_string(), @@ -399,12 +420,10 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { ), ); - let some = 
generic_var(id_gen.next()); - prelude.values.insert( "None".to_string(), ValueConstructor::public( - option(some), + option(option_value), ValueConstructorVariant::Record { module: "".into(), name: "None".to_string(), @@ -422,7 +441,6 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { // Seeded { seed: ByteArray, choices: ByteArray } // Replayed { cursor: Int, choices: ByteArray } // } - prelude.types.insert( PRNG.to_string(), TypeConstructor { @@ -487,7 +505,6 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { // // pub type Fuzzer = // fn(PRNG) -> Option<(PRNG, a)> - let fuzzer_value = generic_var(id_gen.next()); prelude.types.insert( FUZZER.to_string(), @@ -500,12 +517,28 @@ pub fn prelude(id_gen: &IdGenerator) -> TypeInfo { }, ); + // Map + // + // pub type Map = List> + let alist_key = generic_var(id_gen.next()); + let alist_value = generic_var(id_gen.next()); + prelude.types.insert( + ALIST.to_string(), + TypeConstructor { + location: Span::empty(), + parameters: vec![alist_key.clone(), alist_value.clone()], + tipo: map(alist_key, alist_value), + module: "".to_string(), + public: true, + }, + ); + prelude } pub fn plutus(id_gen: &IdGenerator) -> TypeInfo { let mut plutus = TypeInfo { - name: "aiken/builtin".to_string(), + name: BUILTIN.to_string(), package: "".to_string(), kind: ModuleKind::Lib, types: HashMap::new(), @@ -658,7 +691,7 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) -> (tipo, 2) } DefaultFunction::MapData => { - let tipo = function(vec![list(tuple(vec![data(), data()]))], data()); + let tipo = function(vec![list(pair(data(), data()))], data()); (tipo, 1) } @@ -678,12 +711,12 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) -> (tipo, 1) } DefaultFunction::UnConstrData => { - let tipo = function(vec![data()], tuple(vec![int(), list(data())])); + let tipo = function(vec![data()], pair(int(), list(data()))); (tipo, 1) } DefaultFunction::UnMapData => { - let tipo = function(vec![data()], list(tuple(vec![data(), data()]))); + let tipo = function(vec![data()], list(pair(data(), data()))); (tipo, 1) } @@ -728,7 +761,7 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) -> (tipo, 6) } DefaultFunction::MkPairData => { - let tipo = function(vec![data(), data()], tuple(vec![data(), data()])); + let tipo = function(vec![data(), data()], pair(data(), data())); (tipo, 2) } DefaultFunction::MkNilData => { @@ -736,7 +769,7 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) -> (tipo, 0) } DefaultFunction::MkNilPairData => { - let tipo = function(vec![], list(tuple(vec![data(), data()]))); + let tipo = function(vec![], list(pair(data(), data()))); (tipo, 0) } DefaultFunction::ChooseUnit => { @@ -752,13 +785,13 @@ pub fn from_default_function(builtin: DefaultFunction, id_gen: &IdGenerator) -> DefaultFunction::FstPair => { let a = generic_var(id_gen.next()); let b = generic_var(id_gen.next()); - let tipo = function(vec![tuple(vec![a.clone(), b])], a); + let tipo = function(vec![pair(a.clone(), b)], a); (tipo, 1) } DefaultFunction::SndPair => { let a = generic_var(id_gen.next()); let b = generic_var(id_gen.next()); - let tipo = function(vec![tuple(vec![a, b.clone()])], b); + let tipo = function(vec![pair(a, b.clone())], b); (tipo, 1) } DefaultFunction::ChooseList => { @@ -1334,6 +1367,14 @@ pub fn tuple(elems: Vec>) -> Rc { Rc::new(Type::Tuple { elems, alias: None }) } +pub fn pair(fst: Rc, snd: Rc) -> Rc { + Rc::new(Type::Pair { + fst, + snd, + alias: None, + }) +} 
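For orientation, here is a minimal, self-contained Rust sketch of the idea behind the `pair`/`map` helpers introduced above: `Pair` becomes its own type constructor rather than a 2-tuple, and a map is simply a list of pairs. The `Ty` enum and helper names below are illustrative only, not the actual aiken-lang `Type` enum (which also carries aliases, opacity flags, module info, etc.).

```rust
// Illustrative sketch only: a stripped-down type representation showing how
// Pair is modelled as a first-class constructor next to Tuple, and how a map
// is nothing more than List<Pair<k, v>>.
use std::rc::Rc;

#[derive(Debug)]
enum Ty {
    App { name: String, args: Vec<Rc<Ty>> }, // e.g. Data, Int, List<a>
    Tuple { elems: Vec<Rc<Ty>> },            // 3+ elements after this change
    Pair { fst: Rc<Ty>, snd: Rc<Ty> },       // the new 2-element primitive
}

fn data() -> Rc<Ty> {
    Rc::new(Ty::App { name: "Data".into(), args: vec![] })
}

fn pair(fst: Rc<Ty>, snd: Rc<Ty>) -> Rc<Ty> {
    Rc::new(Ty::Pair { fst, snd })
}

fn list(t: Rc<Ty>) -> Rc<Ty> {
    Rc::new(Ty::App { name: "List".into(), args: vec![t] })
}

// Mirrors the map(k, v) helper added in builtins.rs: a map is a list of pairs.
fn map(k: Rc<Ty>, v: Rc<Ty>) -> Rc<Ty> {
    list(pair(k, v))
}

fn main() {
    // UnMapData-style result type: List<Pair<Data, Data>>.
    let un_map_result = map(data(), data());
    println!("{un_map_result:?}");
}
```

In the actual diff, the `map()` helper additionally attaches a type-alias annotation (`AList`) to the underlying `List` of `Pair`s, so the alias can be displayed instead of the raw `List<Pair<k, v>>`.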
+ pub fn bool() -> Rc { Rc::new(Type::App { args: vec![], @@ -1397,9 +1438,43 @@ pub fn fuzzer(a: Rc) -> Rc { }) } +pub fn map(k: Rc, v: Rc) -> Rc { + Rc::new(Type::App { + public: true, + contains_opaque: false, + module: "".to_string(), + name: LIST.to_string(), + args: vec![pair(k, v)], + alias: Some( + TypeAliasAnnotation { + alias: ALIST.to_string(), + parameters: vec!["k".to_string(), "v".to_string()], + annotation: Annotation::Constructor { + location: Span::empty(), + module: None, + name: LIST.to_string(), + arguments: vec![Annotation::Pair { + location: Span::empty(), + fst: Box::new(Annotation::Var { + location: Span::empty(), + name: "k".to_string(), + }), + snd: Box::new(Annotation::Var { + location: Span::empty(), + name: "v".to_string(), + }), + }], + }, + } + .into(), + ), + }) +} + pub fn list(t: Rc) -> Rc { Rc::new(Type::App { public: true, + // FIXME: We should probably have t.contains_opaque here? contains_opaque: false, name: LIST.to_string(), module: "".to_string(), @@ -1433,6 +1508,7 @@ pub fn void() -> Rc { pub fn option(a: Rc) -> Rc { Rc::new(Type::App { public: true, + // FIXME: We should probably have t.contains_opaque here? contains_opaque: false, name: OPTION.to_string(), module: "".to_string(), diff --git a/crates/aiken-lang/src/expr.rs b/crates/aiken-lang/src/expr.rs index 7655c9ff..961adb5f 100644 --- a/crates/aiken-lang/src/expr.rs +++ b/crates/aiken-lang/src/expr.rs @@ -161,6 +161,13 @@ pub enum TypedExpr { elems: Vec, }, + Pair { + location: Span, + tipo: Rc, + fst: Box, + snd: Box, + }, + TupleIndex { location: Span, tipo: Rc, @@ -214,6 +221,7 @@ impl TypedExpr { | Self::UnOp { tipo, .. } | Self::BinOp { tipo, .. } | Self::Tuple { tipo, .. } + | Self::Pair { tipo, .. } | Self::String { tipo, .. } | Self::ByteArray { tipo, .. } | Self::TupleIndex { tipo, .. } @@ -256,6 +264,7 @@ impl TypedExpr { | TypedExpr::ErrorTerm { .. } | TypedExpr::BinOp { .. } | TypedExpr::Tuple { .. } + | TypedExpr::Pair { .. } | TypedExpr::UnOp { .. } | TypedExpr::String { .. } | TypedExpr::Sequence { .. } @@ -299,6 +308,7 @@ impl TypedExpr { | Self::List { location, .. } | Self::BinOp { location, .. } | Self::Tuple { location, .. } + | Self::Pair { location, .. } | Self::String { location, .. } | Self::UnOp { location, .. } | Self::Pipeline { location, .. } @@ -337,6 +347,7 @@ impl TypedExpr { | Self::List { location, .. } | Self::BinOp { location, .. } | Self::Tuple { location, .. } + | Self::Pair { location, .. } | Self::String { location, .. } | Self::UnOp { location, .. } | Self::Sequence { location, .. } @@ -392,6 +403,11 @@ impl TypedExpr { .find_map(|e| e.find_node(byte_index)) .or(Some(Located::Expression(self))), + TypedExpr::Pair { fst, snd, .. } => [fst, snd] + .iter() + .find_map(|e| e.find_node(byte_index)) + .or(Some(Located::Expression(self))), + TypedExpr::List { elements, tail, .. } => elements .iter() .find_map(|e| e.find_node(byte_index)) @@ -578,6 +594,12 @@ pub enum UntypedExpr { elems: Vec, }, + Pair { + location: Span, + fst: Box, + snd: Box, + }, + TupleIndex { location: Span, index: usize, @@ -771,11 +793,10 @@ impl UntypedExpr { }, uplc::ast::Constant::ProtoPair(_, _, left, right) => match tipo { - Type::Tuple { elems, .. } => Ok(UntypedExpr::Tuple { - location: Span::empty(), - elems: [left.as_ref(), right.as_ref()] + Type::Pair { fst, snd, .. 
} => { + let elems = [left.as_ref(), right.as_ref()] .into_iter() - .zip(elems) + .zip([fst, snd]) .map(|(arg, arg_type)| { UntypedExpr::do_reify_constant( generics, @@ -784,10 +805,16 @@ impl UntypedExpr { arg_type, ) }) - .collect::, _>>()?, - }), + .collect::, _>>()?; + + Ok(UntypedExpr::Pair { + location: Span::empty(), + fst: elems.first().unwrap().to_owned().into(), + snd: elems.last().unwrap().to_owned().into(), + }) + } _ => Err(format!( - "invalid type annotation. expected Tuple but got: {tipo:?}" + "invalid type annotation. expected Pair but got: {tipo:?}" )), }, @@ -882,9 +909,10 @@ impl UntypedExpr { location: Span::empty(), elements: kvs .into_iter() - .map(|(k, v)| UntypedExpr::Tuple { + .map(|(k, v)| UntypedExpr::Pair { location: Span::empty(), - elems: vec![UntypedExpr::reify_blind(k), UntypedExpr::reify_blind(v)], + fst: UntypedExpr::reify_blind(k).into(), + snd: UntypedExpr::reify_blind(v).into(), }) .collect::>(), tail: None, @@ -1000,6 +1028,21 @@ impl UntypedExpr { }) .collect::, _>>()?, }), + Type::Pair { fst, snd, .. } => { + let mut elems = args + .into_iter() + .zip([fst, snd]) + .map(|(arg, arg_type)| { + UntypedExpr::do_reify_data(generics, data_types, arg, arg_type) + }) + .collect::, _>>()?; + + Ok(UntypedExpr::Pair { + location: Span::empty(), + fst: elems.remove(0).into(), + snd: elems.remove(0).into(), + }) + } _ => Err(format!( "invalid type annotation. expected List but got: {tipo:?}" )), @@ -1272,6 +1315,7 @@ impl UntypedExpr { | Self::ByteArray { location, .. } | Self::BinOp { location, .. } | Self::Tuple { location, .. } + | Self::Pair { location, .. } | Self::String { location, .. } | Self::Assignment { location, .. } | Self::TupleIndex { location, .. } diff --git a/crates/aiken-lang/src/format.rs b/crates/aiken-lang/src/format.rs index 3114b435..041c6049 100644 --- a/crates/aiken-lang/src/format.rs +++ b/crates/aiken-lang/src/format.rs @@ -424,6 +424,14 @@ impl<'comments> Formatter<'comments> { Annotation::Tuple { elems, .. } => { wrap_args(elems.iter().map(|t| (self.annotation(t), false))) } + Annotation::Pair { fst, snd, .. } => "Pair" + .to_doc() + .append("<") + .append(self.annotation(fst)) + .append(break_(",", ", ")) + .append(self.annotation(snd)) + .append(">") + .group(), } .group() } @@ -979,6 +987,15 @@ impl<'comments> Formatter<'comments> { wrap_args(elems.iter().map(|e| (self.wrap_expr(e), false))).group() } + UntypedExpr::Pair { fst, snd, .. } => "Pair" + .to_doc() + .append("(") + .append(self.expr(fst, false)) + .append(break_(",", ", ")) + .append(self.expr(snd, false)) + .append(")") + .group(), + UntypedExpr::TupleIndex { index, tuple, .. } => { let suffix = Ordinal(*index + 1).suffix().to_doc(); self.expr(tuple, false) @@ -1778,6 +1795,15 @@ impl<'comments> Formatter<'comments> { wrap_args(elems.iter().map(|e| (self.pattern(e), false))).group() } + Pattern::Pair { fst, snd, .. } => "Pair" + .to_doc() + .append("(") + .append(self.pattern(fst)) + .append(break_(",", ", ")) + .append(self.pattern(snd)) + .append(")") + .group(), + Pattern::List { elements, tail, .. } => { let elements_document = join(elements.iter().map(|e| self.pattern(e)), break_(",", ", ")); diff --git a/crates/aiken-lang/src/gen_uplc.rs b/crates/aiken-lang/src/gen_uplc.rs index 1bd51d04..5ce4b72b 100644 --- a/crates/aiken-lang/src/gen_uplc.rs +++ b/crates/aiken-lang/src/gen_uplc.rs @@ -373,7 +373,13 @@ impl<'a> CodeGenerator<'a> { .. 
} => { let data_type = lookup_data_type_by_tipo(&self.data_types, tipo) - .expect("Creating a record with no record definition."); + .unwrap_or_else(|| + panic!( + "Creating a record of type {:?} with no record definition. Known definitions: {:?}", + tipo.to_pretty(0), + self.data_types.keys() + ) + ); let (constr_index, _) = data_type .constructors @@ -647,6 +653,11 @@ impl<'a> CodeGenerator<'a> { record, .. } => { + assert!( + !record.tipo().is_pair(), + "illegal record access on a Pair. This should have been a tuple-index access." + ); + if check_replaceable_opaque_type(&record.tipo(), &self.data_types) { self.build(record, module_build_name, &[]) } else { @@ -698,23 +709,25 @@ impl<'a> CodeGenerator<'a> { field_map, .. } => { - let data_type = lookup_data_type_by_tipo(&self.data_types, tipo); + let val_constructor = { + let data_type = lookup_data_type_by_tipo(&self.data_types, tipo); - let val_constructor = ValueConstructor::public( - tipo.clone(), - ValueConstructorVariant::Record { - name: name.clone(), - arity: *arity, - field_map: field_map.clone(), - location: Span::empty(), - module: module_name.clone(), - constructors_count: data_type - .expect("Created a module type without a definition?") - .constructors - .len() - as u16, - }, - ); + ValueConstructor::public( + tipo.clone(), + ValueConstructorVariant::Record { + name: name.clone(), + arity: *arity, + field_map: field_map.clone(), + location: Span::empty(), + module: module_name.clone(), + constructors_count: data_type + .expect("Created a module type without a definition?") + .constructors + .len() + as u16, + }, + ) + }; AirTree::var(val_constructor, name, "") } @@ -751,6 +764,12 @@ impl<'a> CodeGenerator<'a> { } }, + TypedExpr::Pair { tipo, fst, snd, .. } => AirTree::pair( + self.build(fst, module_build_name, &[]), + self.build(snd, module_build_name, &[]), + tipo.clone(), + ), + TypedExpr::Tuple { tipo, elems, .. } => AirTree::tuple( elems .iter() @@ -762,7 +781,7 @@ impl<'a> CodeGenerator<'a> { TypedExpr::TupleIndex { index, tuple, tipo, .. } => { - if tuple.tipo().is_2_tuple() { + if tuple.tipo().is_pair() { AirTree::pair_index( *index, tipo.clone(), @@ -919,6 +938,7 @@ impl<'a> CodeGenerator<'a> { AirTree::let_assignment(name, value, then) } } + Pattern::Assign { name, pattern, .. } => { let inner_pattern = self.assignment( pattern, @@ -929,6 +949,7 @@ impl<'a> CodeGenerator<'a> { ); AirTree::let_assignment(name, value, inner_pattern) } + Pattern::Discard { name, .. } => { if props.full_check { let name = &format!("__discard_expect_{}", name); @@ -959,6 +980,7 @@ impl<'a> CodeGenerator<'a> { AirTree::no_op(then) } } + Pattern::List { elements, tail, .. } => { assert!(tipo.is_list()); assert!(props.kind.is_expect()); @@ -1085,160 +1107,273 @@ impl<'a> CodeGenerator<'a> { ) } } + + Pattern::Pair { + fst, + snd, + location: _, + } => { + let mut type_map: IndexMap> = IndexMap::new(); + + for (index, arg) in tipo.get_inner_types().iter().enumerate() { + let field_type = arg.clone(); + type_map.insert(index, field_type); + } + + assert!(type_map.len() == 2); + + let mut fields = vec![]; + + let then = [fst, snd] + .iter() + .enumerate() + .rfold(then, |then, (field_index, arg)| { + let field_name = match arg.as_ref() { + Pattern::Var { name, .. } => name.to_string(), + Pattern::Assign { name, .. } => name.to_string(), + Pattern::Discard { name, .. 
} => { + if props.full_check { + format!("__discard_{}_{}", name, field_index) + } else { + "_".to_string() + } + } + _ => format!( + "field_{}_span_{}_{}", + field_index, + arg.location().start, + arg.location().end + ), + }; + + let arg_type = type_map.get(&field_index).unwrap_or_else(|| { + unreachable!( + "Missing type for field {} of constr {}", + field_index, field_name + ) + }); + + let val = AirTree::local_var(&field_name, arg_type.clone()); + + let then = if field_name != "_" { + self.assignment( + arg, + val, + then, + arg_type, + AssignmentProperties { + value_type: arg_type.clone(), + kind: props.kind, + remove_unused: true, + full_check: props.full_check, + msg_func: props.msg_func.clone(), + }, + ) + } else { + then + }; + + fields.push((field_index, field_name, arg_type.clone())); + + then + }); + + fields.reverse(); + + // This `value` is either value param that was passed in or + // local var + let constructor_name = format!( + "__constructor_{}_span_{}_{}", + "Pair", + pattern.location().start, + pattern.location().end + ); + + let local_value = AirTree::local_var(&constructor_name, tipo.clone()); + + let then = { + let (is_expect, msg) = if props.full_check { + (true, props.msg_func.clone()) + } else { + (false, None) + }; + assert!(fields.len() == 2); + + AirTree::pair_access( + fields + .first() + .map(|x| if x.1 == "_" { None } else { Some(x.1.clone()) }) + .unwrap(), + fields + .last() + .map(|x| if x.1 == "_" { None } else { Some(x.1.clone()) }) + .unwrap(), + tipo.clone(), + local_value, + msg, + is_expect, + then, + ) + }; + + AirTree::let_assignment(constructor_name, value, then) + } + + Pattern::Constructor { + constructor: PatternConstructor::Record { name, .. }, + .. + } if tipo.is_bool() => { + assert!(props.kind.is_expect()); + + AirTree::assert_bool(name == "True", value, props.msg_func, then) + } + + Pattern::Constructor { .. } if tipo.is_void() => { + // Void type is checked when casting from data + // So we just assign the value and move on + AirTree::let_assignment("_", value, then) + } + Pattern::Constructor { arguments, constructor: PatternConstructor::Record { name, field_map }, tipo: constr_tipo, .. 
} => { - if tipo.is_bool() { - assert!(props.kind.is_expect()); + // Constr execution branch + let field_map = field_map.clone(); - AirTree::assert_bool(name == "True", value, props.msg_func, then) - } else if tipo.is_void() { - AirTree::let_assignment("_", value, then) - } else { - let field_map = field_map.clone(); + let mut type_map: IndexMap> = IndexMap::new(); - let mut type_map: IndexMap> = IndexMap::new(); + for (index, arg) in constr_tipo + .arg_types() + .expect("Mismatched type") + .iter() + .enumerate() + { + let field_type = arg.clone(); - for (index, arg) in constr_tipo - .arg_types() - .expect("Mismatched type") - .iter() - .enumerate() - { - let field_type = arg.clone(); + type_map.insert(index, field_type); + } - type_map.insert(index, field_type); - } + assert!(type_map.len() >= arguments.len()); - assert!(type_map.len() >= arguments.len()); + let mut fields = vec![]; - let mut fields = vec![]; + let then = arguments + .iter() + .enumerate() + .rfold(then, |then, (index, arg)| { + let label = arg.label.clone().unwrap_or_default(); - let then = arguments - .iter() - .enumerate() - .rfold(then, |then, (index, arg)| { - let label = arg.label.clone().unwrap_or_default(); + let field_index = if let Some(field_map) = &field_map { + *field_map.fields.get(&label).map(|x| &x.0).unwrap_or(&index) + } else { + index + }; - let field_index = if let Some(field_map) = &field_map { - *field_map.fields.get(&label).map(|x| &x.0).unwrap_or(&index) - } else { - index - }; - - let field_name = match &arg.value { - Pattern::Var { name, .. } => name.to_string(), - Pattern::Assign { name, .. } => name.to_string(), - Pattern::Discard { name, .. } => { - if props.full_check { - format!("__discard_{}_{}", name, index) - } else { - "_".to_string() - } + let field_name = match &arg.value { + Pattern::Var { name, .. } => name.to_string(), + Pattern::Assign { name, .. } => name.to_string(), + Pattern::Discard { name, .. 
} => { + if props.full_check { + format!("__discard_{}_{}", name, index) + } else { + "_".to_string() } - _ => format!( - "field_{}_span_{}_{}", - field_index, - arg.value.location().start, - arg.value.location().end - ), - }; + } + _ => format!( + "field_{}_span_{}_{}", + field_index, + arg.value.location().start, + arg.value.location().end + ), + }; - let arg_type = type_map.get(&field_index).unwrap_or_else(|| { - unreachable!( - "Missing type for field {} of constr {}", - field_index, name - ) - }); - - let val = AirTree::local_var(&field_name, arg_type.clone()); - - let then = if field_name != "_" { - self.assignment( - &arg.value, - val, - then, - arg_type, - AssignmentProperties { - value_type: arg_type.clone(), - kind: props.kind, - remove_unused: true, - full_check: props.full_check, - msg_func: props.msg_func.clone(), - }, - ) - } else { - then - }; - - fields.push((field_index, field_name, arg_type.clone())); - - then + let arg_type = type_map.get(&field_index).unwrap_or_else(|| { + unreachable!( + "Missing type for field {} of constr {}", + field_index, name + ) }); - fields.reverse(); + let val = AirTree::local_var(&field_name, arg_type.clone()); - // This `value` is either value param that was passed in or - // local var - let constructor_name = format!( - "__constructor_{}_span_{}_{}", - name, - pattern.location().start, - pattern.location().end - ); - - let local_value = AirTree::local_var(&constructor_name, tipo.clone()); - - let then = if check_replaceable_opaque_type(tipo, &self.data_types) { - AirTree::let_assignment(&fields[0].1, local_value, then) - } else { - let (is_expect, msg) = if props.full_check { - (true, props.msg_func.clone()) - } else { - (false, None) - }; - AirTree::fields_expose(fields, local_value, msg, is_expect, then) - }; - - // TODO: See if we can combine these two if-conditions; - // - // i.e. can we lift data_type assignment out of the first if? 
- - let then = if props.kind.is_expect() { - let data_type = lookup_data_type_by_tipo(&self.data_types, tipo) - .unwrap_or_else(|| { - unreachable!("Failed to find definition for {}", name) - }); - - if data_type.constructors.len() > 1 || props.full_check { - let (index, _) = data_type - .constructors - .iter() - .enumerate() - .find(|(_, constr)| constr.name == *name) - .unwrap_or_else(|| { - panic!("Found constructor type {} with 0 constructors", name) - }); - - AirTree::assert_constr_index( - index, - AirTree::local_var(&constructor_name, tipo.clone()), - props.msg_func, + let then = if field_name != "_" { + self.assignment( + &arg.value, + val, then, + arg_type, + AssignmentProperties { + value_type: arg_type.clone(), + kind: props.kind, + remove_unused: true, + full_check: props.full_check, + msg_func: props.msg_func.clone(), + }, ) } else { then - } - } else { - then - }; + }; - AirTree::let_assignment(constructor_name, value, then) - } + fields.push((field_index, field_name, arg_type.clone())); + + then + }); + + fields.reverse(); + + // This `value` is either value param that was passed in or + // local var + let constructor_name = format!( + "__constructor_{}_span_{}_{}", + name, + pattern.location().start, + pattern.location().end + ); + + let local_value = AirTree::local_var(&constructor_name, tipo.clone()); + + let then = if check_replaceable_opaque_type(tipo, &self.data_types) { + AirTree::let_assignment(&fields[0].1, local_value, then) + } else { + let (is_expect, msg) = if props.full_check { + (true, props.msg_func.clone()) + } else { + (false, None) + }; + AirTree::fields_expose(fields, local_value, msg, is_expect, then) + }; + + let data_type = lookup_data_type_by_tipo(&self.data_types, tipo) + .unwrap_or_else(|| unreachable!("Failed to find definition for {}", name)); + + let then = if props.kind.is_expect() + && (data_type.constructors.len() > 1 || props.full_check) + { + let (index, _) = data_type + .constructors + .iter() + .enumerate() + .find(|(_, constr)| constr.name == *name) + .unwrap_or_else(|| { + panic!("Found constructor type {} with 0 constructors", name) + }); + + AirTree::assert_constr_index( + index, + AirTree::local_var(&constructor_name, tipo.clone()), + props.msg_func, + then, + ) + } else { + then + }; + + AirTree::let_assignment(constructor_name, value, then) } + Pattern::Tuple { elems, location, .. 
} => { @@ -1331,116 +1466,64 @@ impl<'a> CodeGenerator<'a> { // Shouldn't be needed but still here just in case // this function is called from anywhere else besides assignment let tipo = &convert_opaque_type(tipo, &self.data_types, true); + let uplc_type = tipo.get_uplc_type(); - if tipo.is_primitive() { - // Since we would return void anyway and ignore then we can just return value here and ignore - value - } else if tipo.is_map() { - assert!(!tipo.get_inner_types().is_empty()); + match uplc_type { + // primitives + Some( + UplcType::Integer + | UplcType::String + | UplcType::Bool + | UplcType::ByteString + | UplcType::Unit + | UplcType::Bls12_381G1Element + | UplcType::Bls12_381G2Element + | UplcType::Bls12_381MlResult, + ) => value, + // Untyped Data + Some(UplcType::Data) => value, - let inner_list_type = &tipo.get_inner_types()[0]; - let inner_pair_types = inner_list_type.get_inner_types(); + // Map type + Some(UplcType::List(_)) if tipo.is_map() => { + assert!(!tipo.get_inner_types().is_empty()); - assert!(inner_pair_types.len() == 2); + let inner_list_type = &tipo.get_inner_types()[0]; + let inner_pair_types = inner_list_type.get_inner_types(); - let map_name = format!("__map_span_{}_{}", location.start, location.end); - let pair_name = format!("__pair_span_{}_{}", location.start, location.end); - let fst_name = format!("__pair_fst_span_{}_{}", location.start, location.end); - let snd_name = format!("__pair_snd_span_{}_{}", location.start, location.end); + assert!(inner_pair_types.len() == 2); - let expect_fst = self.expect_type_assign( - &inner_pair_types[0], - AirTree::local_var(fst_name.clone(), inner_pair_types[0].clone()), - defined_data_types, - location, - msg_func.clone(), - ); + let map_name = format!("__map_span_{}_{}", location.start, location.end); + let pair_name = format!("__pair_span_{}_{}", location.start, location.end); + let fst_name = format!("__pair_fst_span_{}_{}", location.start, location.end); + let snd_name = format!("__pair_snd_span_{}_{}", location.start, location.end); - let expect_snd = self.expect_type_assign( - &inner_pair_types[1], - AirTree::local_var(snd_name.clone(), inner_pair_types[1].clone()), - defined_data_types, - location, - msg_func.clone(), - ); - - let anon_func_body = AirTree::tuple_access( - vec![fst_name, snd_name], - inner_list_type.clone(), - AirTree::local_var(&pair_name, inner_list_type.clone()), - msg_func.clone(), - msg_func.is_some(), - AirTree::let_assignment("_", expect_fst, expect_snd), - ); - - let unwrap_function = AirTree::anon_func(vec![pair_name], anon_func_body); - - let function = self.code_gen_functions.get(EXPECT_ON_LIST); - - if function.is_none() { - let expect_list_func = AirTree::expect_on_list(); - self.code_gen_functions.insert( - EXPECT_ON_LIST.to_string(), - CodeGenFunction::Function { - body: expect_list_func, - params: vec!["__list_to_check".to_string(), "__check_with".to_string()], - }, - ); - } - - if let Some(counter) = defined_data_types.get_mut(EXPECT_ON_LIST) { - *counter += 1 - } else { - defined_data_types.insert(EXPECT_ON_LIST.to_string(), 1); - } - - let func_call = AirTree::call( - AirTree::var( - ValueConstructor::public( - void(), - ValueConstructorVariant::ModuleFn { - name: EXPECT_ON_LIST.to_string(), - field_map: None, - module: "".to_string(), - arity: 1, - location, - builtin: None, - }, - ), - EXPECT_ON_LIST, - "", - ), - void(), - vec![AirTree::local_var(&map_name, tipo.clone()), unwrap_function], - ); - - AirTree::let_assignment(&map_name, value, func_call) - } else if tipo.is_list() { 
- assert!(!tipo.get_inner_types().is_empty()); - - let inner_list_type = &tipo.get_inner_types()[0]; - - if inner_list_type.is_data() { - value - } else { - let list_name = format!("__list_span_{}_{}", location.start, location.end); - let item_name = format!("__item_span_{}_{}", location.start, location.end); - - let expect_item = self.expect_type_assign( - inner_list_type, - AirTree::cast_from_data( - AirTree::local_var(&item_name, data()), - inner_list_type.clone(), - msg_func.clone(), - ), + let expect_fst = self.expect_type_assign( + &inner_pair_types[0], + AirTree::local_var(fst_name.clone(), inner_pair_types[0].clone()), defined_data_types, location, - msg_func, + msg_func.clone(), ); - let anon_func_body = expect_item; + let expect_snd = self.expect_type_assign( + &inner_pair_types[1], + AirTree::local_var(snd_name.clone(), inner_pair_types[1].clone()), + defined_data_types, + location, + msg_func.clone(), + ); - let unwrap_function = AirTree::anon_func(vec![item_name], anon_func_body); + let anon_func_body = AirTree::pair_access( + Some(fst_name), + Some(snd_name), + inner_list_type.clone(), + AirTree::local_var(&pair_name, inner_list_type.clone()), + msg_func.clone(), + true, + AirTree::let_assignment("_", expect_fst, expect_snd), + ); + + let unwrap_function = AirTree::anon_func(vec![pair_name], anon_func_body); let function = self.code_gen_functions.get(EXPECT_ON_LIST); @@ -1478,285 +1561,367 @@ impl<'a> CodeGenerator<'a> { "", ), void(), - vec![ - AirTree::local_var(&list_name, tipo.clone()), - unwrap_function, - ], + vec![AirTree::local_var(&map_name, tipo.clone()), unwrap_function], ); - AirTree::let_assignment(&list_name, value, func_call) + AirTree::let_assignment(&map_name, value, func_call) } - } else if tipo.is_2_tuple() { - let tuple_inner_types = tipo.get_inner_types(); + // Tuple type + Some(UplcType::List(_)) if tipo.is_tuple() => { + let tuple_inner_types = tipo.get_inner_types(); - assert!(tuple_inner_types.len() == 2); + assert!(!tuple_inner_types.is_empty()); - let pair_name = format!("__pair_span_{}_{}", location.start, location.end); + let tuple_name = format!("__tuple_span_{}_{}", location.start, location.end); - let fst_name = format!("__pair_fst_span_{}_{}", location.start, location.end); - let snd_name = format!("__pair_snd_span_{}_{}", location.start, location.end); + let mut tuple_expect_items = vec![]; - let expect_fst = self.expect_type_assign( - &tuple_inner_types[0], - AirTree::local_var(fst_name.clone(), tuple_inner_types[0].clone()), - defined_data_types, - location, - msg_func.clone(), - ); + let then = tuple_inner_types.iter().enumerate().rfold( + AirTree::void(), + |then, (index, arg)| { + let tuple_index_name = format!( + "__tuple_index_{}_span_{}_{}", + index, location.start, location.end + ); - let expect_snd = self.expect_type_assign( - &tuple_inner_types[1], - AirTree::local_var(snd_name.clone(), tuple_inner_types[1].clone()), - defined_data_types, - location, - msg_func.clone(), - ); + let expect_tuple_item = self.expect_type_assign( + arg, + AirTree::local_var(&tuple_index_name, arg.clone()), + defined_data_types, + location, + msg_func.clone(), + ); - let tuple_access = AirTree::tuple_access( - vec![fst_name, snd_name], - tipo.clone(), - AirTree::local_var(&pair_name, tipo.clone()), - msg_func.clone(), - msg_func.is_some(), - AirTree::let_assignment("_", expect_fst, expect_snd), - ); + tuple_expect_items.push(tuple_index_name); - AirTree::let_assignment(&pair_name, value, tuple_access) - } else if tipo.is_tuple() { - let tuple_inner_types 
= tipo.get_inner_types(); + AirTree::let_assignment("_", expect_tuple_item, then) + }, + ); - assert!(!tuple_inner_types.is_empty()); + tuple_expect_items.reverse(); - let tuple_name = format!("__tuple_span_{}_{}", location.start, location.end); + let tuple_access = AirTree::tuple_access( + tuple_expect_items, + tipo.clone(), + AirTree::local_var(&tuple_name, tipo.clone()), + msg_func, + true, + then, + ); - let mut tuple_expect_items = vec![]; + AirTree::let_assignment(&tuple_name, value, tuple_access) + } + // Regular List type + Some(UplcType::List(_)) => { + assert!(!tipo.get_inner_types().is_empty()); - let then = tuple_inner_types.iter().enumerate().rfold( - AirTree::void(), - |then, (index, arg)| { - let tuple_index_name = format!( - "__tuple_index_{}_span_{}_{}", - index, location.start, location.end - ); + let inner_list_type = &tipo.get_inner_types()[0]; - let expect_tuple_item = self.expect_type_assign( - arg, - AirTree::local_var(&tuple_index_name, arg.clone()), + if inner_list_type.is_data() { + value + } else { + let list_name = format!("__list_span_{}_{}", location.start, location.end); + let item_name = format!("__item_span_{}_{}", location.start, location.end); + + let expect_item = self.expect_type_assign( + inner_list_type, + AirTree::cast_from_data( + AirTree::local_var(&item_name, data()), + inner_list_type.clone(), + msg_func.clone(), + ), defined_data_types, location, - msg_func.clone(), + msg_func, ); - tuple_expect_items.push(tuple_index_name); + let anon_func_body = expect_item; - AirTree::let_assignment("_", expect_tuple_item, then) - }, - ); + let unwrap_function = AirTree::anon_func(vec![item_name], anon_func_body); - tuple_expect_items.reverse(); + let function = self.code_gen_functions.get(EXPECT_ON_LIST); - let tuple_access = AirTree::tuple_access( - tuple_expect_items, - tipo.clone(), - AirTree::local_var(&tuple_name, tipo.clone()), - msg_func, - true, - then, - ); - - AirTree::let_assignment(&tuple_name, value, tuple_access) - - // Constructor - } else { - let data_type = lookup_data_type_by_tipo(&self.data_types, tipo).unwrap_or_else(|| { - unreachable!("We need a data type definition for type {:#?}", tipo) - }); - - let data_type_variant = tipo - .get_inner_types() - .iter() - .map(|arg| get_arg_type_name(arg)) - .join("_"); - - assert!(data_type.typed_parameters.len() == tipo.arg_types().unwrap().len()); - - let mono_types: IndexMap> = if !data_type.typed_parameters.is_empty() { - data_type - .typed_parameters - .iter() - .zip(tipo.arg_types().unwrap()) - .flat_map(|item| get_generic_id_and_type(item.0, &item.1)) - .collect() - } else { - IndexMap::new() - }; - - let data_type_name = format!("__expect_{}_{}", data_type.name, data_type_variant); - let function = self.code_gen_functions.get(&data_type_name); - - // mutate code_gen_funcs and defined_data_types in this if branch - if function.is_none() && defined_data_types.get(&data_type_name).is_none() { - let (msg_term, error_term) = match self.tracing { - TraceLevel::Silent => (None, AirTree::error(tipo.clone(), false)), - TraceLevel::Compact | TraceLevel::Verbose => { - let msg = AirMsg::LocalVar("__param_msg".to_string()); - ( - Some(msg.clone()), - AirTree::trace( - msg.to_air_tree(), - tipo.clone(), - AirTree::error(tipo.clone(), false), - ), - ) - } - }; - - defined_data_types.insert(data_type_name.clone(), 1); - - let current_defined = defined_data_types.clone(); - let mut diff_defined_types = vec![]; - - let constr_clauses = data_type.constructors.iter().enumerate().rfold( - error_term, - |acc, 
(index, constr)| { - let mut constr_args = vec![]; - - let constr_then = constr.arguments.iter().enumerate().rfold( - AirTree::void(), - |then, (index, arg)| { - let arg_name = - arg.label.clone().unwrap_or(format!("__field_{index}")); - - let arg_tipo = find_and_replace_generics(&arg.tipo, &mono_types); - - constr_args.push((index, arg_name.clone(), arg_tipo.clone())); - - AirTree::let_assignment( - "_", - self.expect_type_assign( - &arg_tipo.clone(), - AirTree::local_var(arg_name, arg_tipo), - defined_data_types, - location, - msg_term.clone(), - ), - then, - ) + if function.is_none() { + let expect_list_func = AirTree::expect_on_list(); + self.code_gen_functions.insert( + EXPECT_ON_LIST.to_string(), + CodeGenFunction::Function { + body: expect_list_func, + params: vec![ + "__list_to_check".to_string(), + "__check_with".to_string(), + ], }, ); - constr_args.reverse(); - - let then = if constr_args.is_empty() { - AirTree::fields_empty( - AirTree::local_var( - format!( - "__constr_var_span_{}_{}", - location.start, location.end - ), - tipo.clone(), - ), - msg_term.clone(), - constr_then, - ) - } else { - AirTree::fields_expose( - constr_args, - AirTree::local_var( - format!( - "__constr_var_span_{}_{}", - location.start, location.end - ), - tipo.clone(), - ), - msg_term.clone(), - true, - constr_then, - ) - }; - - AirTree::clause( - format!("__subject_span_{}_{}", location.start, location.end), - AirTree::int(index), - tipo.clone(), - then, - acc, - false, - ) - }, - ); - - let when_expr = AirTree::when( - format!("__subject_span_{}_{}", location.start, location.end), - void(), - tipo.clone(), - AirTree::local_var( - format!("__constr_var_span_{}_{}", location.start, location.end), - tipo.clone(), - ), - constr_clauses, - ); - - let func_body = AirTree::let_assignment( - format!("__constr_var_span_{}_{}", location.start, location.end), - AirTree::local_var("__param_0", tipo.clone()), - when_expr, - ); - - for (inner_data_type, inner_count) in defined_data_types.iter() { - if let Some(prev_count) = current_defined.get(inner_data_type) { - if inner_count - prev_count > 0 { - diff_defined_types.push(inner_data_type.to_string()); - } - } else { - diff_defined_types.push(inner_data_type.to_string()); } + + if let Some(counter) = defined_data_types.get_mut(EXPECT_ON_LIST) { + *counter += 1 + } else { + defined_data_types.insert(EXPECT_ON_LIST.to_string(), 1); + } + + let func_call = AirTree::call( + AirTree::var( + ValueConstructor::public( + void(), + ValueConstructorVariant::ModuleFn { + name: EXPECT_ON_LIST.to_string(), + field_map: None, + module: "".to_string(), + arity: 1, + location, + builtin: None, + }, + ), + EXPECT_ON_LIST, + "", + ), + void(), + vec![ + AirTree::local_var(&list_name, tipo.clone()), + unwrap_function, + ], + ); + + AirTree::let_assignment(&list_name, value, func_call) } + } + // Pair type + Some(UplcType::Pair(_, _)) => { + let tuple_inner_types = tipo.get_inner_types(); - let code_gen_func = match self.tracing { - TraceLevel::Silent => CodeGenFunction::Function { - body: func_body, - params: vec!["__param_0".to_string()], - }, - TraceLevel::Compact | TraceLevel::Verbose => CodeGenFunction::Function { - body: func_body, - params: vec!["__param_0".to_string(), "__param_msg".to_string()], - }, - }; + assert!(tuple_inner_types.len() == 2); - self.code_gen_functions - .insert(data_type_name.clone(), code_gen_func); - } else if let Some(counter) = defined_data_types.get_mut(&data_type_name) { - *counter += 1; - } else { - 
defined_data_types.insert(data_type_name.to_string(), 1); + let pair_name = format!("__pair_span_{}_{}", location.start, location.end); + + let fst_name = format!("__pair_fst_span_{}_{}", location.start, location.end); + let snd_name = format!("__pair_snd_span_{}_{}", location.start, location.end); + + let expect_fst = self.expect_type_assign( + &tuple_inner_types[0], + AirTree::local_var(fst_name.clone(), tuple_inner_types[0].clone()), + defined_data_types, + location, + msg_func.clone(), + ); + + let expect_snd = self.expect_type_assign( + &tuple_inner_types[1], + AirTree::local_var(snd_name.clone(), tuple_inner_types[1].clone()), + defined_data_types, + location, + msg_func.clone(), + ); + + let pair_access = AirTree::pair_access( + Some(fst_name.clone()), + Some(snd_name.clone()), + tipo.clone(), + AirTree::local_var(&pair_name, tipo.clone()), + msg_func.clone(), + true, + AirTree::let_assignment("_", expect_fst, expect_snd), + ); + + AirTree::let_assignment(&pair_name, value, pair_access) } - let args = match self.tracing { - TraceLevel::Silent => vec![value], - TraceLevel::Compact | TraceLevel::Verbose => vec![ - value, - msg_func - .expect("should be unreachable: no msg func with tracing enabled.") - .to_air_tree(), - ], - }; + // Constr type + None => { + let data_type = + lookup_data_type_by_tipo(&self.data_types, tipo).unwrap_or_else(|| { + unreachable!("We need a data type definition for type {:#?}", tipo) + }); - let module_fn = ValueConstructorVariant::ModuleFn { - name: data_type_name.to_string(), - field_map: None, - module: "".to_string(), - arity: args.len(), - location, - builtin: None, - }; + let data_type_variant = tipo + .get_inner_types() + .iter() + .map(|arg| get_arg_type_name(arg)) + .join("_"); - let func_var = AirTree::var( - ValueConstructor::public(tipo.clone(), module_fn), - data_type_name, - "", - ); + assert!(data_type.typed_parameters.len() == tipo.arg_types().unwrap().len()); - AirTree::call(func_var, void(), args) + let mono_types: IndexMap> = if !data_type.typed_parameters.is_empty() + { + data_type + .typed_parameters + .iter() + .zip(tipo.arg_types().unwrap()) + .flat_map(|item| get_generic_id_and_type(item.0, &item.1)) + .collect() + } else { + IndexMap::new() + }; + + let data_type_name = format!("__expect_{}_{}", data_type.name, data_type_variant); + let function = self.code_gen_functions.get(&data_type_name); + + // mutate code_gen_funcs and defined_data_types in this if branch + if function.is_none() && defined_data_types.get(&data_type_name).is_none() { + let (msg_term, error_term) = match self.tracing { + TraceLevel::Silent => (None, AirTree::error(tipo.clone(), false)), + TraceLevel::Compact | TraceLevel::Verbose => { + let msg = AirMsg::LocalVar("__param_msg".to_string()); + ( + Some(msg.clone()), + AirTree::trace( + msg.to_air_tree(), + tipo.clone(), + AirTree::error(tipo.clone(), false), + ), + ) + } + }; + + defined_data_types.insert(data_type_name.clone(), 1); + + let current_defined = defined_data_types.clone(); + let mut diff_defined_types = vec![]; + + let constr_clauses = data_type.constructors.iter().enumerate().rfold( + error_term, + |acc, (index, constr)| { + let mut constr_args = vec![]; + + let constr_then = constr.arguments.iter().enumerate().rfold( + AirTree::void(), + |then, (index, arg)| { + let arg_name = + arg.label.clone().unwrap_or(format!("__field_{index}")); + + let arg_tipo = + find_and_replace_generics(&arg.tipo, &mono_types); + + constr_args.push((index, arg_name.clone(), arg_tipo.clone())); + + 
AirTree::let_assignment( + "_", + self.expect_type_assign( + &arg_tipo.clone(), + AirTree::local_var(arg_name, arg_tipo), + defined_data_types, + location, + msg_term.clone(), + ), + then, + ) + }, + ); + constr_args.reverse(); + + let then = if constr_args.is_empty() { + AirTree::fields_empty( + AirTree::local_var( + format!( + "__constr_var_span_{}_{}", + location.start, location.end + ), + tipo.clone(), + ), + msg_term.clone(), + constr_then, + ) + } else { + AirTree::fields_expose( + constr_args, + AirTree::local_var( + format!( + "__constr_var_span_{}_{}", + location.start, location.end + ), + tipo.clone(), + ), + msg_term.clone(), + true, + constr_then, + ) + }; + + AirTree::clause( + format!("__subject_span_{}_{}", location.start, location.end), + AirTree::int(index), + tipo.clone(), + then, + acc, + false, + ) + }, + ); + + let when_expr = AirTree::when( + format!("__subject_span_{}_{}", location.start, location.end), + void(), + tipo.clone(), + AirTree::local_var( + format!("__constr_var_span_{}_{}", location.start, location.end), + tipo.clone(), + ), + constr_clauses, + ); + + let func_body = AirTree::let_assignment( + format!("__constr_var_span_{}_{}", location.start, location.end), + AirTree::local_var("__param_0", tipo.clone()), + when_expr, + ); + + for (inner_data_type, inner_count) in defined_data_types.iter() { + if let Some(prev_count) = current_defined.get(inner_data_type) { + if inner_count - prev_count > 0 { + diff_defined_types.push(inner_data_type.to_string()); + } + } else { + diff_defined_types.push(inner_data_type.to_string()); + } + } + + let code_gen_func = match self.tracing { + TraceLevel::Silent => CodeGenFunction::Function { + body: func_body, + params: vec!["__param_0".to_string()], + }, + TraceLevel::Compact | TraceLevel::Verbose => CodeGenFunction::Function { + body: func_body, + params: vec!["__param_0".to_string(), "__param_msg".to_string()], + }, + }; + + self.code_gen_functions + .insert(data_type_name.clone(), code_gen_func); + } else if let Some(counter) = defined_data_types.get_mut(&data_type_name) { + *counter += 1; + } else { + defined_data_types.insert(data_type_name.to_string(), 1); + } + + let args = match self.tracing { + TraceLevel::Silent => vec![value], + TraceLevel::Compact | TraceLevel::Verbose => vec![ + value, + msg_func + .expect("should be unreachable: no msg func with tracing enabled.") + .to_air_tree(), + ], + }; + + let module_fn = ValueConstructorVariant::ModuleFn { + name: data_type_name.to_string(), + field_map: None, + module: "".to_string(), + arity: args.len(), + location, + builtin: None, + }; + + let func_var = AirTree::var( + ValueConstructor::public(tipo.clone(), module_fn), + data_type_name, + "", + ); + + AirTree::call(func_var, void(), args) + } } } @@ -1799,6 +1964,7 @@ impl<'a> CodeGenerator<'a> { } match &mut props.specific_clause { + // TODO: Implement PairClause and PairClauseGuard SpecificClause::ConstrClause => { let data_type = lookup_data_type_by_tipo(&self.data_types, subject_tipo); @@ -2098,6 +2264,27 @@ impl<'a> CodeGenerator<'a> { ) } } + SpecificClause::PairClause => { + let (_, pattern_assigns) = + self.clause_pattern(&clause.pattern, subject_tipo, props, clause_then); + + let mut next_clause_props = ClauseProperties::init( + subject_tipo, + props.clause_var_name.clone(), + props.original_subject_name.clone(), + ); + + AirTree::wrap_clause( + pattern_assigns, + self.handle_each_clause( + rest_clauses, + final_clause, + subject_tipo, + &mut next_clause_props, + module_name, + ), + ) + } } } else { // 
handle final_clause @@ -2270,7 +2457,7 @@ impl<'a> CodeGenerator<'a> { elems, subject_tipo.clone(), tail.is_some(), - AirTree::local_var(&props.original_subject_name, subject_tipo.clone()), + AirTree::local_var(&props.clause_var_name, subject_tipo.clone()), // One special usage of list access here // So for the msg we pass in empty string if tracing is on // Since check_last_item is false this will never get added to the final uplc anyway @@ -2296,6 +2483,78 @@ impl<'a> CodeGenerator<'a> { (AirTree::void(), list_assign) } + + Pattern::Pair { fst, snd, .. } => { + let items_type = subject_tipo.get_inner_types(); + + let mut name_index_assigns = vec![]; + + let next_then = + [fst, snd] + .iter() + .enumerate() + .rfold(then, |inner_then, (index, element)| { + let elem_name = match element.as_ref() { + Pattern::Var { name, .. } => Some(name.to_string()), + Pattern::Assign { name, .. } => Some(name.to_string()), + Pattern::Discard { .. } => None, + _ => Some(format!( + "pair_index_{}_span_{}_{}", + index, + element.location().start, + element.location().end + )), + }; + + let mut pair_props = ClauseProperties::init_inner( + &items_type[index], + elem_name.clone().unwrap_or_else(|| "_".to_string()), + elem_name.clone().unwrap_or_else(|| "_".to_string()), + props.final_clause, + ); + + let elem = if elem_name.is_some() { + self.nested_clause_condition( + element, + &items_type[index], + &mut pair_props, + inner_then, + ) + } else { + inner_then + }; + + props.complex_clause = + props.complex_clause || pair_props.complex_clause; + + name_index_assigns.push((elem_name, index)); + + elem + }); + + name_index_assigns.reverse(); + + let field_assign = if name_index_assigns.iter().all(|s| s.0.is_none()) { + next_then + } else { + AirTree::pair_access( + name_index_assigns[0].0.clone(), + name_index_assigns[1].0.clone(), + subject_tipo.clone(), + AirTree::local_var(props.clause_var_name.clone(), subject_tipo.clone()), + None, + false, + next_then, + ) + }; + + (AirTree::void(), field_assign) + } + + Pattern::Constructor { name, .. } if subject_tipo.is_bool() => { + (AirTree::bool(name == "True"), then) + } + Pattern::Constructor { name, arguments, @@ -2303,130 +2562,120 @@ impl<'a> CodeGenerator<'a> { tipo: function_tipo, .. } => { - if subject_tipo.is_bool() { - (AirTree::bool(name == "True"), then) - } else { - assert!( - matches!(function_tipo.as_ref().clone(), Type::Fn { .. }) - || matches!(function_tipo.as_ref().clone(), Type::App { .. }) - ); - let data_type = lookup_data_type_by_tipo(&self.data_types, subject_tipo) - .unwrap_or_else(|| { - unreachable!( - "Code Gen should have the definition for this constructor {}", - name - ) - }); + assert!( + matches!(function_tipo.as_ref().clone(), Type::Fn { .. }) + || matches!(function_tipo.as_ref().clone(), Type::App { .. }) + ); + let data_type = lookup_data_type_by_tipo(&self.data_types, subject_tipo) + .unwrap_or_else(|| { + unreachable!( + "Code Gen should have the definition for this constructor {}", + name + ) + }); - assert!(!data_type.constructors.is_empty()); + assert!(!data_type.constructors.is_empty()); - let (constr_index, _) = data_type - .constructors + let (constr_index, _) = data_type + .constructors + .iter() + .enumerate() + .find(|(_, dt)| &dt.name == name) + .unwrap(); + + let field_map = match constructor { + PatternConstructor::Record { field_map, .. 
} => field_map.clone(), + }; + + let mut type_map: IndexMap> = IndexMap::new(); + + for (index, arg) in function_tipo.arg_types().unwrap().iter().enumerate() { + let field_type = arg.clone(); + type_map.insert(index, field_type); + } + + let mut fields = vec![]; + + let next_then = + arguments .iter() .enumerate() - .find(|(_, dt)| &dt.name == name) - .unwrap(); + .rfold(then, |inner_then, (index, arg)| { + let label = arg.label.clone().unwrap_or_default(); - let field_map = match constructor { - PatternConstructor::Record { field_map, .. } => field_map.clone(), - }; + let field_index = if let Some(field_map) = &field_map { + *field_map.fields.get(&label).map(|x| &x.0).unwrap_or(&index) + } else { + index + }; - let mut type_map: IndexMap> = IndexMap::new(); + let field_name = match &arg.value { + Pattern::Var { name, .. } => name.to_string(), + Pattern::Assign { name, .. } => name.to_string(), + Pattern::Discard { .. } => "_".to_string(), + _ => format!( + "field_{}_span_{}_{}", + field_index, + arg.value.location().start, + arg.value.location().end + ), + }; - for (index, arg) in function_tipo.arg_types().unwrap().iter().enumerate() { - let field_type = arg.clone(); - type_map.insert(index, field_type); - } - - let mut fields = vec![]; - - let next_then = - arguments - .iter() - .enumerate() - .rfold(then, |inner_then, (index, arg)| { - let label = arg.label.clone().unwrap_or_default(); - - let field_index = if let Some(field_map) = &field_map { - *field_map.fields.get(&label).map(|x| &x.0).unwrap_or(&index) - } else { - index - }; - - let field_name = match &arg.value { - Pattern::Var { name, .. } => name.to_string(), - Pattern::Assign { name, .. } => name.to_string(), - Pattern::Discard { .. } => "_".to_string(), - _ => format!( - "field_{}_span_{}_{}", - field_index, - arg.value.location().start, - arg.value.location().end - ), - }; - - let arg_type = type_map.get(&field_index).unwrap_or_else(|| { - unreachable!( - "Missing type for field {} of constr {}", - field_index, name - ) - }); - - let mut field_props = ClauseProperties::init_inner( - arg_type, - field_name.clone(), - field_name.clone(), - props.final_clause, - ); - - let statement = if field_name != "_" { - self.nested_clause_condition( - &arg.value, - arg_type, - &mut field_props, - inner_then, - ) - } else { - inner_then - }; - - props.complex_clause = - props.complex_clause || field_props.complex_clause; - - fields.push((field_index, field_name, arg_type.clone())); - - statement + let arg_type = type_map.get(&field_index).unwrap_or_else(|| { + unreachable!( + "Missing type for field {} of constr {}", + field_index, name + ) }); - fields.reverse(); + let mut field_props = ClauseProperties::init_inner( + arg_type, + field_name.clone(), + field_name.clone(), + props.final_clause, + ); - let field_assign = - if check_replaceable_opaque_type(subject_tipo, &self.data_types) { - AirTree::let_assignment( - &fields[0].1, - AirTree::local_var( - props.clause_var_name.clone(), - subject_tipo.clone(), - ), - next_then, - ) - } else if fields.iter().all(|s| s.1 == "_") { - next_then - } else { - AirTree::fields_expose( - fields, - AirTree::local_var( - props.clause_var_name.clone(), - subject_tipo.clone(), - ), - None, - false, - next_then, - ) - }; + let statement = if field_name != "_" { + self.nested_clause_condition( + &arg.value, + arg_type, + &mut field_props, + inner_then, + ) + } else { + inner_then + }; - (AirTree::int(constr_index), field_assign) - } + props.complex_clause = + props.complex_clause || 
field_props.complex_clause; + + fields.push((field_index, field_name, arg_type.clone())); + + statement + }); + + fields.reverse(); + + let field_assign = if check_replaceable_opaque_type(subject_tipo, &self.data_types) + { + AirTree::let_assignment( + &fields[0].1, + AirTree::local_var(props.clause_var_name.clone(), subject_tipo.clone()), + next_then, + ) + } else if fields.iter().all(|s| s.1 == "_") { + next_then + } else { + AirTree::fields_expose( + fields, + AirTree::local_var(props.clause_var_name.clone(), subject_tipo.clone()), + None, + false, + next_then, + ) + }; + + (AirTree::int(constr_index), field_assign) } Pattern::Tuple { elems, .. } => { let items_type = subject_tipo.get_inner_types(); @@ -2548,7 +2797,7 @@ impl<'a> CodeGenerator<'a> { AirTree::tuple_access( names, subject_tipo.clone(), - AirTree::local_var(&props.original_subject_name, subject_tipo.clone()), + AirTree::local_var(&props.clause_var_name, subject_tipo.clone()), None, false, tuple_name_assigns, @@ -2690,6 +2939,12 @@ impl<'a> CodeGenerator<'a> { }) } } + + Pattern::Pair { .. } => { + let (_, assign) = self.clause_pattern(pattern, subject_tipo, props, then); + assign + } + Pattern::Constructor { name: constr_name, .. } => { @@ -3759,6 +4014,7 @@ impl<'a> CodeGenerator<'a> { arg_stack.push(arg); } } + assert!(arg_stack.len() == 1, "Expected one term on the stack"); arg_stack.pop().unwrap() } @@ -3833,7 +4089,7 @@ impl<'a> CodeGenerator<'a> { } DefaultFunction::HeadList - if !constructor.tipo.return_type().unwrap().is_2_tuple() => + if !constructor.tipo.return_type().unwrap().is_pair() => { builder::undata_builtin( builtin, @@ -3845,7 +4101,7 @@ impl<'a> CodeGenerator<'a> { DefaultFunction::MkCons | DefaultFunction::MkPairData => { unimplemented!( - "MkCons and MkPairData should be handled by an anon function or using [] or ( a, b, .., z).\n" + "MkCons and MkPairData should be handled by an anon function or using [] or ( a, b, .., z) or Pair {{fst:a, snd: b}}.\n" ) } _ => { @@ -3906,6 +4162,7 @@ impl<'a> CodeGenerator<'a> { ValueConstructorVariant::Record { name: constr_name, .. 
} => { + // TODO handle pair if constructor.tipo.is_bool() { Some(Term::bool(constr_name == "True")) } else if constructor.tipo.is_void() { @@ -3915,7 +4172,13 @@ impl<'a> CodeGenerator<'a> { &self.data_types, &constructor.tipo, ) - .unwrap(); + .unwrap_or_else(|| { + panic!( + "could not find data-type definition for {} within known set: {:?}", + constructor.tipo.to_pretty(0), + self.data_types.keys() + ) + }); let (constr_index, constr_type) = data_type .constructors @@ -3943,19 +4206,25 @@ impl<'a> CodeGenerator<'a> { let eval_program: Program = program.remove_no_inlines().try_into().unwrap(); - let evaluated_term: Term = - eval_program.eval(ExBudget::default()).result().unwrap(); + let evaluated_term: Term = eval_program + .eval(ExBudget::default()) + .result() + .expect("Evaluated a constant record and got an error"); + term = evaluated_term.try_into().unwrap(); } else { - for (index, arg) in constr_type.arguments.iter().enumerate().rev() { + for (index, arg) in constructor + .tipo + .arg_types() + .unwrap() + .iter() + .enumerate() + .rev() + { term = Term::mk_cons() .apply(convert_type_to_data( - Term::var( - arg.label - .clone() - .unwrap_or_else(|| format!("arg_{index}")), - ), - &arg.tipo, + Term::var(format!("arg_{index}")), + arg, )) .apply(term); } @@ -3964,10 +4233,8 @@ impl<'a> CodeGenerator<'a> { .apply(Term::integer(constr_index.into())) .apply(term); - for (index, arg) in constr_type.arguments.iter().enumerate().rev() { - term = term.lambda( - arg.label.clone().unwrap_or_else(|| format!("arg_{index}")), - ) + for (index, _) in constr_type.arguments.iter().enumerate().rev() { + term = term.lambda(format!("arg_{index}")) } } Some(term) @@ -4197,8 +4464,11 @@ impl<'a> CodeGenerator<'a> { let eval_program: Program = program.remove_no_inlines().try_into().unwrap(); - let evaluated_term: Term = - eval_program.eval(ExBudget::max()).result().unwrap(); + let result = eval_program.eval(ExBudget::max()).result(); + + let evaluated_term: Term = result.unwrap_or_else(|e| { + panic!("Evaluated a zero argument function and received this error: {e:#?}") + }); Some(evaluated_term.try_into().unwrap()) } else { @@ -4221,8 +4491,9 @@ impl<'a> CodeGenerator<'a> { arg_vec.push(arg_stack.pop().unwrap()); } - let tipo = match tipo.as_ref() { + let ret_tipo = match tipo.as_ref() { Type::Fn { ret, .. 
} => ret, + // In this case the Air Opcode only holds the return type and not the function type _ => &tipo, }; @@ -4237,11 +4508,11 @@ impl<'a> CodeGenerator<'a> { } DefaultFunction::FstPair | DefaultFunction::SndPair => { - builder::undata_builtin(&func, count, tipo, arg_vec) + builder::undata_builtin(&func, count, ret_tipo, arg_vec) } - DefaultFunction::HeadList if !tipo.is_2_tuple() => { - builder::undata_builtin(&func, count, tipo, arg_vec) + DefaultFunction::HeadList if !tipo.is_pair() => { + builder::undata_builtin(&func, count, ret_tipo, arg_vec) } DefaultFunction::MkCons | DefaultFunction::MkPairData => { @@ -4273,138 +4544,103 @@ impl<'a> CodeGenerator<'a> { let left = arg_stack.pop().unwrap(); let right = arg_stack.pop().unwrap(); - let builtin = if tipo.is_int() { - Term::equals_integer() - } else if tipo.is_string() { - Term::equals_string() - } else if tipo.is_bytearray() { - Term::equals_bytestring() - } else if tipo.is_bls381_12_g1() { - Term::bls12_381_g1_equal() - } else if tipo.is_bls381_12_g2() { - Term::bls12_381_g2_equal() - } else if tipo.is_ml_result() { - panic!("ML Result equality is not supported") - } else { - Term::equals_data() + let uplc_type = tipo.get_uplc_type(); + + let term = match name { + BinOp::And => left.delayed_if_then_else(right, Term::bool(false)), + BinOp::Or => left.delayed_if_then_else(Term::bool(true), right), + BinOp::Eq | BinOp::NotEq => { + let builtin = match &uplc_type { + Some(UplcType::Integer) => Term::equals_integer(), + Some(UplcType::String) => Term::equals_string(), + Some(UplcType::ByteString) => Term::equals_bytestring(), + Some(UplcType::Bls12_381G1Element) => Term::bls12_381_g1_equal(), + Some(UplcType::Bls12_381G2Element) => Term::bls12_381_g2_equal(), + Some(UplcType::Bool | UplcType::Unit) => Term::unit(), + Some(UplcType::List(_) | UplcType::Pair(_, _) | UplcType::Data) + | None => Term::equals_data(), + Some(UplcType::Bls12_381MlResult) => { + panic!("ML Result equality is not supported") + } + }; + + let binop_eq = + match uplc_type { + Some(UplcType::Bool) => { + if matches!(name, BinOp::Eq) { + left.delayed_if_then_else( + right.clone(), + right.if_then_else(Term::bool(false), Term::bool(true)), + ) + } else { + left.delayed_if_then_else( + right + .clone() + .if_then_else(Term::bool(false), Term::bool(true)), + right, + ) + } + } + Some(UplcType::List(_)) if tipo.is_map() => builtin + .apply(Term::map_data().apply(left)) + .apply(Term::map_data().apply(right)), + Some(UplcType::List(_)) => builtin + .apply(Term::list_data().apply(left)) + .apply(Term::list_data().apply(right)), + Some(UplcType::Pair(_, _)) => builtin + .apply(Term::map_data().apply( + Term::mk_cons().apply(left).apply(Term::empty_map()), + )) + .apply(Term::map_data().apply( + Term::mk_cons().apply(right).apply(Term::empty_map()), + )), + Some( + UplcType::Data + | UplcType::Bls12_381G1Element + | UplcType::Bls12_381G2Element + | UplcType::Bls12_381MlResult + | UplcType::Integer + | UplcType::String + | UplcType::ByteString, + ) + | None => builtin.apply(left).apply(right), + Some(UplcType::Unit) => { + left.choose_unit(right.choose_unit(Term::bool(true))) + } + }; + + if !tipo.is_bool() && matches!(name, BinOp::NotEq) { + binop_eq.if_then_else(Term::bool(false), Term::bool(true)) + } else { + binop_eq + } + } + BinOp::LtInt => Term::Builtin(DefaultFunction::LessThanInteger) + .apply(left) + .apply(right), + BinOp::LtEqInt => Term::Builtin(DefaultFunction::LessThanEqualsInteger) + .apply(left) + .apply(right), + BinOp::GtEqInt => 
Term::Builtin(DefaultFunction::LessThanEqualsInteger) + .apply(right) + .apply(left), + BinOp::GtInt => Term::Builtin(DefaultFunction::LessThanInteger) + .apply(right) + .apply(left), + BinOp::AddInt => Term::add_integer().apply(left).apply(right), + BinOp::SubInt => Term::Builtin(DefaultFunction::SubtractInteger) + .apply(left) + .apply(right), + BinOp::MultInt => Term::Builtin(DefaultFunction::MultiplyInteger) + .apply(left) + .apply(right), + BinOp::DivInt => Term::Builtin(DefaultFunction::DivideInteger) + .apply(left) + .apply(right), + BinOp::ModInt => Term::Builtin(DefaultFunction::ModInteger) + .apply(left) + .apply(right), }; - - let term = - match name { - BinOp::And => left.delayed_if_then_else(right, Term::bool(false)), - BinOp::Or => left.delayed_if_then_else(Term::bool(true), right), - BinOp::Eq => { - if tipo.is_bool() { - let term = left.delayed_if_then_else( - right.clone(), - right.if_then_else(Term::bool(false), Term::bool(true)), - ); - - return Some(term); - } else if tipo.is_map() { - let term = builtin - .apply(Term::map_data().apply(left)) - .apply(Term::map_data().apply(right)); - - return Some(term); - } else if tipo.is_tuple() - && matches!(tipo.get_uplc_type(), UplcType::Pair(_, _)) - { - let term = builtin - .apply(Term::map_data().apply( - Term::mk_cons().apply(left).apply(Term::empty_map()), - )) - .apply(Term::map_data().apply( - Term::mk_cons().apply(right).apply(Term::empty_map()), - )); - - return Some(term); - } else if tipo.is_list() || tipo.is_tuple() { - let term = builtin - .apply(Term::list_data().apply(left)) - .apply(Term::list_data().apply(right)); - - return Some(term); - } else if tipo.is_void() { - let term = left.choose_unit(right.choose_unit(Term::bool(true))); - - return Some(term); - } - - builtin.apply(left).apply(right) - } - BinOp::NotEq => { - if tipo.is_bool() { - let term = left.delayed_if_then_else( - right - .clone() - .if_then_else(Term::bool(false), Term::bool(true)), - right, - ); - - return Some(term); - } else if tipo.is_map() { - let term = builtin - .apply(Term::map_data().apply(left)) - .apply(Term::map_data().apply(right)) - .if_then_else(Term::bool(false), Term::bool(true)); - - return Some(term); - } else if tipo.is_tuple() - && matches!(tipo.get_uplc_type(), UplcType::Pair(_, _)) - { - let term = builtin - .apply(Term::map_data().apply( - Term::mk_cons().apply(left).apply(Term::empty_map()), - )) - .apply(Term::map_data().apply( - Term::mk_cons().apply(right).apply(Term::empty_map()), - )) - .if_then_else(Term::bool(false), Term::bool(true)); - - return Some(term); - } else if tipo.is_list() || tipo.is_tuple() { - let term = builtin - .apply(Term::list_data().apply(left)) - .apply(Term::list_data().apply(right)) - .if_then_else(Term::bool(false), Term::bool(true)); - - return Some(term); - } else if tipo.is_void() { - return Some(Term::bool(false)); - } - - builtin - .apply(left) - .apply(right) - .if_then_else(Term::bool(false), Term::bool(true)) - } - BinOp::LtInt => Term::Builtin(DefaultFunction::LessThanInteger) - .apply(left) - .apply(right), - BinOp::LtEqInt => Term::Builtin(DefaultFunction::LessThanEqualsInteger) - .apply(left) - .apply(right), - BinOp::GtEqInt => Term::Builtin(DefaultFunction::LessThanEqualsInteger) - .apply(right) - .apply(left), - BinOp::GtInt => Term::Builtin(DefaultFunction::LessThanInteger) - .apply(right) - .apply(left), - BinOp::AddInt => Term::add_integer().apply(left).apply(right), - BinOp::SubInt => Term::Builtin(DefaultFunction::SubtractInteger) - .apply(left) - .apply(right), - 
BinOp::MultInt => Term::Builtin(DefaultFunction::MultiplyInteger) - .apply(left) - .apply(right), - BinOp::DivInt => Term::Builtin(DefaultFunction::DivideInteger) - .apply(left) - .apply(right), - BinOp::ModInt => Term::Builtin(DefaultFunction::ModInteger) - .apply(left) - .apply(right), - }; Some(term) } Air::DefineFunc { @@ -4556,8 +4792,11 @@ impl<'a> CodeGenerator<'a> { let eval_program: Program = program.remove_no_inlines().try_into().unwrap(); - let evaluated_term: Term = - eval_program.eval(ExBudget::default()).result().unwrap(); + let evaluated_term: Term = eval_program + .eval(ExBudget::default()) + .result() + .expect("Evaluated on unwrapping a data constant and got an error"); + term = evaluated_term.try_into().unwrap(); } @@ -4581,8 +4820,11 @@ impl<'a> CodeGenerator<'a> { let eval_program: Program = program.remove_no_inlines().try_into().unwrap(); - let evaluated_term: Term = - eval_program.eval(ExBudget::default()).result().unwrap(); + let evaluated_term: Term = eval_program + .eval(ExBudget::default()) + .result() + .expect("Evaluated on wrapping a constant into data and got an error"); + term = evaluated_term.try_into().unwrap(); } else { term = builder::convert_type_to_data(term, &tipo); @@ -4628,20 +4870,29 @@ impl<'a> CodeGenerator<'a> { } => { let subject = arg_stack.pop().unwrap(); - let subject = if tipo.is_int() - || tipo.is_bytearray() - || tipo.is_string() - || tipo.is_list() - || tipo.is_tuple() - || tipo.is_bool() - { - subject - } else { - Term::var( + let uplc_type = tipo.get_uplc_type(); + + let subject = match uplc_type { + Some( + UplcType::Bool + | UplcType::Integer + | UplcType::String + | UplcType::ByteString + | UplcType::Unit + | UplcType::List(_) + | UplcType::Pair(_, _) + | UplcType::Bls12_381G1Element + | UplcType::Bls12_381G2Element + | UplcType::Bls12_381MlResult, + ) => subject, + + Some(UplcType::Data) => subject, + + None => Term::var( self.special_functions .use_function_uplc(CONSTR_INDEX_EXPOSER.to_string()), ) - .apply(subject) + .apply(subject), }; let mut term = arg_stack.pop().unwrap(); @@ -4659,67 +4910,68 @@ impl<'a> CodeGenerator<'a> { let clause = arg_stack.pop().unwrap(); // the body to be run if the clause matches - let mut body = arg_stack.pop().unwrap(); + let body = arg_stack.pop().unwrap(); // the next branch in the when expression - let mut term = arg_stack.pop().unwrap(); + let term = arg_stack.pop().unwrap(); - if tipo.is_bool() { - let other_clauses = if complex_clause { - Term::var("__other_clauses_delayed") - } else { - term.clone().delay() - }; + let other_clauses = if complex_clause { + Term::var("__other_clauses_delayed") + } else { + term.clone().delay() + }; + let body = if tipo.is_bool() { if matches!(clause, Term::Constant(boolean) if matches!(boolean.as_ref(), UplcConstant::Bool(true))) { - body = Term::var(subject_name) + Term::var(subject_name) .if_then_else(body.delay(), other_clauses) - .force(); + .force() } else { - body = Term::var(subject_name) + Term::var(subject_name) .if_then_else(other_clauses, body.delay()) - .force(); - } - - if complex_clause { - term = body.lambda("__other_clauses_delayed").apply(term.delay()); - } else { - term = body; + .force() } } else { - let condition = if tipo.is_int() { - Term::equals_integer() + let uplc_type = tipo.get_uplc_type(); + + let condition = match uplc_type { + Some( + UplcType::Bool + | UplcType::Unit + | UplcType::List(_) + | UplcType::Pair(_, _) + | UplcType::Bls12_381MlResult, + ) => unreachable!("{:#?}", tipo), + Some(UplcType::Data) => unimplemented!(), + 
Some(UplcType::Integer) => Term::equals_integer() .apply(clause) - .apply(Term::var(subject_name)) - } else if tipo.is_bytearray() { - Term::equals_bytestring() + .apply(Term::var(subject_name)), + Some(UplcType::String) => Term::equals_string() .apply(clause) - .apply(Term::var(subject_name)) - } else if tipo.is_string() { - Term::equals_string() + .apply(Term::var(subject_name)), + Some(UplcType::ByteString) => Term::equals_bytestring() .apply(clause) - .apply(Term::var(subject_name)) - } else if tipo.is_list() || tipo.is_tuple() { - unreachable!("{:#?}", tipo) - } else { - Term::equals_integer() + .apply(Term::var(subject_name)), + Some(UplcType::Bls12_381G1Element) => Term::bls12_381_g1_equal() .apply(clause) - .apply(Term::var(subject_name)) + .apply(Term::var(subject_name)), + Some(UplcType::Bls12_381G2Element) => Term::bls12_381_g2_equal() + .apply(clause) + .apply(Term::var(subject_name)), + None => Term::equals_integer() + .apply(clause) + .apply(Term::var(subject_name)), }; - if complex_clause { - term = condition - .if_then_else(body.delay(), Term::var("__other_clauses_delayed")) - .force() - .lambda("__other_clauses_delayed") - .apply(term.delay()); - } else { - term = condition.delayed_if_then_else(body, term); - } - } + condition.if_then_else(body.delay(), other_clauses).force() + }; - Some(term) + if complex_clause { + Some(body.lambda("__other_clauses_delayed").apply(term.delay())) + } else { + Some(body) + } } Air::ListClause { tail_name, @@ -4778,31 +5030,49 @@ impl<'a> CodeGenerator<'a> { .apply(next_clause.delay()); } - if tipo.is_2_tuple() { - for (index, name) in indices.iter() { - if name == "_" { - continue; - } - let builtin = if *index == 0 { - Term::fst_pair() - } else { - Term::snd_pair() - }; - - term = term.lambda(name).apply(builder::known_data_to_type( - builtin.apply(Term::var(subject_name.clone())), - &tuple_types[*index].clone(), - )); - } - } else { - for (index, name) in indices.iter() { - term = term.lambda(name.clone()).apply(builder::known_data_to_type( - Term::head_list() - .apply(Term::var(subject_name.clone()).repeat_tail_list(*index)), - &tuple_types[*index].clone(), - )); - } + for (index, name) in indices.iter() { + term = term.lambda(name.clone()).apply(builder::known_data_to_type( + Term::head_list() + .apply(Term::var(subject_name.clone()).repeat_tail_list(*index)), + &tuple_types[*index].clone(), + )); } + + Some(term) + } + Air::PairClause { + subject_tipo: tipo, + complex_clause, + subject_name, + fst_name, + snd_name, + } => { + let mut term = arg_stack.pop().unwrap(); + + let next_clause = arg_stack.pop().unwrap(); + + let pair_types = tipo.get_inner_types(); + + if complex_clause { + term = term + .lambda("__other_clauses_delayed") + .apply(next_clause.delay()); + } + + if let Some(fst) = fst_name { + term = term.lambda(fst).apply(builder::known_data_to_type( + Term::fst_pair().apply(Term::var(subject_name.clone())), + &pair_types[0].clone(), + )); + } + + if let Some(snd) = snd_name { + term = term.lambda(snd).apply(builder::known_data_to_type( + Term::snd_pair().apply(Term::var(subject_name.clone())), + &pair_types[1].clone(), + )); + } + Some(term) } Air::ClauseGuard { @@ -4813,50 +5083,62 @@ impl<'a> CodeGenerator<'a> { let then = arg_stack.pop().unwrap(); + let term = Term::var("__other_clauses_delayed"); + if tipo.is_bool() { - let mut term = Term::var("__other_clauses_delayed"); if matches!(checker, Term::Constant(boolean) if matches!(boolean.as_ref(), UplcConstant::Bool(true))) { - term = Term::var(subject_name) - 
.if_then_else(then.delay(), term) - .force(); + Some( + Term::var(subject_name) + .if_then_else(then.delay(), term) + .force(), + ) } else { - term = Term::var(subject_name) - .if_then_else(term, then.delay()) - .force(); + Some( + Term::var(subject_name) + .if_then_else(term, then.delay()) + .force(), + ) } - Some(term) } else if tipo.is_void() { Some(then.lambda("_").apply(Term::var(subject_name))) } else { - let condition = if tipo.is_int() { - Term::equals_integer() + let uplc_type = tipo.get_uplc_type(); + + let condition = match uplc_type { + Some( + UplcType::Bool + | UplcType::Unit + | UplcType::List(_) + | UplcType::Pair(_, _) + | UplcType::Bls12_381MlResult, + ) => unreachable!("{:#?}", tipo), + Some(UplcType::Data) => unimplemented!(), + Some(UplcType::Integer) => Term::equals_integer() .apply(checker) - .apply(Term::var(subject_name)) - } else if tipo.is_bytearray() { - Term::equals_bytestring() + .apply(Term::var(subject_name)), + Some(UplcType::String) => Term::equals_string() .apply(checker) - .apply(Term::var(subject_name)) - } else if tipo.is_string() { - Term::equals_string() + .apply(Term::var(subject_name)), + Some(UplcType::ByteString) => Term::equals_bytestring() .apply(checker) - .apply(Term::var(subject_name)) - } else if tipo.is_list() || tipo.is_tuple() { - unreachable!() - } else { - Term::equals_integer().apply(checker).apply( + .apply(Term::var(subject_name)), + Some(UplcType::Bls12_381G1Element) => Term::bls12_381_g1_equal() + .apply(checker) + .apply(Term::var(subject_name)), + Some(UplcType::Bls12_381G2Element) => Term::bls12_381_g2_equal() + .apply(checker) + .apply(Term::var(subject_name)), + None => Term::equals_integer().apply(checker).apply( Term::var( self.special_functions .use_function_uplc(CONSTR_INDEX_EXPOSER.to_string()), ) .apply(Term::var(subject_name)), - ) + ), }; - let term = condition - .if_then_else(then.delay(), Term::var("__other_clauses_delayed")) - .force(); - Some(term) + Some(condition.if_then_else(then.delay(), term).force()) } } Air::ListClauseGuard { @@ -4899,31 +5181,40 @@ impl<'a> CodeGenerator<'a> { let tuple_types = tipo.get_inner_types(); - if tuple_types.len() == 2 { - for (index, name) in indices.iter() { - if name == "_" { - continue; - } - let builtin = if *index == 0 { - Term::fst_pair() - } else { - Term::snd_pair() - }; - - term = term.lambda(name).apply(builder::known_data_to_type( - builtin.apply(Term::var(subject_name.clone())), - &tuple_types[*index].clone(), - )); - } - } else { - for (index, name) in indices.iter() { - term = term.lambda(name.clone()).apply(builder::known_data_to_type( - Term::head_list() - .apply(Term::var(subject_name.clone()).repeat_tail_list(*index)), - &tuple_types[*index].clone(), - )); - } + for (index, name) in indices.iter() { + term = term.lambda(name.clone()).apply(builder::known_data_to_type( + Term::head_list() + .apply(Term::var(subject_name.clone()).repeat_tail_list(*index)), + &tuple_types[*index].clone(), + )); } + + Some(term) + } + Air::PairGuard { + subject_tipo: tipo, + subject_name, + fst_name, + snd_name, + } => { + let mut term = arg_stack.pop().unwrap(); + + let tuple_types = tipo.get_inner_types(); + + if let Some(fst) = fst_name { + term = term.lambda(fst).apply(builder::known_data_to_type( + Term::fst_pair().apply(Term::var(subject_name.clone())), + &tuple_types[0].clone(), + )); + } + + if let Some(snd) = snd_name { + term = term.lambda(snd).apply(builder::known_data_to_type( + Term::snd_pair().apply(Term::var(subject_name.clone())), + &tuple_types[1].clone(), + )); + 
} + Some(term) } Air::Finally => { @@ -4981,8 +5272,11 @@ impl<'a> CodeGenerator<'a> { let eval_program: Program = program.remove_no_inlines().try_into().unwrap(); - let evaluated_term: Term = - eval_program.eval(ExBudget::default()).result().unwrap(); + let evaluated_term: Term = eval_program + .eval(ExBudget::default()) + .result() + .expect("Evaluated a constant record with args and got an error"); + term = evaluated_term.try_into().unwrap(); } @@ -5080,34 +5374,9 @@ impl<'a> CodeGenerator<'a> { if constants.len() == args.len() { let data_constants = builder::convert_constants_to_data(constants); - if count == 2 { - let term = Term::Constant( - UplcConstant::ProtoPair( - UplcType::Data, - UplcType::Data, - data_constants[0].clone().into(), - data_constants[1].clone().into(), - ) - .into(), - ); - Some(term) - } else { - let term = Term::Constant( - UplcConstant::ProtoList(UplcType::Data, data_constants).into(), - ); - Some(term) - } - } else if count == 2 { - let term = Term::mk_pair_data() - .apply(builder::convert_type_to_data( - args[0].clone(), - &tuple_sub_types[0], - )) - .apply(builder::convert_type_to_data( - args[1].clone(), - &tuple_sub_types[1], - )); - + let term = Term::Constant( + UplcConstant::ProtoList(UplcType::Data, data_constants).into(), + ); Some(term) } else { let mut term = Term::empty_list(); @@ -5119,6 +5388,39 @@ impl<'a> CodeGenerator<'a> { Some(term) } } + Air::Pair { tipo } => { + let fst = arg_stack.pop().unwrap(); + let snd = arg_stack.pop().unwrap(); + + match (extract_constant(&fst), extract_constant(&snd)) { + (Some(fst), Some(snd)) => { + let mut pair_fields = builder::convert_constants_to_data(vec![fst, snd]); + let term = Term::Constant( + UplcConstant::ProtoPair( + UplcType::Data, + UplcType::Data, + pair_fields.remove(0).into(), + pair_fields.remove(0).into(), + ) + .into(), + ); + Some(term) + } + _ => { + let term = Term::mk_pair_data() + .apply(builder::convert_type_to_data( + fst, + &tipo.get_inner_types()[0], + )) + .apply(builder::convert_type_to_data( + snd, + &tipo.get_inner_types()[1], + )); + + Some(term) + } + } + } Air::RecordUpdate { highest_index, indices, @@ -5247,67 +5549,75 @@ impl<'a> CodeGenerator<'a> { let mut term = arg_stack.pop().unwrap(); let list_id = self.id_gen.next(); - if tipo.is_2_tuple() { - assert!(names.len() == 2); + let mut id_list = vec![]; + id_list.push(list_id); - if names[1] != "_" { - term = term.lambda(names[1].clone()).apply(if is_expect { - convert_data_to_type( - Term::snd_pair().apply(Term::var(format!("__tuple_{list_id}"))), - &inner_types[1], - ) - } else { - known_data_to_type( - Term::snd_pair().apply(Term::var(format!("__tuple_{list_id}"))), - &inner_types[1], - ) - }); - } + names.iter().for_each(|_| { + id_list.push(self.id_gen.next()); + }); - if names[0] != "_" { - term = term.lambda(names[0].clone()).apply(if is_expect { - convert_data_to_type( - Term::fst_pair().apply(Term::var(format!("__tuple_{list_id}"))), - &inner_types[0], - ) - } else { - known_data_to_type( - Term::fst_pair().apply(Term::var(format!("__tuple_{list_id}"))), - &inner_types[0], - ) - }) - } + let names_types = names + .into_iter() + .zip(inner_types) + .zip(id_list) + .map(|((name, tipo), id)| (name, tipo, id)) + .collect_vec(); - term = term.lambda(format!("__tuple_{list_id}")).apply(value); + term = builder::list_access_to_uplc( + &names_types, + false, + term, + false, + is_expect.into(), + error_term, + ) + .apply(value); - Some(term) - } else { - let mut id_list = vec![]; - id_list.push(list_id); + Some(term) + } + 
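+                // Pair destructuring: the pair value is bound once as __pair_{list_id}; snd and fst are projected with Term::snd_pair / Term::fst_pair and cast out of Data (checked cast when is_expect, plain cast otherwise) before the continuation term runs.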
Air::PairAccessor { + fst, + snd, + tipo, + is_expect, + } => { + let inner_types = tipo.get_inner_types(); + let value = arg_stack.pop().unwrap(); - names.iter().for_each(|_| { - id_list.push(self.id_gen.next()); + let mut term = arg_stack.pop().unwrap(); + let list_id = self.id_gen.next(); + + if let Some(name) = snd { + term = term.lambda(name).apply(if is_expect { + convert_data_to_type( + Term::snd_pair().apply(Term::var(format!("__pair_{list_id}"))), + &inner_types[1], + ) + } else { + known_data_to_type( + Term::snd_pair().apply(Term::var(format!("__pair_{list_id}"))), + &inner_types[1], + ) }); - - let names_types = names - .into_iter() - .zip(inner_types) - .zip(id_list) - .map(|((name, tipo), id)| (name, tipo, id)) - .collect_vec(); - - term = builder::list_access_to_uplc( - &names_types, - false, - term, - false, - is_expect.into(), - error_term, - ) - .apply(value); - - Some(term) } + + if let Some(name) = fst { + term = term.lambda(name).apply(if is_expect { + convert_data_to_type( + Term::fst_pair().apply(Term::var(format!("__pair_{list_id}"))), + &inner_types[0], + ) + } else { + known_data_to_type( + Term::fst_pair().apply(Term::var(format!("__pair_{list_id}"))), + &inner_types[0], + ) + }) + } + + term = term.lambda(format!("__pair_{list_id}")).apply(value); + + Some(term) } Air::Trace { .. } => { let text = arg_stack.pop().unwrap(); diff --git a/crates/aiken-lang/src/gen_uplc/air.rs b/crates/aiken-lang/src/gen_uplc/air.rs index c86c29bd..74acc6bd 100644 --- a/crates/aiken-lang/src/gen_uplc/air.rs +++ b/crates/aiken-lang/src/gen_uplc/air.rs @@ -51,6 +51,9 @@ pub enum Air { tipo: Rc<Type>, count: usize, }, + Pair { + tipo: Rc<Type>, + }, Void, Var { constructor: ValueConstructor, @@ -136,6 +139,13 @@ pub enum Air { subject_name: String, complex_clause: bool, }, + PairClause { + subject_tipo: Rc<Type>, + subject_name: String, + fst_name: Option<String>, + snd_name: Option<String>, + complex_clause: bool, + }, ClauseGuard { subject_name: String, subject_tipo: Rc<Type>, @@ -151,6 +161,12 @@ pub enum Air { indices: IndexSet<(usize, String)>, subject_name: String, }, + PairGuard { + subject_tipo: Rc<Type>, + subject_name: String, + fst_name: Option<String>, + snd_name: Option<String>, + }, Finally, // If If { @@ -190,6 +206,13 @@ pub enum Air { tipo: Rc<Type>, is_expect: bool, }, + // Pair Access + PairAccessor { + fst: Option<String>, + snd: Option<String>, + tipo: Rc<Type>, + is_expect: bool, + }, // Misc.
ErrorTerm { tipo: Rc, diff --git a/crates/aiken-lang/src/gen_uplc/builder.rs b/crates/aiken-lang/src/gen_uplc/builder.rs index 519ce5fc..bc68b175 100644 --- a/crates/aiken-lang/src/gen_uplc/builder.rs +++ b/crates/aiken-lang/src/gen_uplc/builder.rs @@ -95,6 +95,7 @@ pub enum SpecificClause { TupleClause { defined_tuple_indices: IndexSet<(usize, String)>, }, + PairClause, } impl ClauseProperties { @@ -123,6 +124,15 @@ impl ClauseProperties { defined_tuple_indices: IndexSet::new(), }, } + } else if t.is_pair() { + ClauseProperties { + clause_var_name: constr_var, + complex_clause: false, + original_subject_name: subject_name, + needs_constr_var: false, + final_clause: false, + specific_clause: SpecificClause::PairClause, + } } else { ClauseProperties { clause_var_name: constr_var, @@ -165,6 +175,15 @@ impl ClauseProperties { defined_tuple_indices: IndexSet::new(), }, } + } else if t.is_pair() { + ClauseProperties { + clause_var_name: constr_var, + complex_clause: false, + original_subject_name: subject_name, + needs_constr_var: false, + final_clause, + specific_clause: SpecificClause::PairClause, + } } else { ClauseProperties { clause_var_name: constr_var, @@ -340,38 +359,25 @@ pub fn handle_clause_guard(clause_guard: &TypedClauseGuard) -> AirTree { } pub fn get_generic_variant_name(t: &Rc) -> String { - if t.is_string() { - "_string".to_string() - } else if t.is_int() { - "_int".to_string() - } else if t.is_bool() { - "_bool".to_string() - } else if t.is_bytearray() { - "_bytearray".to_string() - } else if t.is_bls381_12_g1() { - "_bls381_12_g1".to_string() - } else if t.is_bls381_12_g2() { - "_bls381_12_g2".to_string() - } else if t.is_ml_result() { - "_ml_result".to_string() - } else if t.is_map() { - "_map".to_string() - } else if t.is_2_tuple() { - "_pair".to_string() - } else if t.is_list() { - "_list".to_string() - } else if t.is_tuple() { - "_tuple".to_string() - } else if t.is_unbound() { - "_unbound".to_string() - } else { - let full_type = "_data".to_string(); + let uplc_type = t.get_uplc_type(); - if t.is_generic() { - panic!("FOUND A POLYMORPHIC TYPE. EXPECTED MONOMORPHIC TYPE"); + match uplc_type { + Some(UplcType::Bool) => "_bool".to_string(), + Some(UplcType::Integer) => "_int".to_string(), + Some(UplcType::String) => "_string".to_string(), + Some(UplcType::ByteString) => "_bytearray".to_string(), + Some(UplcType::Unit) => "_void".to_string(), + Some(UplcType::List(_)) if t.is_map() => "_map".to_string(), + Some(UplcType::List(_)) => "_list".to_string(), + Some(UplcType::Pair(_, _)) => "_pair".to_string(), + Some(UplcType::Bls12_381G1Element) => "_bls381_12_g1".to_string(), + Some(UplcType::Bls12_381G2Element) => "_bls381_12_g2".to_string(), + Some(UplcType::Bls12_381MlResult) => "_ml_result".to_string(), + None if t.is_unbound() => "_unbound".to_string(), + None if t.is_generic() => { + unreachable!("FOUND A POLYMORPHIC TYPE. EXPECTED MONOMORPHIC TYPE") } - - full_type + None | Some(UplcType::Data) => "_data".to_string(), } } @@ -673,11 +679,14 @@ pub fn pattern_has_conditions( Pattern::Tuple { elems, .. } => elems .iter() .any(|elem| pattern_has_conditions(elem, data_types)), + Pattern::Pair { fst, snd, .. } => { + pattern_has_conditions(fst, data_types) || pattern_has_conditions(snd, data_types) + } Pattern::Constructor { arguments, tipo, .. 
} => { - let data_type = - lookup_data_type_by_tipo(data_types, tipo).expect("Data type not found"); + let data_type = lookup_data_type_by_tipo(data_types, tipo) + .unwrap_or_else(|| panic!("Data type not found: {:#?}", tipo)); data_type.constructors.len() > 1 || arguments @@ -931,62 +940,54 @@ pub fn find_list_clause_or_default_first(clauses: &[TypedClause]) -> &TypedClaus } pub fn known_data_to_type(term: Term, field_type: &Type) -> Term { - if field_type.is_int() { - Term::un_i_data().apply(term) - } else if field_type.is_bytearray() { - Term::un_b_data().apply(term) - } else if field_type.is_void() { - Term::unit().lambda("_").apply(term) - } else if field_type.is_map() { - Term::unmap_data().apply(term) - } else if field_type.is_string() { - Term::Builtin(DefaultFunction::DecodeUtf8).apply(Term::un_b_data().apply(term)) - } else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) { - Term::mk_pair_data() + let uplc_type = field_type.get_uplc_type(); + + match uplc_type { + Some(UplcType::Integer) => Term::un_i_data().apply(term), + Some(UplcType::ByteString) => Term::un_b_data().apply(term), + Some(UplcType::Bool) => Term::less_than_integer() + .apply(Term::integer(0.into())) + .apply(Term::fst_pair().apply(Term::unconstr_data().apply(term))), + Some(UplcType::String) => Term::decode_utf8().apply(Term::un_b_data().apply(term)), + Some(UplcType::Unit) => Term::unit().lambda("_").apply(term), + Some(UplcType::List(_)) if field_type.is_map() => Term::unmap_data().apply(term), + Some(UplcType::List(_)) => Term::unlist_data().apply(term), + Some(UplcType::Pair(_, _)) => Term::mk_pair_data() .apply(Term::head_list().apply(Term::var("__list_data"))) .apply(Term::head_list().apply(Term::tail_list().apply(Term::var("__list_data")))) .lambda("__list_data") - .apply(Term::unlist_data().apply(term)) - } else if field_type.is_list() || field_type.is_tuple() { - Term::unlist_data().apply(term) - } else if field_type.is_bool() { - Term::less_than_integer() - .apply(Term::integer(0.into())) - .apply(Term::fst_pair().apply(Term::unconstr_data().apply(term))) - } else if field_type.is_bls381_12_g1() { - Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term)) - } else if field_type.is_bls381_12_g2() { - Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term)) - } else if field_type.is_ml_result() { - panic!("ML Result not supported") - } else { - term + .apply(Term::unlist_data().apply(term)), + + Some(UplcType::Bls12_381G1Element) => { + Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term)) + } + Some(UplcType::Bls12_381G2Element) => { + Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term)) + } + Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"), + Some(UplcType::Data) | None => term, } } pub fn unknown_data_to_type(term: Term, field_type: &Type) -> Term { - if field_type.is_int() { - Term::un_i_data().apply(term) - } else if field_type.is_bytearray() { - Term::un_b_data().apply(term) - } else if field_type.is_void() { - Term::equals_integer() - .apply(Term::integer(0.into())) - .apply(Term::fst_pair().apply(Term::var("__pair__"))) - .delayed_if_then_else( - Term::snd_pair() - .apply(Term::var("__pair__")) - .delayed_choose_list(Term::unit(), Term::Error), - Term::Error, - ) - .lambda("__pair__") - .apply(Term::unconstr_data().apply(term)) - } else if field_type.is_map() { - Term::unmap_data().apply(term) - } else if field_type.is_string() { - 
Term::Builtin(DefaultFunction::DecodeUtf8).apply(Term::un_b_data().apply(term)) - } else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) { - Term::tail_list() + let uplc_type = field_type.get_uplc_type(); + + match uplc_type { + Some(UplcType::Integer) => Term::un_i_data().apply(term), + Some(UplcType::ByteString) => Term::un_b_data().apply(term), + Some(UplcType::String) => Term::decode_utf8().apply(Term::un_b_data().apply(term)), + Some(UplcType::List(_)) if field_type.is_map() => Term::unmap_data().apply(term), + Some(UplcType::List(_)) => Term::unlist_data().apply(term), + + Some(UplcType::Bls12_381G1Element) => { + Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term)) + } + Some(UplcType::Bls12_381G2Element) => { + Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term)) + } + Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"), + + Some(UplcType::Pair(_, _)) => Term::tail_list() .apply(Term::tail_list().apply(Term::var("__list_data"))) .delayed_choose_list( Term::mk_pair_data() @@ -997,11 +998,8 @@ pub fn unknown_data_to_type(term: Term, field_type: &Type) -> Term { Term::Error, ) .lambda("__list_data") - .apply(Term::unlist_data().apply(term)) - } else if field_type.is_list() || field_type.is_tuple() { - Term::unlist_data().apply(term) - } else if field_type.is_bool() { - Term::snd_pair() + .apply(Term::unlist_data().apply(term)), + Some(UplcType::Bool) => Term::snd_pair() .apply(Term::var("__pair__")) .delayed_choose_list( Term::equals_integer() @@ -1017,25 +1015,35 @@ pub fn unknown_data_to_type(term: Term, field_type: &Type) -> Term { Term::Error, ) .lambda("__pair__") - .apply(Term::unconstr_data().apply(term)) - } else if field_type.is_bls381_12_g1() { - Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(term)) - } else if field_type.is_bls381_12_g2() { - Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(term)) - } else if field_type.is_ml_result() { - panic!("ML Result not supported") - } else { - term + .apply(Term::unconstr_data().apply(term)), + Some(UplcType::Unit) => Term::equals_integer() + .apply(Term::integer(0.into())) + .apply(Term::fst_pair().apply(Term::var("__pair__"))) + .delayed_if_then_else( + Term::snd_pair() + .apply(Term::var("__pair__")) + .delayed_choose_list(Term::unit(), Term::Error), + Term::Error, + ) + .lambda("__pair__") + .apply(Term::unconstr_data().apply(term)), + + Some(UplcType::Data) | None => term, } } +/// Due to the nature of the types BLS12_381_G1Element and BLS12_381_G2Element and String coming from bytearray +/// We don't have error handling if the bytearray is not properly aligned to the type. Oh well lol +/// For BLS12_381_G1Element and BLS12_381_G2Element, hash to group exists so just adopt that. 
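+/// Branches below inspect the incoming Data with `delayed_choose_data`, keeping the expected Data flavour and falling back to `error_term` otherwise.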
pub fn unknown_data_to_type_debug( term: Term, field_type: &Type, error_term: Term, ) -> Term { - if field_type.is_int() { - Term::var("__val") + let uplc_type = field_type.get_uplc_type(); + + match uplc_type { + Some(UplcType::Integer) => Term::var("__val") .delayed_choose_data( error_term.clone(), error_term.clone(), @@ -1044,9 +1052,8 @@ pub fn unknown_data_to_type_debug( error_term.clone(), ) .lambda("__val") - .apply(term) - } else if field_type.is_bytearray() { - Term::var("__val") + .apply(term), + Some(UplcType::ByteString) => Term::var("__val") .delayed_choose_data( error_term.clone(), error_term.clone(), @@ -1055,28 +1062,19 @@ pub fn unknown_data_to_type_debug( Term::un_b_data().apply(Term::var("__val")), ) .lambda("__val") - .apply(term) - } else if field_type.is_void() { - Term::var("__val") + .apply(term), + Some(UplcType::String) => Term::var("__val") .delayed_choose_data( - Term::equals_integer() - .apply(Term::integer(0.into())) - .apply(Term::fst_pair().apply(Term::unconstr_data().apply(Term::var("__val")))) - .delayed_if_then_else( - Term::snd_pair() - .apply(Term::unconstr_data().apply(Term::var("__val"))) - .delayed_choose_list(Term::unit(), error_term.clone()), - error_term.clone(), - ), error_term.clone(), error_term.clone(), error_term.clone(), error_term.clone(), + Term::decode_utf8().apply(Term::un_b_data().apply(Term::var("__val"))), ) .lambda("__val") - .apply(term) - } else if field_type.is_map() { - Term::var("__val") + .apply(term), + + Some(UplcType::List(_)) if field_type.is_map() => Term::var("__val") .delayed_choose_data( error_term.clone(), Term::unmap_data().apply(Term::var("__val")), @@ -1085,21 +1083,40 @@ pub fn unknown_data_to_type_debug( error_term.clone(), ) .lambda("__val") - .apply(term) - } else if field_type.is_string() { - Term::var("__val") + .apply(term), + Some(UplcType::List(_)) => Term::var("__val") + .delayed_choose_data( + error_term.clone(), + error_term.clone(), + Term::unlist_data().apply(Term::var("__val")), + error_term.clone(), + error_term.clone(), + ) + .lambda("__val") + .apply(term), + + Some(UplcType::Bls12_381G1Element) => Term::var("__val") .delayed_choose_data( error_term.clone(), error_term.clone(), error_term.clone(), error_term.clone(), - Term::Builtin(DefaultFunction::DecodeUtf8) - .apply(Term::un_b_data().apply(Term::var("__val"))), + Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))), ) .lambda("__val") - .apply(term) - } else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) { - Term::var("__val") + .apply(term), + Some(UplcType::Bls12_381G2Element) => Term::var("__val") + .delayed_choose_data( + error_term.clone(), + error_term.clone(), + error_term.clone(), + error_term.clone(), + Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))), + ) + .lambda("__val") + .apply(term), + Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"), + Some(UplcType::Pair(_, _)) => Term::var("__val") .delayed_choose_data( error_term.clone(), error_term.clone(), @@ -1129,20 +1146,8 @@ pub fn unknown_data_to_type_debug( error_term.clone(), ) .lambda("__val") - .apply(term) - } else if field_type.is_list() || field_type.is_tuple() { - Term::var("__val") - .delayed_choose_data( - error_term.clone(), - error_term.clone(), - Term::unlist_data().apply(Term::var("__val")), - error_term.clone(), - error_term.clone(), - ) - .lambda("__val") - .apply(term) - } else if field_type.is_bool() { - Term::var("__val") + .apply(term), + 
Some(UplcType::Bool) => Term::var("__val") .delayed_choose_data( Term::snd_pair() .apply(Term::var("__pair__")) @@ -1167,35 +1172,29 @@ pub fn unknown_data_to_type_debug( error_term.clone(), ) .lambda("__val") - .apply(term) - } else if field_type.is_bls381_12_g1() { - Term::var("__val") + .apply(term), + Some(UplcType::Unit) => Term::var("__val") .delayed_choose_data( + Term::equals_integer() + .apply(Term::integer(0.into())) + .apply(Term::fst_pair().apply(Term::unconstr_data().apply(Term::var("__val")))) + .delayed_if_then_else( + Term::snd_pair() + .apply(Term::unconstr_data().apply(Term::var("__val"))) + .delayed_choose_list(Term::unit(), error_term.clone()), + error_term.clone(), + ), error_term.clone(), error_term.clone(), error_term.clone(), error_term.clone(), - Term::bls12_381_g1_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))), ) .lambda("__val") - .apply(term) - } else if field_type.is_bls381_12_g2() { - Term::var("__val") - .delayed_choose_data( - error_term.clone(), - error_term.clone(), - error_term.clone(), - error_term.clone(), - Term::bls12_381_g2_uncompress().apply(Term::un_b_data().apply(Term::var("__val"))), - ) - .lambda("__val") - .apply(term) - } else if field_type.is_ml_result() { - panic!("ML Result not supported") - } else if field_type.is_data() { - term - } else { - Term::var("__val") + .apply(term), + + Some(UplcType::Data) => term, + // constr type + None => Term::var("__val") .delayed_choose_data( Term::var("__val"), error_term.clone(), @@ -1204,7 +1203,7 @@ pub fn unknown_data_to_type_debug( error_term.clone(), ) .lambda("__val") - .apply(term) + .apply(term), } } @@ -1296,25 +1295,23 @@ pub fn convert_constants_to_data(constants: Vec>) -> Vec, field_type: &Rc) -> Term { - if field_type.is_bytearray() { - Term::b_data().apply(term) - } else if field_type.is_int() { - Term::i_data().apply(term) - } else if field_type.is_void() { - term.choose_unit(Term::Constant( - UplcConstant::Data(PlutusData::Constr(Constr { - tag: convert_constr_to_tag(0).unwrap(), - any_constructor: None, - fields: vec![], - })) - .into(), - )) - } else if field_type.is_map() { - Term::map_data().apply(term) - } else if field_type.is_string() { - Term::b_data().apply(Term::Builtin(DefaultFunction::EncodeUtf8).apply(term)) - } else if field_type.is_tuple() && matches!(field_type.get_uplc_type(), UplcType::Pair(_, _)) { - Term::list_data() + let uplc_type = field_type.get_uplc_type(); + + match uplc_type { + Some(UplcType::Integer) => Term::i_data().apply(term), + Some(UplcType::String) => Term::b_data().apply(Term::encode_utf8().apply(term)), + Some(UplcType::ByteString) => Term::b_data().apply(term), + Some(UplcType::List(_)) if field_type.is_map() => Term::map_data().apply(term), + Some(UplcType::List(_)) => Term::list_data().apply(term), + + Some(UplcType::Bls12_381G1Element) => { + Term::b_data().apply(Term::bls12_381_g1_compress().apply(term)) + } + Some(UplcType::Bls12_381G2Element) => { + Term::b_data().apply(Term::bls12_381_g2_compress().apply(term)) + } + Some(UplcType::Bls12_381MlResult) => panic!("ML Result not supported"), + Some(UplcType::Pair(_, _)) => Term::list_data() .apply( Term::mk_cons() .apply(Term::fst_pair().apply(Term::var("__pair"))) @@ -1325,11 +1322,18 @@ pub fn convert_type_to_data(term: Term, field_type: &Rc) -> Term Term::Constant( + UplcConstant::Data(PlutusData::Constr(Constr { + tag: convert_constr_to_tag(0).unwrap(), + any_constructor: None, + fields: vec![], + })) + .into(), + ) + .lambda("_") + .apply(term), + Some(UplcType::Bool) => 
term.if_then_else( Term::Constant( UplcConstant::Data(PlutusData::Constr(Constr { tag: convert_constr_to_tag(1).unwrap(), @@ -1346,15 +1350,9 @@ pub fn convert_type_to_data(term: Term, field_type: &Rc) -> Term term, } } @@ -1410,7 +1408,7 @@ pub fn list_access_to_uplc( let head_item = |name, tipo: &Rc, tail_name: &str| { if name == "_" { Term::unit() - } else if matches!(tipo.get_uplc_type(), UplcType::Pair(_, _)) && is_list_accessor { + } else if tipo.is_pair() && is_list_accessor { Term::head_list().apply(Term::var(tail_name.to_string())) } else if matches!(expect_level, ExpectLevel::Full) { // Expect level is full so we have an unknown piece of data to cast @@ -1765,7 +1763,7 @@ pub fn get_list_elements_len_and_tail( pub fn cast_validator_args(term: Term, arguments: &[TypedArg]) -> Term { let mut term = term; for arg in arguments.iter().rev() { - if !matches!(arg.tipo.get_uplc_type(), UplcType::Data) { + if !matches!(arg.tipo.get_uplc_type(), Some(UplcType::Data) | None) { term = term .lambda(arg.arg_name.get_variable_name().unwrap_or("_")) .apply(known_data_to_type( @@ -1852,6 +1850,7 @@ pub fn air_holds_msg(air: &Air) -> bool { Air::FieldsExpose { is_expect, .. } | Air::TupleAccessor { is_expect, .. } + | Air::PairAccessor { is_expect, .. } | Air::CastFromData { is_expect, .. } => *is_expect, Air::ListAccessor { expect_level, .. } => { diff --git a/crates/aiken-lang/src/gen_uplc/tree.rs b/crates/aiken-lang/src/gen_uplc/tree.rs index 9666a63a..8de60875 100644 --- a/crates/aiken-lang/src/gen_uplc/tree.rs +++ b/crates/aiken-lang/src/gen_uplc/tree.rs @@ -162,6 +162,13 @@ pub enum AirTree { subject_name: String, then: Box, }, + PairGuard { + subject_tipo: Rc, + subject_name: String, + fst_name: Option, + snd_name: Option, + then: Box, + }, // Field Access FieldsExpose { indices: Vec<(usize, String, Rc)>, @@ -195,6 +202,16 @@ pub enum AirTree { msg: Option, then: Box, }, + // Pair Access + PairAccessor { + fst: Option, + snd: Option, + tipo: Rc, + is_expect: bool, + msg: Option, + pair: Box, + then: Box, + }, // Misc. 
FieldsEmpty { constr: Box, @@ -237,6 +254,11 @@ pub enum AirTree { tipo: Rc, items: Vec, }, + Pair { + tipo: Rc, + fst: Box, + snd: Box, + }, Void, Var { constructor: ValueConstructor, @@ -320,6 +342,16 @@ pub enum AirTree { otherwise: Box, }, + PairClause { + subject_tipo: Rc, + subject_name: String, + fst_name: Option, + snd_name: Option, + complex_clause: bool, + then: Box, + otherwise: Box, + }, + Finally { pattern: Box, then: Box, @@ -363,20 +395,25 @@ impl AirTree { value: value.to_string(), } } + pub fn string(value: impl ToString) -> AirTree { AirTree::String { value: value.to_string(), } } + pub fn byte_array(bytes: Vec) -> AirTree { AirTree::ByteArray { bytes } } + pub fn curve(point: Curve) -> AirTree { AirTree::CurvePoint { point } } + pub fn bool(value: bool) -> AirTree { AirTree::Bool { value } } + pub fn list(mut items: Vec, tipo: Rc, tail: Option) -> AirTree { if let Some(tail) = tail { items.push(tail); @@ -394,12 +431,23 @@ impl AirTree { } } } + pub fn tuple(items: Vec, tipo: Rc) -> AirTree { AirTree::Tuple { tipo, items } } + + pub fn pair(fst: AirTree, snd: AirTree, tipo: Rc) -> AirTree { + AirTree::Pair { + tipo, + fst: fst.into(), + snd: snd.into(), + } + } + pub fn void() -> AirTree { AirTree::Void } + pub fn var( constructor: ValueConstructor, name: impl ToString, @@ -411,6 +459,7 @@ impl AirTree { variant_name: variant_name.to_string(), } } + pub fn local_var(name: impl ToString, tipo: Rc) -> AirTree { AirTree::Var { constructor: ValueConstructor::public( @@ -423,6 +472,7 @@ impl AirTree { variant_name: "".to_string(), } } + pub fn call(func: AirTree, tipo: Rc, args: Vec) -> AirTree { AirTree::Call { tipo, @@ -453,6 +503,7 @@ impl AirTree { then: then.into(), } } + pub fn define_cyclic_func( func_name: impl ToString, module_name: impl ToString, @@ -468,15 +519,18 @@ impl AirTree { then: then.into(), } } + pub fn anon_func(params: Vec, func_body: AirTree) -> AirTree { AirTree::Fn { params, func_body: func_body.into(), } } + pub fn builtin(func: DefaultFunction, tipo: Rc, args: Vec) -> AirTree { AirTree::Builtin { func, tipo, args } } + pub fn binop( op: BinOp, tipo: Rc, @@ -492,12 +546,14 @@ impl AirTree { argument_tipo, } } + pub fn unop(op: UnOp, arg: AirTree) -> AirTree { AirTree::UnOp { op, arg: arg.into(), } } + pub fn let_assignment(name: impl ToString, value: AirTree, then: AirTree) -> AirTree { AirTree::Let { name: name.to_string(), @@ -505,6 +561,7 @@ impl AirTree { then: then.into(), } } + pub fn cast_from_data(value: AirTree, tipo: Rc, msg: Option) -> AirTree { AirTree::CastFromData { tipo, @@ -512,12 +569,14 @@ impl AirTree { msg, } } + pub fn cast_to_data(value: AirTree, tipo: Rc) -> AirTree { AirTree::CastToData { tipo, value: value.into(), } } + pub fn assert_constr_index( constr_index: usize, constr: AirTree, @@ -531,6 +590,7 @@ impl AirTree { then: then.into(), } } + pub fn assert_bool( is_true: bool, value: AirTree, @@ -544,6 +604,7 @@ impl AirTree { then: then.into(), } } + pub fn when( subject_name: impl ToString, tipo: Rc, @@ -559,6 +620,7 @@ impl AirTree { clauses: clauses.into(), } } + pub fn clause( subject_name: impl ToString, pattern: AirTree, @@ -576,6 +638,7 @@ impl AirTree { otherwise: otherwise.into(), } } + pub fn list_clause( tail_name: impl ToString, subject_tipo: Rc, @@ -593,6 +656,7 @@ impl AirTree { otherwise: otherwise.into(), } } + pub fn tuple_clause( subject_name: impl ToString, subject_tipo: Rc, @@ -612,12 +676,34 @@ impl AirTree { otherwise: otherwise.into(), } } + + pub fn pair_clause( + subject_name: impl ToString, + 
subject_tipo: Rc, + fst_name: Option, + snd_name: Option, + then: AirTree, + otherwise: AirTree, + complex_clause: bool, + ) -> AirTree { + AirTree::PairClause { + subject_tipo, + subject_name: subject_name.to_string(), + fst_name, + snd_name, + complex_clause, + then: then.into(), + otherwise: otherwise.into(), + } + } + pub fn wrap_clause(then: AirTree, otherwise: AirTree) -> AirTree { AirTree::WrapClause { then: then.into(), otherwise: otherwise.into(), } } + pub fn clause_guard( subject_name: impl ToString, pattern: AirTree, @@ -631,6 +717,7 @@ impl AirTree { then: then.into(), } } + pub fn list_clause_guard( tail_name: impl ToString, subject_tipo: Rc, @@ -646,6 +733,7 @@ impl AirTree { then: then.into(), } } + pub fn tuple_clause_guard( subject_name: impl ToString, subject_tipo: Rc, @@ -659,12 +747,30 @@ impl AirTree { then: then.into(), } } + + pub fn pair_clause_guard( + subject_name: impl ToString, + subject_tipo: Rc, + fst_name: Option, + snd_name: Option, + then: AirTree, + ) -> AirTree { + AirTree::PairGuard { + subject_name: subject_name.to_string(), + subject_tipo, + fst_name, + snd_name, + then: then.into(), + } + } + pub fn finally(pattern: AirTree, then: AirTree) -> AirTree { AirTree::Finally { pattern: pattern.into(), then: then.into(), } } + pub fn if_branches( mut branches: Vec<(AirTree, AirTree)>, tipo: Rc, @@ -691,6 +797,7 @@ impl AirTree { final_if } + pub fn create_constr(tag: usize, tipo: Rc, args: Vec) -> AirTree { AirTree::Constr { tag, tipo, args } } @@ -710,6 +817,7 @@ impl AirTree { args, } } + pub fn index_access(function_name: String, tipo: Rc, list_of_fields: AirTree) -> AirTree { AirTree::cast_from_data( AirTree::call( @@ -740,6 +848,7 @@ impl AirTree { None, ) } + pub fn fields_expose( indices: Vec<(usize, String, Rc)>, record: AirTree, @@ -775,6 +884,7 @@ impl AirTree { then: then.into(), } } + pub fn list_expose( tail_head_names: Vec<(String, String)>, tail: Option<(String, String)>, @@ -788,6 +898,7 @@ impl AirTree { then: then.into(), } } + pub fn tuple_access( names: Vec, tipo: Rc, @@ -805,6 +916,27 @@ impl AirTree { then: then.into(), } } + + pub fn pair_access( + fst: Option, + snd: Option, + tipo: Rc, + pair: AirTree, + msg: Option, + is_expect: bool, + then: AirTree, + ) -> AirTree { + AirTree::PairAccessor { + fst, + snd, + tipo, + is_expect, + msg, + pair: pair.into(), + then: then.into(), + } + } + pub fn pair_index(index: usize, tipo: Rc, tuple: AirTree) -> AirTree { AirTree::cast_from_data( AirTree::builtin( @@ -820,9 +952,11 @@ impl AirTree { None, ) } + pub fn error(tipo: Rc, validator: bool) -> AirTree { AirTree::ErrorTerm { tipo, validator } } + pub fn trace(msg: AirTree, tipo: Rc, then: AirTree) -> AirTree { AirTree::Trace { tipo, @@ -840,6 +974,7 @@ impl AirTree { pub fn no_op(then: AirTree) -> AirTree { AirTree::NoOp { then: then.into() } } + pub fn fields_empty(constr: AirTree, msg: Option, then: AirTree) -> AirTree { AirTree::FieldsEmpty { constr: constr.into(), @@ -847,6 +982,7 @@ impl AirTree { then: then.into(), } } + pub fn list_empty(list: AirTree, msg: Option, then: AirTree) -> AirTree { AirTree::ListEmpty { list: list.into(), @@ -1058,6 +1194,21 @@ impl AirTree { }); then.create_air_vec(air_vec); } + AirTree::PairGuard { + subject_tipo, + subject_name, + fst_name, + snd_name, + then, + } => { + air_vec.push(Air::PairGuard { + subject_tipo: subject_tipo.clone(), + subject_name: subject_name.clone(), + fst_name: fst_name.clone(), + snd_name: snd_name.clone(), + }); + then.create_air_vec(air_vec); + } AirTree::FieldsExpose { 
indices, record, @@ -1134,6 +1285,29 @@ impl AirTree { tuple.create_air_vec(air_vec); then.create_air_vec(air_vec); } + AirTree::PairAccessor { + fst, + snd, + tipo, + is_expect, + msg, + pair, + then, + } => { + air_vec.push(Air::PairAccessor { + fst: fst.clone(), + snd: snd.clone(), + tipo: tipo.clone(), + is_expect: *is_expect, + }); + + if let Some(msg) = msg { + msg.to_air_tree().create_air_vec(air_vec); + } + + pair.create_air_vec(air_vec); + then.create_air_vec(air_vec); + } AirTree::FieldsEmpty { constr, msg, then } => { air_vec.push(Air::FieldsEmpty); @@ -1189,6 +1363,11 @@ impl AirTree { item.create_air_vec(air_vec); } } + AirTree::Pair { tipo, fst, snd } => { + air_vec.push(Air::Pair { tipo: tipo.clone() }); + fst.create_air_vec(air_vec); + snd.create_air_vec(air_vec); + } AirTree::Void => air_vec.push(Air::Void), AirTree::Var { constructor, @@ -1334,6 +1513,25 @@ impl AirTree { then.create_air_vec(air_vec); otherwise.create_air_vec(air_vec); } + AirTree::PairClause { + subject_tipo, + subject_name, + fst_name, + snd_name, + complex_clause, + then, + otherwise, + } => { + air_vec.push(Air::PairClause { + subject_tipo: subject_tipo.clone(), + subject_name: subject_name.clone(), + fst_name: fst_name.clone(), + snd_name: snd_name.clone(), + complex_clause: *complex_clause, + }); + then.create_air_vec(air_vec); + otherwise.create_air_vec(air_vec); + } AirTree::Finally { pattern, then } => { air_vec.push(Air::Finally); pattern.create_air_vec(air_vec); @@ -1398,6 +1596,7 @@ impl AirTree { AirTree::CurvePoint { point } => point.tipo(), AirTree::List { tipo, .. } | AirTree::Tuple { tipo, .. } + | AirTree::Pair { tipo, .. } | AirTree::Call { tipo, .. } | AirTree::Builtin { tipo, .. } | AirTree::BinOp { tipo, .. } @@ -1420,6 +1619,7 @@ impl AirTree { | AirTree::ListClause { then, .. } | AirTree::WrapClause { then, .. } | AirTree::TupleClause { then, .. } + | AirTree::PairClause { then, .. } | AirTree::Finally { then, .. } | AirTree::Let { then, .. } | AirTree::DefineFunc { then, .. } @@ -1429,10 +1629,12 @@ impl AirTree { | AirTree::ClauseGuard { then, .. } | AirTree::ListClauseGuard { then, .. } | AirTree::TupleGuard { then, .. } + | AirTree::PairGuard { then, .. } | AirTree::FieldsExpose { then, .. } | AirTree::ListAccessor { then, .. } | AirTree::ListExpose { then, .. } | AirTree::TupleAccessor { then, .. } + | AirTree::PairAccessor { then, .. } | AirTree::FieldsEmpty { then, .. } | AirTree::ListEmpty { then, .. } | AirTree::NoOp { then } => then.return_type(), @@ -1443,18 +1645,18 @@ impl AirTree { match self { AirTree::ClauseGuard { subject_tipo, .. } | AirTree::ListClauseGuard { subject_tipo, .. } - | AirTree::TupleGuard { subject_tipo, .. } => vec![subject_tipo], + | AirTree::PairGuard { subject_tipo, .. } + | AirTree::TupleGuard { subject_tipo, .. } + | AirTree::Clause { subject_tipo, .. } + | AirTree::ListClause { subject_tipo, .. } + | AirTree::TupleClause { subject_tipo, .. } + | AirTree::PairClause { subject_tipo, .. } => vec![subject_tipo], + AirTree::ListAccessor { tipo, .. } | AirTree::ListExpose { tipo, .. } - | AirTree::TupleAccessor { tipo, .. } => vec![tipo], - AirTree::FieldsExpose { indices, .. } => { - let mut types = vec![]; - for (_, _, tipo) in indices { - types.push(tipo); - } - types - } - AirTree::List { tipo, .. } + | AirTree::TupleAccessor { tipo, .. } + | AirTree::PairAccessor { tipo, .. } + | AirTree::List { tipo, .. } | AirTree::Tuple { tipo, .. } | AirTree::Call { tipo, .. } | AirTree::Builtin { tipo, .. 
} @@ -1463,7 +1665,17 @@ impl AirTree { | AirTree::If { tipo, .. } | AirTree::Constr { tipo, .. } | AirTree::ErrorTerm { tipo, .. } - | AirTree::Trace { tipo, .. } => vec![tipo], + | AirTree::Trace { tipo, .. } + | AirTree::Pair { tipo, .. } => vec![tipo], + + AirTree::FieldsExpose { indices, .. } => { + let mut types = vec![]; + for (_, _, tipo) in indices { + types.push(tipo); + } + types + } + AirTree::Var { constructor, .. } => { vec![constructor.tipo.borrow_mut()] } @@ -1477,9 +1689,7 @@ impl AirTree { AirTree::When { tipo, subject_tipo, .. } => vec![tipo, subject_tipo], - AirTree::Clause { subject_tipo, .. } - | AirTree::ListClause { subject_tipo, .. } - | AirTree::TupleClause { subject_tipo, .. } => vec![subject_tipo], + AirTree::RecordUpdate { tipo, indices, .. } => { let mut types = vec![tipo]; for (_, tipo) in indices { @@ -1487,9 +1697,24 @@ impl AirTree { } types } - _ => { - vec![] - } + AirTree::Let { .. } + | AirTree::DefineFunc { .. } + | AirTree::DefineCyclicFuncs { .. } + | AirTree::AssertConstr { .. } + | AirTree::AssertBool { .. } + | AirTree::FieldsEmpty { .. } + | AirTree::ListEmpty { .. } + | AirTree::NoOp { .. } + | AirTree::Int { .. } + | AirTree::String { .. } + | AirTree::ByteArray { .. } + | AirTree::CurvePoint { .. } + | AirTree::Bool { .. } + | AirTree::Void + | AirTree::Fn { .. } + | AirTree::UnOp { .. } + | AirTree::WrapClause { .. } + | AirTree::Finally { .. } => vec![], } } @@ -1525,7 +1750,8 @@ impl AirTree { tree_path.push(current_depth, depth_index); let mut tuple_then_index = None; - // Assignments/Statements get traversed here + // Assignments'/Statements' values get traversed here + // Then the body under these assignments/statements get traversed later on match self { AirTree::Let { value, .. } => { value.do_traverse_tree_with( @@ -1592,6 +1818,15 @@ impl AirTree { apply_with_func_last, ); } + AirTree::PairAccessor { pair, .. } => { + pair.do_traverse_tree_with( + tree_path, + current_depth + 1, + index_count.next_number(), + with, + apply_with_func_last, + ); + } AirTree::FieldsEmpty { constr, .. } => { constr.do_traverse_tree_with( tree_path, @@ -1629,7 +1864,49 @@ impl AirTree { apply_with_func_last, ); } - _ => {} + AirTree::PairClause { otherwise, .. } => { + tuple_then_index = Some(index_count.next_number()); + otherwise.do_traverse_tree_with( + tree_path, + current_depth + 1, + index_count.next_number(), + with, + apply_with_func_last, + ); + } + AirTree::DefineFunc { .. } + | AirTree::DefineCyclicFuncs { .. } + | AirTree::ListClauseGuard { .. } + | AirTree::TupleGuard { .. } + | AirTree::PairGuard { .. } + | AirTree::ListExpose { .. } + | AirTree::NoOp { .. } + | AirTree::Int { .. } + | AirTree::String { .. } + | AirTree::ByteArray { .. } + | AirTree::CurvePoint { .. } + | AirTree::Bool { .. } + | AirTree::List { .. } + | AirTree::Tuple { .. } + | AirTree::Pair { .. } + | AirTree::Void + | AirTree::Var { .. } + | AirTree::Call { .. } + | AirTree::Fn { .. } + | AirTree::Builtin { .. } + | AirTree::BinOp { .. } + | AirTree::UnOp { .. } + | AirTree::CastFromData { .. } + | AirTree::CastToData { .. } + | AirTree::Clause { .. } + | AirTree::ListClause { .. } + | AirTree::WrapClause { .. } + | AirTree::Finally { .. } + | AirTree::If { .. } + | AirTree::Constr { .. } + | AirTree::RecordUpdate { .. } + | AirTree::ErrorTerm { .. } + | AirTree::Trace { .. } => {} } if !apply_with_func_last { @@ -1645,11 +1922,13 @@ impl AirTree { | AirTree::FieldsExpose { then, .. } | AirTree::ListAccessor { then, .. } | AirTree::TupleAccessor { then, .. 
} + | AirTree::PairAccessor { then, .. } | AirTree::FieldsEmpty { then, .. } | AirTree::ListEmpty { then, .. } | AirTree::ListExpose { then, .. } | AirTree::ListClauseGuard { then, .. } | AirTree::TupleGuard { then, .. } + | AirTree::PairGuard { then, .. } | AirTree::NoOp { then } => { then.do_traverse_tree_with( tree_path, @@ -1681,6 +1960,19 @@ impl AirTree { apply_with_func_last, ); } + AirTree::PairClause { then, .. } => { + let Some(index) = tuple_then_index else { + unreachable!() + }; + + then.do_traverse_tree_with( + tree_path, + current_depth + 1, + index, + with, + apply_with_func_last, + ); + } AirTree::List { items, .. } => { for item in items { item.do_traverse_tree_with( @@ -1703,6 +1995,23 @@ impl AirTree { ); } } + AirTree::Pair { fst, snd, .. } => { + fst.do_traverse_tree_with( + tree_path, + current_depth + 1, + index_count.next_number(), + with, + apply_with_func_last, + ); + + snd.do_traverse_tree_with( + tree_path, + current_depth + 1, + index_count.next_number(), + with, + apply_with_func_last, + ); + } AirTree::Call { func, args, .. } => { func.do_traverse_tree_with( tree_path, @@ -2071,6 +2380,13 @@ impl AirTree { panic!("Tree Path index outside tree children nodes") } } + AirTree::PairGuard { then, .. } => { + if *index == 0 { + then.as_mut().do_find_air_tree_node(tree_path_iter) + } else { + panic!("Tree Path index outside tree children nodes") + } + } AirTree::FieldsExpose { record, then, .. } => { if *index == 0 { record.as_mut().do_find_air_tree_node(tree_path_iter) @@ -2105,6 +2421,15 @@ impl AirTree { panic!("Tree Path index outside tree children nodes") } } + AirTree::PairAccessor { pair, then, .. } => { + if *index == 0 { + pair.as_mut().do_find_air_tree_node(tree_path_iter) + } else if *index == 1 { + then.as_mut().do_find_air_tree_node(tree_path_iter) + } else { + panic!("Tree Path index outside tree children nodes") + } + } AirTree::NoOp { then } => { if *index == 0 { then.as_mut().do_find_air_tree_node(tree_path_iter) @@ -2112,8 +2437,7 @@ impl AirTree { panic!("Tree Path index outside tree children nodes") } } - AirTree::DefineFunc { .. } => unreachable!(), - AirTree::DefineCyclicFuncs { .. } => unreachable!(), + AirTree::DefineFunc { .. } | AirTree::DefineCyclicFuncs { .. } => unreachable!(), AirTree::FieldsEmpty { constr, then, .. } => { if *index == 0 { constr.as_mut().do_find_air_tree_node(tree_path_iter) @@ -2140,6 +2464,15 @@ impl AirTree { .expect("Tree Path index outside tree children nodes"); item.do_find_air_tree_node(tree_path_iter) } + AirTree::Pair { fst, snd, .. } => { + if *index == 0 { + fst.as_mut().do_find_air_tree_node(tree_path_iter) + } else if *index == 1 { + snd.as_mut().do_find_air_tree_node(tree_path_iter) + } else { + panic!("Tree Path index outside tree children nodes") + } + } AirTree::Call { func, args, .. } => { children_nodes.push(func.as_mut()); children_nodes.extend(args.iter_mut()); @@ -2243,6 +2576,17 @@ impl AirTree { panic!("Tree Path index outside tree children nodes") } } + AirTree::PairClause { + then, otherwise, .. + } => { + if *index == 0 { + then.as_mut().do_find_air_tree_node(tree_path_iter) + } else if *index == 1 { + otherwise.as_mut().do_find_air_tree_node(tree_path_iter) + } else { + panic!("Tree Path index outside tree children nodes") + } + } AirTree::Finally { pattern, then } => { if *index == 0 { pattern.as_mut().do_find_air_tree_node(tree_path_iter) @@ -2291,7 +2635,15 @@ impl AirTree { panic!("Tree Path index outside tree children nodes") } } - _ => { + + AirTree::Int { .. 
} + | AirTree::String { .. } + | AirTree::ByteArray { .. } + | AirTree::CurvePoint { .. } + | AirTree::Bool { .. } + | AirTree::Void + | AirTree::Var { .. } + | AirTree::ErrorTerm { .. } => { panic!("A tree node with no children was encountered with a longer tree path.") } } diff --git a/crates/aiken-lang/src/parser/annotation.rs b/crates/aiken-lang/src/parser/annotation.rs index 8afdac55..148947e5 100644 --- a/crates/aiken-lang/src/parser/annotation.rs +++ b/crates/aiken-lang/src/parser/annotation.rs @@ -1,8 +1,9 @@ -use chumsky::prelude::*; - -use crate::ast; - use super::{error::ParseError, token::Token}; +use crate::{ + ast, + builtins::{PAIR, PRELUDE}, +}; +use chumsky::prelude::*; pub fn parser() -> impl Parser { recursive(|expression| { @@ -14,6 +15,31 @@ pub fn parser() -> impl Parser { name, } }), + // Pair + select! {Token::Name { name } if name == PRELUDE => name} + .then_ignore(just(Token::Dot)) + .or_not() + .then_ignore(select! {Token::UpName { name } if name == PAIR => name}) + .ignore_then( + expression + .clone() + .separated_by(just(Token::Comma)) + .exactly(2) + .delimited_by(just(Token::Less), just(Token::Greater)), + ) + .map_with_span(|elems: Vec, span| ast::Annotation::Pair { + location: span, + fst: elems + .first() + .expect("Pair should have exactly 2 elements") + .to_owned() + .into(), + snd: elems + .last() + .expect("Pair should have exactly 2 elements") + .to_owned() + .into(), + }), // Tuple expression .clone() diff --git a/crates/aiken-lang/src/parser/expr/chained.rs b/crates/aiken-lang/src/parser/expr/chained.rs index 05b7f893..13ea7940 100644 --- a/crates/aiken-lang/src/parser/expr/chained.rs +++ b/crates/aiken-lang/src/parser/expr/chained.rs @@ -1,20 +1,11 @@ -use chumsky::prelude::*; - -use super::anonymous_function::parser as anonymous_function; -use super::assignment; -use super::block::parser as block; -use super::bytearray::parser as bytearray; -use super::if_else::parser as if_else; -use super::int::parser as int; -use super::list::parser as list; -use super::record::parser as record; -use super::record_update::parser as record_update; -use super::string::parser as string; -use super::tuple::parser as tuple; -use super::var::parser as var; -use super::when::parser as when; -use super::{and_or_chain, anonymous_binop::parser as anonymous_binop}; - +use super::{ + and_or_chain, anonymous_binop::parser as anonymous_binop, + anonymous_function::parser as anonymous_function, assignment, block::parser as block, + bytearray::parser as bytearray, if_else::parser as if_else, int::parser as int, + list::parser as list, pair::parser as pair, record::parser as record, + record_update::parser as record_update, string::parser as string, tuple::parser as tuple, + var::parser as var, when::parser as when, +}; use crate::{ expr::UntypedExpr, parser::{ @@ -23,6 +14,7 @@ use crate::{ token::Token, }, }; +use chumsky::prelude::*; pub fn parser<'a>( sequence: Recursive<'a, Token, UntypedExpr, ParseError>, @@ -58,6 +50,7 @@ pub fn chain_start<'a>( choice(( string(), int(), + pair(expression.clone()), record_update(expression.clone()), record(expression.clone()), field_access::constructor(), diff --git a/crates/aiken-lang/src/parser/expr/mod.rs b/crates/aiken-lang/src/parser/expr/mod.rs index d9f6afb2..b415ddc4 100644 --- a/crates/aiken-lang/src/parser/expr/mod.rs +++ b/crates/aiken-lang/src/parser/expr/mod.rs @@ -12,6 +12,7 @@ mod fail_todo_trace; mod if_else; mod int; mod list; +mod pair; mod record; mod record_update; mod sequence; @@ -31,6 +32,7 @@ pub use 
fail_todo_trace::parser as fail_todo_trace; pub use if_else::parser as if_else; pub use int::parser as int; pub use list::parser as list; +pub use pair::parser as pair; pub use record::parser as record; pub use record_update::parser as record_update; pub use sequence::parser as sequence; diff --git a/crates/aiken-lang/src/parser/expr/pair.rs b/crates/aiken-lang/src/parser/expr/pair.rs new file mode 100644 index 00000000..59359baa --- /dev/null +++ b/crates/aiken-lang/src/parser/expr/pair.rs @@ -0,0 +1,53 @@ +use crate::{ + builtins::{PAIR, PRELUDE}, + expr::UntypedExpr, + parser::{error::ParseError, token::Token}, +}; +use chumsky::prelude::*; + +pub fn parser( + r: Recursive<'_, Token, UntypedExpr, ParseError>, +) -> impl Parser + '_ { + select! {Token::Name { name } if name == PRELUDE => name} + .then_ignore(just(Token::Dot)) + .or_not() + .then_ignore(select! {Token::UpName { name } if name == PAIR => name}) + .ignore_then( + r.clone() + .separated_by(just(Token::Comma)) + .exactly(2) + .allow_trailing() + .delimited_by( + choice((just(Token::LeftParen), just(Token::NewLineLeftParen))), + just(Token::RightParen), + ) + .map_with_span(|elems, location| UntypedExpr::Pair { + location, + fst: elems + .first() + .expect("Pair should have exactly 2 elements") + .to_owned() + .into(), + snd: elems + .last() + .expect("Pair should have exactly 2 elements") + .to_owned() + .into(), + }), + ) +} + +#[cfg(test)] +mod tests { + use crate::assert_expr; + + #[test] + fn basic_pair() { + assert_expr!(r#"Pair(1, 2)"#); + } + + #[test] + fn pair_from_prelude() { + assert_expr!(r#"aiken.Pair(1, 2)"#); + } +} diff --git a/crates/aiken-lang/src/parser/expr/snapshots/basic_pair.snap b/crates/aiken-lang/src/parser/expr/snapshots/basic_pair.snap new file mode 100644 index 00000000..c352868e --- /dev/null +++ b/crates/aiken-lang/src/parser/expr/snapshots/basic_pair.snap @@ -0,0 +1,21 @@ +--- +source: crates/aiken-lang/src/parser/expr/pair.rs +description: "Code:\n\nPair(1, 2)" +--- +Pair { + location: 4..10, + fst: UInt { + location: 5..6, + value: "1", + base: Decimal { + numeric_underscore: false, + }, + }, + snd: UInt { + location: 8..9, + value: "2", + base: Decimal { + numeric_underscore: false, + }, + }, +} diff --git a/crates/aiken-lang/src/parser/expr/snapshots/pair_from_prelude.snap b/crates/aiken-lang/src/parser/expr/snapshots/pair_from_prelude.snap new file mode 100644 index 00000000..a3e3a8d1 --- /dev/null +++ b/crates/aiken-lang/src/parser/expr/snapshots/pair_from_prelude.snap @@ -0,0 +1,21 @@ +--- +source: crates/aiken-lang/src/parser/expr/pair.rs +description: "Code:\n\naiken.Pair(1, 2)" +--- +Pair { + location: 10..16, + fst: UInt { + location: 11..12, + value: "1", + base: Decimal { + numeric_underscore: false, + }, + }, + snd: UInt { + location: 14..15, + value: "2", + base: Decimal { + numeric_underscore: false, + }, + }, +} diff --git a/crates/aiken-lang/src/parser/pattern/mod.rs b/crates/aiken-lang/src/parser/pattern/mod.rs index 02722fb4..21989a73 100644 --- a/crates/aiken-lang/src/parser/pattern/mod.rs +++ b/crates/aiken-lang/src/parser/pattern/mod.rs @@ -4,30 +4,32 @@ mod constructor; mod discard; mod int; mod list; +mod pair; mod tuple; mod var; -pub use constructor::parser as constructor; -pub use discard::parser as discard; -pub use int::parser as int; -pub use list::parser as list; -pub use tuple::parser as tuple; -pub use var::parser as var; - use crate::{ ast::UntypedPattern, parser::{error::ParseError, token::Token}, }; +pub use constructor::parser as constructor; +pub use 
discard::parser as discard; +pub use int::parser as int; +pub use list::parser as list; +pub use pair::parser as pair; +pub use tuple::parser as tuple; +pub use var::parser as var; pub fn parser() -> impl Parser { - recursive(|expression| { + recursive(|pattern| { choice(( - var(expression.clone()), - constructor(expression.clone()), + var(pattern.clone()), + pair(pattern.clone()), + constructor(pattern.clone()), discard(), int(), - tuple(expression.clone()), - list(expression), + tuple(pattern.clone()), + list(pattern), )) .then( just(Token::As) @@ -47,3 +49,63 @@ pub fn parser() -> impl Parser { }) }) } + +#[cfg(test)] +mod tests { + use crate::assert_pattern; + + #[test] + fn pattern_var() { + assert_pattern!("foo"); + } + + #[test] + fn pattern_discard_unnamed() { + assert_pattern!("_"); + } + + #[test] + fn pattern_discard_named() { + assert_pattern!("_foo"); + } + + #[test] + fn pattern_pair_discards() { + assert_pattern!("Pair(_, _)"); + } + + #[test] + fn pattern_pair_explicit_depth_1() { + assert_pattern!("Pair(14, True)"); + } + + #[test] + fn pattern_pair_explicit_depth_2() { + assert_pattern!("Pair([1,2,3], Pair((14, 42), _))"); + } + + #[test] + fn pattern_constructor_no_labels() { + assert_pattern!("Foo(a, b)"); + } + + #[test] + fn pattern_constructor_labels() { + assert_pattern!("Foo { a, b }"); + } + + #[test] + fn pattern_constructor_spread() { + assert_pattern!("Foo { a, .. }"); + } + + #[test] + fn pattern_constructor_pair_interleaved() { + assert_pattern!("Foo(a, Pair(1, 2))"); + } + + #[test] + fn pattern_list_spread() { + assert_pattern!("[head, ..]"); + } +} diff --git a/crates/aiken-lang/src/parser/pattern/pair.rs b/crates/aiken-lang/src/parser/pattern/pair.rs new file mode 100644 index 00000000..58be7614 --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/pair.rs @@ -0,0 +1,25 @@ +use crate::{ + ast::UntypedPattern, + builtins::PAIR, + parser::{error::ParseError, token::Token}, +}; +use chumsky::prelude::*; + +pub fn parser( + pattern: Recursive<'_, Token, UntypedPattern, ParseError>, +) -> impl Parser + '_ { + select! 
{Token::UpName { name } if name == PAIR => name} + .ignore_then(choice(( + just(Token::LeftParen), + just(Token::NewLineLeftParen), + ))) + .then(pattern.clone()) + .then_ignore(just(Token::Comma)) + .then(pattern.clone()) + .then_ignore(just(Token::RightParen)) + .map_with_span(|((_name, fst), snd), location| UntypedPattern::Pair { + fst: Box::new(fst), + snd: Box::new(snd), + location, + }) +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_labels.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_labels.snap new file mode 100644 index 00000000..ed12efb8 --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_labels.snap @@ -0,0 +1,35 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nFoo { a, b }" +--- +Constructor { + is_record: true, + location: 0..12, + name: "Foo", + arguments: [ + CallArg { + label: Some( + "a", + ), + location: 6..7, + value: Var { + location: 6..7, + name: "a", + }, + }, + CallArg { + label: Some( + "b", + ), + location: 9..10, + value: Var { + location: 9..10, + name: "b", + }, + }, + ], + module: None, + constructor: (), + with_spread: false, + tipo: (), +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_no_labels.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_no_labels.snap new file mode 100644 index 00000000..0d04a988 --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_no_labels.snap @@ -0,0 +1,31 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nFoo(a, b)" +--- +Constructor { + is_record: false, + location: 0..9, + name: "Foo", + arguments: [ + CallArg { + label: None, + location: 4..5, + value: Var { + location: 4..5, + name: "a", + }, + }, + CallArg { + label: None, + location: 7..8, + value: Var { + location: 7..8, + name: "b", + }, + }, + ], + module: None, + constructor: (), + with_spread: false, + tipo: (), +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_pair_interleaved.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_pair_interleaved.snap new file mode 100644 index 00000000..78af775c --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_pair_interleaved.snap @@ -0,0 +1,44 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nFoo(a, Pair(1, 2))" +--- +Constructor { + is_record: false, + location: 0..18, + name: "Foo", + arguments: [ + CallArg { + label: None, + location: 4..5, + value: Var { + location: 4..5, + name: "a", + }, + }, + CallArg { + label: None, + location: 7..17, + value: Pair { + location: 7..17, + fst: Int { + location: 12..13, + value: "1", + base: Decimal { + numeric_underscore: false, + }, + }, + snd: Int { + location: 15..16, + value: "2", + base: Decimal { + numeric_underscore: false, + }, + }, + }, + }, + ], + module: None, + constructor: (), + with_spread: false, + tipo: (), +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_spread.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_spread.snap new file mode 100644 index 00000000..499d0e00 --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_constructor_spread.snap @@ -0,0 +1,25 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nFoo { a, .. 
}" +--- +Constructor { + is_record: true, + location: 0..13, + name: "Foo", + arguments: [ + CallArg { + label: Some( + "a", + ), + location: 6..7, + value: Var { + location: 6..7, + name: "a", + }, + }, + ], + module: None, + constructor: (), + with_spread: true, + tipo: (), +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_discard_named.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_discard_named.snap new file mode 100644 index 00000000..bc0185c9 --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_discard_named.snap @@ -0,0 +1,8 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\n_foo" +--- +Discard { + name: "_foo", + location: 0..4, +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_discard_unnamed.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_discard_unnamed.snap new file mode 100644 index 00000000..47feccfd --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_discard_unnamed.snap @@ -0,0 +1,8 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\n_" +--- +Discard { + name: "_", + location: 0..1, +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_list_spread.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_list_spread.snap new file mode 100644 index 00000000..723df47e --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_list_spread.snap @@ -0,0 +1,19 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\n[head, ..]" +--- +List { + location: 0..10, + elements: [ + Var { + location: 1..5, + name: "head", + }, + ], + tail: Some( + Discard { + name: "_", + location: 9..10, + }, + ), +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_discards.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_discards.snap new file mode 100644 index 00000000..d385168d --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_discards.snap @@ -0,0 +1,15 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nPair(_, _)" +--- +Pair { + location: 0..10, + fst: Discard { + name: "_", + location: 5..6, + }, + snd: Discard { + name: "_", + location: 8..9, + }, +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_explicit_depth_1.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_explicit_depth_1.snap new file mode 100644 index 00000000..1b7a751a --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_explicit_depth_1.snap @@ -0,0 +1,24 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nPair(14, True)" +--- +Pair { + location: 0..14, + fst: Int { + location: 5..7, + value: "14", + base: Decimal { + numeric_underscore: false, + }, + }, + snd: Constructor { + is_record: false, + location: 9..13, + name: "True", + arguments: [], + module: None, + constructor: (), + with_spread: false, + tipo: (), + }, +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_explicit_depth_2.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_explicit_depth_2.snap new file mode 100644 index 00000000..249066df --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_pair_explicit_depth_2.snap @@ -0,0 +1,60 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nPair([1,2,3], Pair((14, 42), _))" +--- +Pair { + 
location: 0..32, + fst: List { + location: 5..12, + elements: [ + Int { + location: 6..7, + value: "1", + base: Decimal { + numeric_underscore: false, + }, + }, + Int { + location: 8..9, + value: "2", + base: Decimal { + numeric_underscore: false, + }, + }, + Int { + location: 10..11, + value: "3", + base: Decimal { + numeric_underscore: false, + }, + }, + ], + tail: None, + }, + snd: Pair { + location: 14..31, + fst: Tuple { + location: 19..27, + elems: [ + Int { + location: 20..22, + value: "14", + base: Decimal { + numeric_underscore: false, + }, + }, + Int { + location: 24..26, + value: "42", + base: Decimal { + numeric_underscore: false, + }, + }, + ], + }, + snd: Discard { + name: "_", + location: 29..30, + }, + }, +} diff --git a/crates/aiken-lang/src/parser/pattern/snapshots/pattern_var.snap b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_var.snap new file mode 100644 index 00000000..2f3512a5 --- /dev/null +++ b/crates/aiken-lang/src/parser/pattern/snapshots/pattern_var.snap @@ -0,0 +1,8 @@ +--- +source: crates/aiken-lang/src/parser/pattern/mod.rs +description: "Code:\n\nfoo" +--- +Var { + location: 0..3, + name: "foo", +} diff --git a/crates/aiken-lang/src/tests/check.rs b/crates/aiken-lang/src/tests/check.rs index a188efc4..bfdadd58 100644 --- a/crates/aiken-lang/src/tests/check.rs +++ b/crates/aiken-lang/src/tests/check.rs @@ -2295,3 +2295,106 @@ fn tuple_access_on_call() { assert!(check(parse(source_code)).is_ok()) } + +#[test] +fn partial_eq_call_args() { + let source_code = r#" + fn foo(a: Int, b: Int, c: Bool) -> Int { + todo + } + + fn main() -> Int { + foo(14, 42) + } + "#; + + assert!(matches!( + dbg!(check(parse(source_code))), + Err((_, Error::IncorrectFieldsArity { .. })) + )); +} + +#[test] +fn partial_eq_callback_args() { + let source_code = r#" + fn foo(cb: fn(Int, Int, Bool) -> Int) -> Int { + todo + } + + fn main() -> Int { + foo(fn(a, b) { a + b }) + } + "#; + + assert!(matches!( + dbg!(check(parse(source_code))), + Err((_, Error::CouldNotUnify { .. })) + )); +} + +#[test] +fn partial_eq_callback_return() { + let source_code = r#" + fn foo(cb: fn(Int, Int) -> (Int, Int, Bool)) -> Int { + todo + } + + fn main() -> Int { + foo(fn(a, b) { (a, b) }) + } + "#; + + assert!(matches!( + dbg!(check(parse(source_code))), + Err((_, Error::CouldNotUnify { .. })) + )); +} + +#[test] +fn pair_access_on_call() { + let source_code = r#" + use aiken/builtin + + pub fn list_at(xs: List, index: Int) -> a { + if index == 0 { + builtin.head_list(xs) + } else { + list_at(builtin.tail_list(xs), index - 1) + } + } + + fn foo() { + [list_at([Pair(1, 2)], 0).2nd, ..[1, 2]] + } + "#; + + assert!(check(parse(source_code)).is_ok()) +} + +#[test] +fn pair_index_out_of_bound() { + let source_code = r#" + pub fn foo() { + Pair(1, 2).3rd + } + "#; + + assert!(matches!( + dbg!(check_validator(parse(source_code))), + Err((_, Error::PairIndexOutOfBound { .. })) + )) +} + +#[test] +fn not_indexable() { + let source_code = r#" + pub fn foo() { + "foo".1st + } + "#; + + assert!(matches!( + dbg!(check_validator(parse(source_code))), + Err((_, Error::NotIndexable { .. 
})) + )) +} diff --git a/crates/aiken-lang/src/tests/format.rs b/crates/aiken-lang/src/tests/format.rs index 1649a7e5..2b4162dd 100644 --- a/crates/aiken-lang/src/tests/format.rs +++ b/crates/aiken-lang/src/tests/format.rs @@ -792,3 +792,13 @@ fn superfluous_parens_in_binop() { "# ); } + +#[test] +fn format_pairs() { + assert_format!( + r#" + pub fn foo(x: Pair) { + Pair(x.1st, x.2nd) + }"# + ); +} diff --git a/crates/aiken-lang/src/tests/snapshots/format_pairs.snap b/crates/aiken-lang/src/tests/snapshots/format_pairs.snap new file mode 100644 index 00000000..2b4d6277 --- /dev/null +++ b/crates/aiken-lang/src/tests/snapshots/format_pairs.snap @@ -0,0 +1,7 @@ +--- +source: crates/aiken-lang/src/tests/format.rs +description: "Code:\n\npub fn foo(x: Pair) {\n Pair(x.1st, x.2nd)\n}" +--- +pub fn foo(x: Pair) { + Pair(x.1st, x.2nd) +} diff --git a/crates/aiken-lang/src/tipo.rs b/crates/aiken-lang/src/tipo.rs index de766117..f29257c3 100644 --- a/crates/aiken-lang/src/tipo.rs +++ b/crates/aiken-lang/src/tipo.rs @@ -71,6 +71,12 @@ pub enum Type { elems: Vec>, alias: Option>, }, + + Pair { + fst: Rc, + snd: Rc, + alias: Option>, + }, } impl PartialEq for Type { @@ -96,6 +102,7 @@ impl PartialEq for Type { name == name2 && module == module2 && public == public2 + && args.len() == args2.len() && args.iter().zip(args2).all(|(left, right)| left == right) } else { false @@ -109,7 +116,9 @@ impl PartialEq for Type { alias: _, } = other { - ret == ret2 && args.iter().zip(args2).all(|(left, right)| left == right) + ret == ret2 + && args.len() == args2.len() + && args.iter().zip(args2).all(|(left, right)| left == right) } else { false } @@ -117,7 +126,8 @@ impl PartialEq for Type { Type::Tuple { elems, alias: _ } => { if let Type::Tuple { elems: elems2, .. } = other { - elems.iter().zip(elems2).all(|(left, right)| left == right) + elems.len() == elems2.len() + && elems.iter().zip(elems2).all(|(left, right)| left == right) } else { false } @@ -134,6 +144,18 @@ impl PartialEq for Type { false } } + Type::Pair { fst, snd, .. } => { + if let Type::Pair { + fst: fst2, + snd: snd2, + .. + } = other + { + fst == fst2 && snd == snd2 + } else { + false + } + } } } } @@ -144,7 +166,8 @@ impl Type { Type::App { alias, .. } | Type::Fn { alias, .. } | Type::Var { alias, .. } - | Type::Tuple { alias, .. } => alias.clone(), + | Type::Tuple { alias, .. } + | Type::Pair { alias, .. } => alias.clone(), } } @@ -179,6 +202,7 @@ impl Type { } => Type::Fn { args, ret, alias }, Type::Var { tipo, alias: _ } => Type::Var { tipo, alias }, Type::Tuple { elems, alias: _ } => Type::Tuple { elems, alias }, + Type::Pair { fst, snd, alias: _ } => Type::Pair { fst, snd, alias }, }) } @@ -191,6 +215,7 @@ impl Type { _ => None, }, Type::Tuple { .. } => Some((String::new(), "Tuple".to_string())), + Type::Pair { .. } => Some((String::new(), "Pair".to_string())), } } @@ -204,6 +229,7 @@ impl Type { } => *opaque || args.iter().any(|arg| arg.contains_opaque()), Type::Tuple { elems, .. } => elems.iter().any(|elem| elem.contains_opaque()), Type::Fn { .. } => false, + Type::Pair { fst, snd, .. } => fst.contains_opaque() || snd.contains_opaque(), } } @@ -214,7 +240,7 @@ impl Type { } => { *contains_opaque = opaque; } - Type::Fn { .. } | Type::Var { .. } | Type::Tuple { .. } => (), + Type::Fn { .. } | Type::Var { .. } | Type::Tuple { .. } | Type::Pair { .. 
} => (), } } @@ -241,12 +267,23 @@ impl Type { } pub fn is_primitive(&self) -> bool { - self.is_bool() - || self.is_bytearray() - || self.is_int() - || self.is_string() - || self.is_void() - || self.is_data() + let uplc_type = self.get_uplc_type(); + match uplc_type { + Some( + UplcType::Bool + | UplcType::Integer + | UplcType::String + | UplcType::ByteString + | UplcType::Unit + | UplcType::Bls12_381G1Element + | UplcType::Bls12_381G2Element + | UplcType::Bls12_381MlResult + | UplcType::Data, + ) => true, + + None => false, + Some(UplcType::List(_) | UplcType::Pair(_, _)) => false, + } } pub fn is_void(&self) -> bool { @@ -339,7 +376,7 @@ impl Type { } if "List" == name && module.is_empty() => args .first() .expect("unreachable: List should have an inner type") - .is_2_tuple(), + .is_pair(), Self::Var { tipo, .. } => tipo.borrow().is_map(), _ => false, } @@ -347,16 +384,16 @@ impl Type { pub fn is_tuple(&self) -> bool { match self { - Type::Var { tipo, .. } => tipo.borrow().is_tuple(), - Type::Tuple { .. } => true, + Self::Var { tipo, .. } => tipo.borrow().is_tuple(), + Self::Tuple { .. } => true, _ => false, } } - pub fn is_2_tuple(&self) -> bool { + pub fn is_pair(&self) -> bool { match self { - Type::Var { tipo, .. } => tipo.borrow().is_2_tuple(), - Type::Tuple { elems, .. } => elems.len() == 2, + Self::Var { tipo, .. } => tipo.borrow().is_pair(), + Self::Pair { .. } => true, _ => false, } } @@ -371,7 +408,7 @@ impl Type { pub fn is_generic(&self) -> bool { match self { - Type::App { args, .. } => { + Self::App { args, .. } => { let mut is_a_generic = false; for arg in args { is_a_generic = is_a_generic || arg.is_generic(); @@ -379,24 +416,29 @@ impl Type { is_a_generic } - Type::Var { tipo, .. } => tipo.borrow().is_generic(), - Type::Tuple { elems, .. } => { + Self::Var { tipo, .. } => tipo.borrow().is_generic(), + Self::Tuple { elems, .. } => { let mut is_a_generic = false; for elem in elems { is_a_generic = is_a_generic || elem.is_generic(); } is_a_generic } - Type::Fn { args, ret, .. } => { + Self::Fn { args, ret, .. } => { let mut is_a_generic = false; for arg in args { is_a_generic = is_a_generic || arg.is_generic(); } is_a_generic || ret.is_generic() } + Self::Pair { fst, snd, .. } => fst.is_generic() || snd.is_generic(), } } + // TODO: Self::App { args, ..} looks fishy, because App's args are referring + // to _type parameters_ not to value types unlike Fn's args. So this function + // definition is probably wrong. Luckily, we likely never hit the `Self::App` + // case at all. pub fn arg_types(&self) -> Option>> { match self { Self::Fn { args, .. } => Some(args.clone()), @@ -408,7 +450,7 @@ impl Type { pub fn get_generic(&self) -> Option { match self { - Type::Var { tipo, .. } => tipo.borrow().get_generic(), + Self::Var { tipo, .. } => tipo.borrow().get_generic(), _ => None, } } @@ -426,7 +468,13 @@ impl Type { Self::Var { tipo, .. } => tipo.borrow().get_inner_types(), _ => vec![], } - } else if matches!(self.get_uplc_type(), UplcType::Data) { + } else if self.is_pair() { + match self { + Self::Pair { fst, snd, .. } => vec![fst.clone(), snd.clone()], + Self::Var { tipo, .. } => tipo.borrow().get_inner_types(), + _ => vec![], + } + } else if self.get_uplc_type().is_none() { match self { Type::App { args, .. } => args.clone(), Type::Fn { args, ret, .. 
} => { @@ -442,39 +490,35 @@ impl Type { } } - pub fn get_uplc_type(&self) -> UplcType { + pub fn get_uplc_type(&self) -> Option { if self.is_int() { - UplcType::Integer + Some(UplcType::Integer) } else if self.is_bytearray() { - UplcType::ByteString + Some(UplcType::ByteString) } else if self.is_string() { - UplcType::String + Some(UplcType::String) } else if self.is_bool() { - UplcType::Bool + Some(UplcType::Bool) + } else if self.is_void() { + Some(UplcType::Unit) } else if self.is_map() { - UplcType::List(UplcType::Pair(UplcType::Data.into(), UplcType::Data.into()).into()) - } else if self.is_list() { - UplcType::List(UplcType::Data.into()) - } else if self.is_tuple() { - match self { - Self::Tuple { elems, .. } => { - if elems.len() == 2 { - UplcType::Pair(UplcType::Data.into(), UplcType::Data.into()) - } else { - UplcType::List(UplcType::Data.into()) - } - } - Self::Var { tipo, .. } => tipo.borrow().get_uplc_type().unwrap(), - _ => unreachable!(), - } + Some(UplcType::List( + UplcType::Pair(UplcType::Data.into(), UplcType::Data.into()).into(), + )) + } else if self.is_list() || self.is_tuple() { + Some(UplcType::List(UplcType::Data.into())) + } else if self.is_pair() { + Some(UplcType::Pair(UplcType::Data.into(), UplcType::Data.into())) } else if self.is_bls381_12_g1() { - UplcType::Bls12_381G1Element + Some(UplcType::Bls12_381G1Element) } else if self.is_bls381_12_g2() { - UplcType::Bls12_381G2Element + Some(UplcType::Bls12_381G2Element) } else if self.is_ml_result() { - UplcType::Bls12_381MlResult + Some(UplcType::Bls12_381MlResult) + } else if self.is_data() { + Some(UplcType::Data) } else { - UplcType::Data + None } } @@ -555,6 +599,13 @@ impl Type { TypeVar::Link { tipo, .. } => tipo.find_private_type(), }, + Self::Pair { fst, snd, .. } => { + if let Some(private_type) = fst.find_private_type() { + Some(private_type) + } else { + snd.find_private_type() + } + } } } @@ -734,6 +785,16 @@ pub fn convert_opaque_type( } .into() } + Type::Pair { fst, snd, alias } => { + let fst = convert_opaque_type(fst, data_types, deep); + let snd = convert_opaque_type(snd, data_types, deep); + Type::Pair { + fst, + snd, + alias: alias.clone(), + } + .into() + } } } } @@ -758,9 +819,12 @@ pub fn find_and_replace_generics( mono_types: &IndexMap>, ) -> Rc { if let Some(id) = tipo.get_generic() { - // If a generic does not have a type we know of - // like a None in option then just use same type - mono_types.get(&id).unwrap_or(tipo).clone() + mono_types + .get(&id) + .unwrap_or_else(|| { + panic!("Unknown generic id {id:?} for type {tipo:?} in mono_types {mono_types:#?}"); + }) + .clone() } else if tipo.is_generic() { match &**tipo { Type::App { @@ -823,6 +887,16 @@ pub fn find_and_replace_generics( TypeVar::Generic { .. } | TypeVar::Unbound { .. 
} => unreachable!(), } } + Type::Pair { fst, snd, alias } => { + let fst = find_and_replace_generics(fst, mono_types); + let snd = find_and_replace_generics(snd, mono_types); + Type::Pair { + fst, + snd, + alias: alias.clone(), + } + .into() + } } } else { tipo.clone() @@ -951,9 +1025,9 @@ impl TypeVar { } } - pub fn is_2_tuple(&self) -> bool { + pub fn is_pair(&self) -> bool { match self { - Self::Link { tipo } => tipo.is_2_tuple(), + Self::Link { tipo } => tipo.is_pair(), _ => false, } } @@ -1001,13 +1075,6 @@ impl TypeVar { } } } - - pub fn get_uplc_type(&self) -> Option { - match self { - Self::Link { tipo } => Some(tipo.get_uplc_type()), - _ => None, - } - } } #[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)] diff --git a/crates/aiken-lang/src/tipo/environment.rs b/crates/aiken-lang/src/tipo/environment.rs index 9f49c871..972df9ad 100644 --- a/crates/aiken-lang/src/tipo/environment.rs +++ b/crates/aiken-lang/src/tipo/environment.rs @@ -8,10 +8,11 @@ use super::{ use crate::{ ast::{ Annotation, CallArg, DataType, Definition, Function, ModuleConstant, ModuleKind, - RecordConstructor, RecordConstructorArg, Span, TypeAlias, TypedDefinition, TypedPattern, - UnqualifiedImport, UntypedArg, UntypedDefinition, Use, Validator, PIPE_VARIABLE, + RecordConstructor, RecordConstructorArg, Span, TypeAlias, TypedDefinition, TypedFunction, + TypedPattern, UnqualifiedImport, UntypedArg, UntypedDefinition, UntypedFunction, Use, + Validator, PIPE_VARIABLE, }, - builtins::{function, generic_var, tuple, unbound_var}, + builtins::{function, generic_var, pair, tuple, unbound_var}, tipo::{fields::FieldMap, TypeAliasAnnotation}, IdGenerator, }; @@ -54,6 +55,12 @@ pub struct Environment<'a> { /// Values defined in the current module (or the prelude) pub module_values: HashMap, + /// Top-level function definitions from the module + pub module_functions: HashMap, + + /// Top-level functions that have been inferred + pub inferred_functions: HashMap, + previous_id: u64, /// Values defined in the current function (or the prelude) @@ -644,6 +651,13 @@ impl<'a> Environment<'a> { ), alias.clone(), ), + Type::Pair { fst, snd, alias } => Type::with_alias( + pair( + self.instantiate(fst.clone(), ids, hydrator), + self.instantiate(snd.clone(), ids, hydrator), + ), + alias.clone(), + ), } } @@ -700,9 +714,11 @@ impl<'a> Environment<'a> { previous_id: id_gen.next(), id_gen, ungeneralised_functions: HashSet::new(), + inferred_functions: HashMap::new(), module_types: prelude.types.clone(), module_types_constructors: prelude.types_constructors.clone(), module_values: HashMap::new(), + module_functions: HashMap::new(), imported_modules: HashMap::new(), unused_modules: HashMap::new(), unqualified_imported_names: HashMap::new(), @@ -1194,6 +1210,8 @@ impl<'a> Environment<'a> { &fun.location, )?; + self.module_functions.insert(fun.name.clone(), fun); + if !fun.public { self.init_usage(fun.name.clone(), EntityKind::PrivateFunction, fun.location); } @@ -1537,6 +1555,28 @@ impl<'a> Environment<'a> { Ok(()) } + ( + Type::Pair { + fst: lhs_fst, + snd: lhs_snd, + alias: _, + }, + Type::Pair { + fst: rhs_fst, + snd: rhs_snd, + alias: _, + }, + ) => { + for (a, b) in [lhs_fst, lhs_snd].into_iter().zip([rhs_fst, rhs_snd]) { + unify_enclosed_type( + lhs.clone(), + rhs.clone(), + self.unify(a.clone(), b.clone(), location, false), + )?; + } + Ok(()) + } + ( Type::Fn { args: args1, @@ -1794,6 +1834,12 @@ fn unify_unbound_type(tipo: Rc, own_id: u64, location: Span) -> Result<(), Ok(()) } + Type::Pair { fst, snd, alias: _ } 
=> { + unify_unbound_type(fst.clone(), own_id, location)?; + unify_unbound_type(snd.clone(), own_id, location)?; + + Ok(()) + } Type::Var { .. } => unreachable!(), } @@ -1970,5 +2016,12 @@ pub(crate) fn generalise(t: Rc, ctx_level: usize) -> Rc { ), alias.clone(), ), + Type::Pair { fst, snd, alias } => Type::with_alias( + pair( + generalise(fst.clone(), ctx_level), + generalise(snd.clone(), ctx_level), + ), + alias.clone(), + ), } } diff --git a/crates/aiken-lang/src/tipo/error.rs b/crates/aiken-lang/src/tipo/error.rs index c9f41e88..a056385b 100644 --- a/crates/aiken-lang/src/tipo/error.rs +++ b/crates/aiken-lang/src/tipo/error.rs @@ -508,18 +508,16 @@ If you really meant to return that last expression, try to replace it with the f name: String, }, - #[error( - "I tripped over an attempt to access tuple elements on something else than a tuple.\n" - )] + #[error("I tripped over an attempt to access elements on something that isn't indexable.\n")] #[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#tuples"))] - #[diagnostic(code("illegal::tuple_index"))] + #[diagnostic(code("illegal::indexable"))] #[diagnostic(help( - r#"Because you used a tuple-index on an element, I assumed it had to be a tuple but instead I found something of type: + r#"Because you used an ordinal index on an element, I assumed it had to be a tuple or a pair but instead I found something of type: ╰─▶ {type_info}"#, type_info = tipo.to_pretty(0).if_supports_color(Stdout, |s| s.red()) ))] - NotATuple { + NotIndexable { #[label] location: Span, tipo: Rc, @@ -675,12 +673,25 @@ You can help me by providing a type-annotation for 'x', as such: #[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#tuples"))] #[diagnostic(code("invalid::tuple_index"))] TupleIndexOutOfBound { - #[label] + #[label("out of bounds")] location: Span, index: usize, size: usize, }, + #[error( + "I discovered an attempt to access the {} element of a {}.\n", + Ordinal(*index + 1).to_string().if_supports_color(Stdout, |s| s.purple()), + "Pair".if_supports_color(Stdout, |s| s.bright_blue()).if_supports_color(Stdout, |s| s.bold()), + )] + #[diagnostic(url("https://aiken-lang.org/language-tour/primitive-types#pairs"))] + #[diagnostic(code("invalid::pair_index"))] + PairIndexOutOfBound { + #[label("out of bounds")] + location: Span, + index: usize, + }, + #[error( "I tripped over the following labeled argument: {}.\n", label.if_supports_color(Stdout, |s| s.purple()) @@ -1035,7 +1046,7 @@ impl ExtraData for Error { | Error::MissingVarInAlternativePattern { .. } | Error::MultiValidatorEqualArgs { .. } | Error::NonLocalClauseGuardVariable { .. } - | Error::NotATuple { .. } + | Error::NotIndexable { .. } | Error::NotExhaustivePatternMatch { .. } | Error::NotFn { .. } | Error::PositionalArgumentAfterLabeled { .. } @@ -1045,6 +1056,7 @@ impl ExtraData for Error { | Error::RecursiveType { .. } | Error::RedundantMatchClause { .. } | Error::TupleIndexOutOfBound { .. } + | Error::PairIndexOutOfBound { .. } | Error::UnexpectedLabeledArg { .. } | Error::UnexpectedLabeledArgInPattern { .. } | Error::UnknownLabels { .. 
} diff --git a/crates/aiken-lang/src/tipo/exhaustive.rs b/crates/aiken-lang/src/tipo/exhaustive.rs index 12801be4..a69dbf99 100644 --- a/crates/aiken-lang/src/tipo/exhaustive.rs +++ b/crates/aiken-lang/src/tipo/exhaustive.rs @@ -1,11 +1,10 @@ -use std::{collections::BTreeMap, iter, ops::Deref}; - -use itertools::Itertools; - use crate::{ - ast, builtins, + ast, + builtins::{self}, tipo::{self, environment::Environment, error::Error}, }; +use itertools::Itertools; +use std::{collections::BTreeMap, iter, ops::Deref}; const NIL_NAME: &str = "[]"; const CONS_NAME: &str = "::"; @@ -87,8 +86,8 @@ impl PatternStack { Some(self.chain_tail_into_iter(vec![Pattern::Wildcard; arity].into_iter())) } Pattern::Literal(_) => unreachable!( - "constructors and literals should never align in pattern match exhaustiveness checks." - ), + "constructors and literals should never align in pattern match exhaustiveness checks." + ), } } @@ -598,6 +597,13 @@ pub(super) fn simplify( Ok(Pattern::Constructor(name.to_string(), alts, args)) } + ast::Pattern::Pair { fst, snd, location } => simplify( + environment, + &ast::Pattern::Tuple { + elems: vec![*fst.clone(), *snd.clone()], + location: *location, + }, + ), ast::Pattern::Tuple { elems, .. } => { let mut args = vec![]; diff --git a/crates/aiken-lang/src/tipo/expr.rs b/crates/aiken-lang/src/tipo/expr.rs index 7aa932f9..c6b4a634 100644 --- a/crates/aiken-lang/src/tipo/expr.rs +++ b/crates/aiken-lang/src/tipo/expr.rs @@ -1,5 +1,7 @@ use super::{ - environment::{assert_no_labeled_arguments, collapse_links, EntityKind, Environment}, + environment::{ + assert_no_labeled_arguments, collapse_links, generalise, EntityKind, Environment, + }, error::{Error, Warning}, hydrator::Hydrator, pattern::PatternTyper, @@ -9,14 +11,15 @@ use super::{ use crate::{ ast::{ self, Annotation, Arg, ArgName, AssignmentKind, AssignmentPattern, BinOp, Bls12_381Point, - ByteArrayFormatPreference, CallArg, ClauseGuard, Constant, Curve, IfBranch, + ByteArrayFormatPreference, CallArg, ClauseGuard, Constant, Curve, Function, IfBranch, LogicalOpChainKind, Pattern, RecordUpdateSpread, Span, TraceKind, TraceLevel, Tracing, TypedArg, TypedCallArg, TypedClause, TypedClauseGuard, TypedIfBranch, TypedPattern, TypedRecordUpdateArg, UnOp, UntypedArg, UntypedAssignmentKind, UntypedClause, - UntypedClauseGuard, UntypedIfBranch, UntypedPattern, UntypedRecordUpdateArg, + UntypedClauseGuard, UntypedFunction, UntypedIfBranch, UntypedPattern, + UntypedRecordUpdateArg, }, builtins::{ - bool, byte_array, function, g1_element, g2_element, int, list, string, tuple, void, + bool, byte_array, function, g1_element, g2_element, int, list, pair, string, tuple, void, }, expr::{FnStyle, TypedExpr, UntypedExpr}, format, @@ -26,12 +29,126 @@ use crate::{ use std::{cmp::Ordering, collections::HashMap, ops::Deref, rc::Rc}; use vec1::Vec1; +pub(crate) fn infer_function( + fun: &UntypedFunction, + module_name: &str, + hydrators: &mut HashMap, + environment: &mut Environment<'_>, + lines: &LineNumbers, + tracing: Tracing, +) -> Result, TypedExpr, TypedArg>, Error> { + if let Some(typed_fun) = environment.inferred_functions.get(&fun.name) { + return Ok(typed_fun.clone()); + }; + + let Function { + doc, + location, + name, + public, + arguments, + body, + return_annotation, + end_position, + can_error, + return_type: _, + } = fun; + + let preregistered_fn = environment + .get_variable(name) + .expect("Could not find preregistered type for function"); + + let field_map = preregistered_fn.field_map().cloned(); + + let preregistered_type = 
preregistered_fn.tipo.clone(); + + let (args_types, return_type) = preregistered_type + .function_types() + .unwrap_or_else(|| panic!("Preregistered type for fn {name} was not a fn")); + + // Infer the type using the preregistered args + return types as a starting point + let (tipo, arguments, body, safe_to_generalise) = environment.in_new_scope(|environment| { + let args = arguments + .iter() + .zip(&args_types) + .map(|(arg_name, tipo)| arg_name.to_owned().set_type(tipo.clone())) + .collect(); + + let hydrator = hydrators + .remove(name) + .unwrap_or_else(|| panic!("Could not find hydrator for fn {name}")); + + let mut expr_typer = ExprTyper::new(environment, hydrators, lines, tracing); + + expr_typer.hydrator = hydrator; + + let (args, body, return_type) = + expr_typer.infer_fn_with_known_types(args, body.to_owned(), Some(return_type))?; + + let args_types = args.iter().map(|a| a.tipo.clone()).collect(); + + let tipo = function(args_types, return_type); + + let safe_to_generalise = !expr_typer.ungeneralised_function_used; + + Ok::<_, Error>((tipo, args, body, safe_to_generalise)) + })?; + + // Assert that the inferred type matches the type of any recursive call + environment.unify(preregistered_type, tipo.clone(), *location, false)?; + + // Generalise the function if safe to do so + let tipo = if safe_to_generalise { + environment.ungeneralised_functions.remove(name); + + let tipo = generalise(tipo, 0); + + let module_fn = ValueConstructorVariant::ModuleFn { + name: name.clone(), + field_map, + module: module_name.to_owned(), + arity: arguments.len(), + location: *location, + builtin: None, + }; + + environment.insert_variable(name.clone(), module_fn, tipo.clone()); + + tipo + } else { + tipo + }; + + let inferred_fn = Function { + doc: doc.clone(), + location: *location, + name: name.clone(), + public: *public, + arguments, + return_annotation: return_annotation.clone(), + return_type: tipo + .return_type() + .expect("Could not find return type for fn"), + body, + can_error: *can_error, + end_position: *end_position, + }; + + environment + .inferred_functions + .insert(name.to_string(), inferred_fn.clone()); + + Ok(inferred_fn) +} + #[derive(Debug)] pub(crate) struct ExprTyper<'a, 'b> { pub(crate) lines: &'a LineNumbers, pub(crate) environment: &'a mut Environment<'b>, + pub(crate) hydrators: &'a mut HashMap, + // We tweak the tracing behavior during type-check. Traces are either kept or left out of the // typed AST depending on this setting. pub(crate) tracing: Tracing, @@ -46,6 +163,22 @@ pub(crate) struct ExprTyper<'a, 'b> { } impl<'a, 'b> ExprTyper<'a, 'b> { + pub fn new( + environment: &'a mut Environment<'b>, + hydrators: &'a mut HashMap, + lines: &'a LineNumbers, + tracing: Tracing, + ) -> Self { + Self { + hydrator: Hydrator::new(), + environment, + hydrators, + tracing, + ungeneralised_function_used: false, + lines, + } + } + fn check_when_exhaustiveness( &mut self, typed_clauses: &[TypedClause], @@ -226,6 +359,8 @@ impl<'a, 'b> ExprTyper<'a, 'b> { UntypedExpr::Tuple { location, elems } => self.infer_tuple(elems, location), + UntypedExpr::Pair { location, fst, snd } => self.infer_pair(*fst, *snd, location), + UntypedExpr::String { location, value } => Ok(self.infer_string(value, location)), UntypedExpr::LogicalOpChain { @@ -837,7 +972,11 @@ impl<'a, 'b> ExprTyper<'a, 'b> { .get(module) .and_then(|module| module.accessors.get(name)), - _something_without_fields => return Err(unknown_field(vec![])), + Type::Pair { .. 
} => self.environment.accessors.get("Pair"), + + _something_without_fields => { + return Err(unknown_field(vec![])); + } } .ok_or_else(|| unknown_field(vec![]))?; @@ -2016,6 +2155,26 @@ impl<'a, 'b> ExprTyper<'a, 'b> { } } + fn infer_pair( + &mut self, + fst: UntypedExpr, + snd: UntypedExpr, + location: Span, + ) -> Result { + let typed_fst = self.infer(fst)?; + ensure_serialisable(false, typed_fst.tipo(), location)?; + + let typed_snd = self.infer(snd)?; + ensure_serialisable(false, typed_snd.tipo(), location)?; + + Ok(TypedExpr::Pair { + location, + tipo: pair(typed_fst.tipo(), typed_snd.tipo()), + fst: typed_fst.into(), + snd: typed_snd.into(), + }) + } + fn infer_tuple(&mut self, elems: Vec, location: Span) -> Result { let mut typed_elems = vec![]; @@ -2039,13 +2198,13 @@ impl<'a, 'b> ExprTyper<'a, 'b> { fn infer_tuple_index( &mut self, - tuple: UntypedExpr, + tuple_or_pair: UntypedExpr, index: usize, location: Span, ) -> Result { - let tuple = self.infer(tuple)?; + let tuple_or_pair = self.infer(tuple_or_pair)?; - let tipo = match *collapse_links(tuple.tipo()) { + let tipo = match *collapse_links(tuple_or_pair.tipo()) { Type::Tuple { ref elems, alias: _, @@ -2061,9 +2220,22 @@ impl<'a, 'b> ExprTyper<'a, 'b> { Ok(elems[index].clone()) } } - _ => Err(Error::NotATuple { + Type::Pair { + ref fst, + ref snd, + alias: _, + } => { + if index == 0 { + Ok(fst.clone()) + } else if index == 1 { + Ok(snd.clone()) + } else { + Err(Error::PairIndexOutOfBound { location, index }) + } + } + _ => Err(Error::NotIndexable { location, - tipo: tuple.tipo(), + tipo: tuple_or_pair.tipo(), }), }?; @@ -2071,7 +2243,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> { location, tipo, index, - tuple: Box::new(tuple), + tuple: Box::new(tuple_or_pair), }) } @@ -2145,17 +2317,40 @@ impl<'a, 'b> ExprTyper<'a, 'b> { variables: self.environment.local_value_names(), })?; - // Note whether we are using an ungeneralised function so that we can - // tell if it is safe to generalise this function after inference has - // completed. - if matches!( - &constructor.variant, - ValueConstructorVariant::ModuleFn { .. } - ) { + if let ValueConstructorVariant::ModuleFn { name: fn_name, .. } = + &constructor.variant + { + // Note whether we are using an ungeneralised function so that we can + // tell if it is safe to generalise this function after inference has + // completed. let is_ungeneralised = self.environment.ungeneralised_functions.contains(name); self.ungeneralised_function_used = self.ungeneralised_function_used || is_ungeneralised; + + // In case we use another function, infer it first before going further. + // This ensures we have as much information as possible about the function + // when we start inferring expressions using it (i.e. calls). + // + // In a way, this achieves a cheap topological processing of definitions + // where we infer used definitions first. And as a consequence, it solves + // issues where expressions would be wrongly assigned generic variables + // from other definitions. + if let Some(fun) = self.environment.module_functions.remove(fn_name) { + // NOTE: Recursive functions should not run into this multiple times. + // If we have no hydrator for this function, it means that we have already + // encountered it.
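+ // A hydrator is taken out of `hydrators` by `infer_function` itself, so a missing hydrator marks a function whose inference has already started or finished; the `remove` from `module_functions` above likewise keeps later references from re-entering this block.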
+ if self.hydrators.get(&fun.name).is_some() { + infer_function( + fun, + self.environment.current_module, + self.hydrators, + self.environment, + self.lines, + self.tracing, + )?; + } + } } // Register the value as seen for detection of unused values @@ -2284,20 +2479,6 @@ impl<'a, 'b> ExprTyper<'a, 'b> { self.environment.instantiate(t, ids, &self.hydrator) } - pub fn new( - environment: &'a mut Environment<'b>, - lines: &'a LineNumbers, - tracing: Tracing, - ) -> Self { - Self { - hydrator: Hydrator::new(), - environment, - tracing, - ungeneralised_function_used: false, - lines, - } - } - pub fn new_unbound_var(&mut self) -> Rc { self.environment.new_unbound_var() } @@ -2339,6 +2520,7 @@ fn assert_no_assignment(expr: &UntypedExpr) -> Result<(), Error> { | UntypedExpr::Sequence { .. } | UntypedExpr::String { .. } | UntypedExpr::Tuple { .. } + | UntypedExpr::Pair { .. } | UntypedExpr::TupleIndex { .. } | UntypedExpr::UnOp { .. } | UntypedExpr::Var { .. } @@ -2442,5 +2624,9 @@ pub fn ensure_serialisable(allow_fn: bool, t: Rc, location: Span) -> Resul location, ), }, + Type::Pair { fst, snd, .. } => { + ensure_serialisable(false, fst.clone(), location)?; + ensure_serialisable(false, snd.clone(), location) + } } } diff --git a/crates/aiken-lang/src/tipo/hydrator.rs b/crates/aiken-lang/src/tipo/hydrator.rs index f4d3ec04..9b66a301 100644 --- a/crates/aiken-lang/src/tipo/hydrator.rs +++ b/crates/aiken-lang/src/tipo/hydrator.rs @@ -5,7 +5,7 @@ use super::{ }; use crate::{ ast::Annotation, - builtins::{function, tuple}, + builtins::{function, pair, tuple}, tipo::Span, }; use std::{collections::HashMap, rc::Rc}; @@ -246,6 +246,12 @@ impl Hydrator { Ok(tuple(typed_elems)) } + Annotation::Pair { fst, snd, .. } => { + let fst = self.do_type_from_annotation(fst, environment, unbounds)?; + let snd = self.do_type_from_annotation(snd, environment, unbounds)?; + + Ok(pair(fst, snd)) + } }?; Ok(environment.annotate(return_type, annotation)) diff --git a/crates/aiken-lang/src/tipo/infer.rs b/crates/aiken-lang/src/tipo/infer.rs index 7b3e9b0c..f43d061a 100644 --- a/crates/aiken-lang/src/tipo/infer.rs +++ b/crates/aiken-lang/src/tipo/infer.rs @@ -1,5 +1,5 @@ use super::{ - environment::{generalise, EntityKind, Environment}, + environment::{EntityKind, Environment}, error::{Error, UnifyErrorSituation, Warning}, expr::ExprTyper, hydrator::Hydrator, @@ -8,15 +8,13 @@ use super::{ use crate::{ ast::{ Annotation, Arg, ArgName, ArgVia, DataType, Definition, Function, ModuleConstant, - ModuleKind, RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedArg, - TypedDefinition, TypedFunction, TypedModule, UntypedArg, UntypedDefinition, UntypedModule, - Use, Validator, + ModuleKind, RecordConstructor, RecordConstructorArg, Tracing, TypeAlias, TypedDefinition, + TypedFunction, TypedModule, UntypedDefinition, UntypedModule, Use, Validator, }, builtins, - builtins::{function, fuzzer, generic_var}, - expr::{TypedExpr, UntypedExpr}, + builtins::{fuzzer, generic_var}, line_numbers::LineNumbers, - tipo::{Span, Type, TypeVar}, + tipo::{expr::infer_function, Span, Type, TypeVar}, IdGenerator, }; use std::{borrow::Borrow, collections::HashMap, ops::Deref, rc::Rc}; @@ -31,9 +29,10 @@ impl UntypedModule { tracing: Tracing, warnings: &mut Vec, ) -> Result { - let name = self.name.clone(); + let module_name = self.name.clone(); let docs = std::mem::take(&mut self.docs); - let mut environment = Environment::new(id_gen.clone(), &name, &kind, modules, warnings); + let mut environment = + Environment::new(id_gen.clone(), 
&module_name, &kind, modules, warnings); let mut type_names = HashMap::with_capacity(self.definitions.len()); let mut value_names = HashMap::with_capacity(self.definitions.len()); @@ -50,14 +49,20 @@ impl UntypedModule { // earlier in the module. environment.register_types( self.definitions.iter().collect(), - &name, + &module_name, &mut hydrators, &mut type_names, )?; // Register values so they can be used in functions earlier in the module. for def in self.definitions() { - environment.register_values(def, &name, &mut hydrators, &mut value_names, kind)?; + environment.register_values( + def, + &module_name, + &mut hydrators, + &mut value_names, + kind, + )?; } // Infer the types of each definition in the module @@ -83,7 +88,7 @@ impl UntypedModule { for def in consts.into_iter().chain(not_consts) { let definition = infer_definition( def, - &name, + &module_name, &mut hydrators, &mut environment, &self.lines, @@ -96,7 +101,7 @@ impl UntypedModule { // Generalise functions now that the entire module has been inferred let definitions = definitions .into_iter() - .map(|def| environment.generalise_definition(def, &name)) + .map(|def| environment.generalise_definition(def, &module_name)) .collect(); // Generate warnings for unused items @@ -105,7 +110,7 @@ impl UntypedModule { // Remove private and imported types and values to create the public interface environment .module_types - .retain(|_, info| info.public && info.module == name); + .retain(|_, info| info.public && info.module == module_name); environment.module_values.retain(|_, info| info.public); @@ -134,12 +139,12 @@ impl UntypedModule { Ok(TypedModule { docs, - name: name.clone(), + name: module_name.clone(), definitions, kind, lines: self.lines, type_info: TypeInfo { - name, + name: module_name, types, types_constructors, values, @@ -162,7 +167,7 @@ fn infer_definition( ) -> Result { match def { Definition::Fn(f) => Ok(Definition::Fn(infer_function( - f, + &f, module_name, hydrators, environment, @@ -219,19 +224,8 @@ fn infer_definition( }; } - let Definition::Fn(mut typed_fun) = infer_definition( - Definition::Fn(fun), - module_name, - hydrators, - environment, - lines, - tracing, - )? - else { - unreachable!( - "validator definition inferred as something other than a function?" - ) - }; + let mut typed_fun = + infer_function(&fun, module_name, hydrators, environment, lines, tracing)?; if !typed_fun.return_type.is_bool() { return Err(Error::ValidatorMustReturnBool { @@ -270,19 +264,14 @@ fn infer_definition( let params = params.into_iter().chain(other.arguments); other.arguments = params.collect(); - let Definition::Fn(mut other_typed_fun) = infer_definition( - Definition::Fn(other), + let mut other_typed_fun = infer_function( + &other, module_name, hydrators, environment, lines, tracing, - )? - else { - unreachable!( - "validator definition inferred as something other than a function?" 
- ) - }; + )?; if !other_typed_fun.return_type.is_bool() { return Err(Error::ValidatorMustReturnBool { @@ -341,8 +330,8 @@ fn infer_definition( }); } - let typed_via = - ExprTyper::new(environment, lines, tracing).infer(arg.via.clone())?; + let typed_via = ExprTyper::new(environment, hydrators, lines, tracing) + .infer(arg.via.clone())?; let hydrator: &mut Hydrator = hydrators.get_mut(&f.name).unwrap(); @@ -406,7 +395,7 @@ fn infer_definition( }?; let typed_f = infer_function( - f.into(), + &f.into(), module_name, hydrators, environment, @@ -635,8 +624,8 @@ fn infer_definition( value, tipo: _, }) => { - let typed_expr = - ExprTyper::new(environment, lines, tracing).infer_const(&annotation, *value)?; + let typed_expr = ExprTyper::new(environment, hydrators, lines, tracing) + .infer_const(&annotation, *value)?; let tipo = typed_expr.tipo(); @@ -671,106 +660,6 @@ fn infer_definition( } } -fn infer_function( - f: Function<(), UntypedExpr, UntypedArg>, - module_name: &String, - hydrators: &mut HashMap, - environment: &mut Environment<'_>, - lines: &LineNumbers, - tracing: Tracing, -) -> Result, TypedExpr, TypedArg>, Error> { - let Function { - doc, - location, - name, - public, - arguments, - body, - return_annotation, - end_position, - can_error, - return_type: _, - } = f; - - let preregistered_fn = environment - .get_variable(&name) - .expect("Could not find preregistered type for function"); - - let field_map = preregistered_fn.field_map().cloned(); - - let preregistered_type = preregistered_fn.tipo.clone(); - - let (args_types, return_type) = preregistered_type - .function_types() - .expect("Preregistered type for fn was not a fn"); - - // Infer the type using the preregistered args + return types as a starting point - let (tipo, arguments, body, safe_to_generalise) = environment.in_new_scope(|environment| { - let args = arguments - .into_iter() - .zip(&args_types) - .map(|(arg_name, tipo)| arg_name.set_type(tipo.clone())) - .collect(); - - let mut expr_typer = ExprTyper::new(environment, lines, tracing); - - expr_typer.hydrator = hydrators - .remove(&name) - .expect("Could not find hydrator for fn"); - - let (args, body, return_type) = - expr_typer.infer_fn_with_known_types(args, body, Some(return_type))?; - - let args_types = args.iter().map(|a| a.tipo.clone()).collect(); - - let tipo = function(args_types, return_type); - - let safe_to_generalise = !expr_typer.ungeneralised_function_used; - - Ok::<_, Error>((tipo, args, body, safe_to_generalise)) - })?; - - // Assert that the inferred type matches the type of any recursive call - environment.unify(preregistered_type, tipo.clone(), location, false)?; - - // Generalise the function if safe to do so - let tipo = if safe_to_generalise { - environment.ungeneralised_functions.remove(&name); - - let tipo = generalise(tipo, 0); - - let module_fn = ValueConstructorVariant::ModuleFn { - name: name.clone(), - field_map, - module: module_name.to_owned(), - arity: arguments.len(), - location, - builtin: None, - }; - - environment.insert_variable(name.clone(), module_fn, tipo.clone()); - - tipo - } else { - tipo - }; - - Ok(Function { - doc, - location, - name, - public, - arguments, - return_annotation, - return_type: tipo - .return_type() - .expect("Could not find return type for fn"), - body, - can_error, - end_position, - }) -} - fn infer_fuzzer( environment: &mut Environment<'_>, expected_inner_type: Option>, @@ -843,7 +732,7 @@ fn infer_fuzzer( }), }, - Type::App { .. } | Type::Tuple { .. } => Err(could_not_unify()), + Type::App { .. 
} | Type::Tuple { .. } | Type::Pair { .. } => Err(could_not_unify()), } } @@ -894,5 +783,14 @@ fn annotate_fuzzer(tipo: &Type, location: &Span) -> Result { location: *location, tipo: Rc::new(tipo.clone()), }), + Type::Pair { fst, snd, .. } => { + let fst = annotate_fuzzer(fst, location)?; + let snd = annotate_fuzzer(snd, location)?; + Ok(Annotation::Pair { + fst: Box::new(fst), + snd: Box::new(snd), + location: *location, + }) + } } } diff --git a/crates/aiken-lang/src/tipo/pattern.rs b/crates/aiken-lang/src/tipo/pattern.rs index f2f7582f..a601ff6c 100644 --- a/crates/aiken-lang/src/tipo/pattern.rs +++ b/crates/aiken-lang/src/tipo/pattern.rs @@ -8,7 +8,7 @@ use super::{ }; use crate::{ ast::{CallArg, Pattern, Span, TypedPattern, UntypedPattern}, - builtins::{int, list, tuple}, + builtins::{int, list, pair, tuple}, }; use itertools::Itertools; use std::{ @@ -236,6 +236,46 @@ impl<'a, 'b> PatternTyper<'a, 'b> { }), }, + Pattern::Pair { fst, snd, location } => match collapse_links(tipo.clone()).deref() { + Type::Pair { + fst: t_fst, + snd: t_snd, + .. + } => { + let fst = Box::new(self.unify(*fst, t_fst.clone(), None, false)?); + let snd = Box::new(self.unify(*snd, t_snd.clone(), None, false)?); + Ok(Pattern::Pair { fst, snd, location }) + } + + Type::Var { .. } => { + let t_fst = self.environment.new_unbound_var(); + let t_snd = self.environment.new_unbound_var(); + + self.environment.unify( + pair(t_fst.clone(), t_snd.clone()), + tipo, + location, + false, + )?; + + let fst = Box::new(self.unify(*fst, t_fst, None, false)?); + let snd = Box::new(self.unify(*snd, t_snd, None, false)?); + + Ok(Pattern::Pair { fst, snd, location }) + } + + _ => Err(Error::CouldNotUnify { + given: pair( + self.environment.new_unbound_var(), + self.environment.new_unbound_var(), + ), + expected: tipo, + situation: None, + location, + rigid_type_names: HashMap::new(), + }), + }, + Pattern::Tuple { elems, location } => match collapse_links(tipo.clone()).deref() { Type::Tuple { elems: type_elems, .. diff --git a/crates/aiken-lang/src/tipo/pretty.rs b/crates/aiken-lang/src/tipo/pretty.rs index 65745820..99313a2c 100644 --- a/crates/aiken-lang/src/tipo/pretty.rs +++ b/crates/aiken-lang/src/tipo/pretty.rs @@ -86,6 +86,9 @@ impl Printer { Type::Var { tipo: typ, .. } => self.type_var_doc(&typ.borrow()), Type::Tuple { elems, .. } => self.args_to_aiken_doc(elems).surround("(", ")"), + Type::Pair { fst, snd, .. } => self + .args_to_aiken_doc(&[fst.clone(), snd.clone()]) + .surround("Pair<", ">"), } } @@ -120,7 +123,8 @@ impl Printer { fn type_var_doc<'a>(&mut self, typ: &TypeVar) -> Document<'a> { match typ { TypeVar::Link { tipo: ref typ, .. } => self.print(typ), - TypeVar::Unbound { id, .. } | TypeVar::Generic { id, .. } => self.generic_type_var(*id), + TypeVar::Generic { id, .. } => self.generic_type_var(*id), + TypeVar::Unbound { .. } => "?".to_doc(), } } @@ -246,6 +250,21 @@ fn resolve_alias( result } + ( + Annotation::Pair { fst, snd, .. }, + Type::Pair { + fst: t_fst, + snd: t_snd, + .. + }, + ) => { + let mut result = None; + for (ann, t) in [fst, snd].into_iter().zip([t_fst, t_snd]) { + result = result.or_else(|| resolve_one(parameter, ann, t.clone())); + } + result + } + (Annotation::Var { name, .. }, ..) if name == parameter => Some(typ), _ => None, @@ -455,7 +474,7 @@ mod tests { tipo: Rc::new(RefCell::new(TypeVar::Unbound { id: 2231 })), alias: None, }, - "a", + "?", ); assert_string!( function( @@ -468,7 +487,7 @@ mod tests { alias: None, }), ), - "fn(a) -> b", + "fn(?) 
-> ?", ); assert_string!( function( diff --git a/crates/aiken-project/Cargo.toml b/crates/aiken-project/Cargo.toml index 57729eb9..99d710a4 100644 --- a/crates/aiken-project/Cargo.toml +++ b/crates/aiken-project/Cargo.toml @@ -3,7 +3,7 @@ name = "aiken-project" description = "Aiken project utilities" version = "1.0.26-alpha" edition = "2021" -repository = "https://github.com/aiken-lang/aiken/crates/project" +repository = "https://github.com/aiken-lang/aiken" homepage = "https://github.com/aiken-lang/aiken" license = "Apache-2.0" authors = [ diff --git a/crates/aiken-project/src/blueprint/definitions.rs b/crates/aiken-project/src/blueprint/definitions.rs index 0a290de4..f71457be 100644 --- a/crates/aiken-project/src/blueprint/definitions.rs +++ b/crates/aiken-project/src/blueprint/definitions.rs @@ -145,6 +145,13 @@ impl Reference { elems = Self::from_types(elems, type_parameters) ), }, + Type::Pair { fst, snd, .. } => Self { + inner: format!( + "Pair{fst}{snd}", + fst = Self::from_type(fst, type_parameters), + snd = Self::from_type(snd, type_parameters) + ), + }, // NOTE: // diff --git a/crates/aiken-project/src/blueprint/schema.rs b/crates/aiken-project/src/blueprint/schema.rs index a649c5fc..206d2891 100644 --- a/crates/aiken-project/src/blueprint/schema.rs +++ b/crates/aiken-project/src/blueprint/schema.rs @@ -384,6 +384,7 @@ impl Annotated { } }, Type::Fn { .. } => unreachable!(), + Type::Pair { .. } => unreachable!(), } } } diff --git a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__free_vars.snap b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__free_vars.snap index cdde7523..af354a21 100644 --- a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__free_vars.snap +++ b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__free_vars.snap @@ -9,7 +9,7 @@ Schema { Var { tipo: RefCell { value: Generic { - id: 33, + id: 35, }, }, alias: None, diff --git a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__list_2_tuples_as_map.snap b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__list_2_tuples_as_map.snap index 58d34124..f0feef6b 100644 --- a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__list_2_tuples_as_map.snap +++ b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__list_2_tuples_as_map.snap @@ -10,8 +10,8 @@ description: "Code:\n\ntype Dict {\n inner: List<(ByteArray, valu "$ref": "#/definitions/test_module~1Dict$test_module~1UUID_Int" } }, - "compiledCode": "59014e010000323232323232323232232253330054a22930a9980324811856616c696461746f722072657475726e65642066616c736500136563253330043370e900018031baa0011325333009001153300600416132533300a300c002132498c8cc004004008894ccc03000452613233003003300f0023232325333333012001153300c00a16153300c00a16153300c00a161375a0022a660180142c601a00464a6666660220022a660160122c2a660160122c2a660160122c2a660160122c26eb8004c02c004c03400454cc01c01458c94cccccc03400454cc01c014584dd58008a998038028b0a998038028b0a998038028b180500098039baa001153300500316533333300a001100115330040021615330040021615330040021615330040021649011972656465656d65723a20446963743c555549442c20496e743e005734ae7155ceaab9e5573eae855d12ba41", - "hash": "de6d51e2a272ec0ab73566bbb32700ad5864fdd01290dd925e35ebb4", + "compiledCode": 
"59019c010000323232323232323232232253330054a22930a9980324811856616c696461746f722072657475726e65642066616c736500136563253330043370e900018031baa0011325333009001153300600416132533300a300c002132498c8cc004004008894ccc03000452613233003003300f0023232533300e001153300b00916132325333010001153300d00b1613253330113013002149854cc03803058c94cccccc05000454cc0380305854cc0380305854cc038030584dd68008a998070060b180880098088011929999998090008a998060050b0a998060050b0a998060050b0a998060050b09bae001300f0015333333010001153300a00816153300a00816137580022a660140102c2a660140102c601a0022a6600e00a2c64a66666601a0022a6600e00a2c2a6600e00a2c26eb000454cc01c0145854cc01c01458c028004c01cdd50008a998028018b299999980500088008a998020010b0a998020010b0a998020010b0a998020010b2491972656465656d65723a20446963743c555549442c20496e743e005734ae7155ceaab9e5573eae855d12ba41", + "hash": "6027685dde99d967b45333852fe9f59531237d85fcb6b6feb2890672", "definitions": { "ByteArray": { "dataType": "bytes" diff --git a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__recursive_generic_types.snap b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__recursive_generic_types.snap index 04517398..4b665b6a 100644 --- a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__recursive_generic_types.snap +++ b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__recursive_generic_types.snap @@ -16,8 +16,8 @@ description: "Code:\n\npub type LinkedList {\n Cons(a, LinkedList)\n Nil "$ref": "#/definitions/test_module~1LinkedList$Int" } }, - "compiledCode": "590409010000323232323232323232322323232322322533300b4a22930a998062491856616c696461746f722072657475726e65642066616c736500136563300353333330100011001153300a00716153300a00716153300a00716153300a00716007323253330093005001132533300e001153300b00916132533300f3011002132498c8cc00400400888c94ccc038c0280044c94ccc04c00454cc040038584c8c94ccc05400454cc048040584c94ccc058c0600084c926330070070011533013011163253333330190011001153301301116153301301116153301301116153301301116301600130160023253333330170011325333015301400115333011300c301300114a22a666022601a6026002294054cc0480405854cc04804058dd50008a998088078b0a998088078b0a998088078b0a998088078b180a00098089baa0021533300e300900115333012301137540042930a998078068b0a998078068b18079baa001153300c00a163253333330120011001153300c00a16153300c00a16153300c00a16153300c00a16300f001300c37540042a6660126008002264a66601c0022a660160122c26464a6660200022a6601a0162c264a66602260260042649319191980080099299999980b8008a998088078b0a998088078b09bac001153301100f16153301100f16301200322533301400114984c8cc00c00cc05c008c8cc02d4cccccc060004400454cc0480405854cc0480405854cc0480405854cc04804058040c054004c94cccccc05400454cc03c0345854cc03c0345854cc03c0345854cc03c034584dd700098078008a998070060b19299999980a0008a998070060b0a998070060b09929998090008a998078068b09929998098008a998080070b0a999809980a800899bb030140023014001153301000e16301400137580022a6601c0182c2a6601c0182c6022002602200464a6666660240022a660180142c2a660180142c2a660180142c26eb400454cc03002858c03c004c030dd50010a998050040b18051baa001533333300e00410041533008006161533008006161533008006161533008006162232330010010032232533300b30070011325333010001153300d00516132325333012001153300f0071613253330133015002132498cc01c01c00454cc04002058c94cccccc058004400454cc0400205854cc0400205854cc0400205854cc04002058c04c004c04c008c94cccccc05000454cc0380185854cc0380185854cc038018584dd68008a998070030b180880098071baa0021533300b30060011533300f300e3754004293
0a998060020b0a998060020b18061baa001370e90011b87480012411972656465656d65723a204c696e6b65644c6973743c496e743e0049010a646174756d3a20466f6f005734ae7155ceaab9e5573eae855d12ba41", - "hash": "451dccdc86f334c88e491fba78784300aa8f5523298f83b17b258d7d", + "compiledCode": "590403010000323232323232323232322323232322322533300b4a22930a998062491856616c696461746f722072657475726e65642066616c736500136563300353333330100011001153300a00716153300a00716153300a00716153300a00716007323253330093005001132533300e001153300b00916132533300f3011002132498c8cc00400400888c94ccc038c0280044c94ccc04c00454cc040038584c8c94ccc05400454cc048040584c94ccc058c0600084c926330070070011533013011163253333330190011001153301301116153301301116153301301116153301301116301600130160023253333330170011325333015301400115333011300c301300114a22a666022601a6026002294054cc0480405854cc04804058dd50008a998088078b0a998088078b0a998088078b0a998088078b180a00098089baa0021533300e300900115333012301137540042930a998078068b0a998078068b18079baa001153300c00a163253333330120011001153300c00a16153300c00a16153300c00a16153300c00a16300f001300c37540042a6660126008002264a66601c0022a660160122c26464a6660200022a6601a0162c264a66602260260042649329998088008a998070060b0991929998098008a998080070b099299980a180b00109924c646600200200444a66602c00229309919801801980c8011919806a99999980d00088008a9980a0090b0a9980a0090b0a9980a0090b0a9980a0090b009180b8008a998088078b19299999980b8008a998088078b0a998088078b09bac001153301100f16153301100f1630140013014002325333333015001153300f00d16153300f00d16153300f00d16153300f00d161375c00260240022a6601c0182c64a6666660280022a6601c0182c2a6601c0182c26eb000454cc0380305854cc03803058c044004c044008c94cccccc04800454cc0300285854cc0300285854cc030028584dd68008a998060050b180780098061baa002153300a00816300a3754002a66666601c00820082a6601000c2c2a6601000c2c2a6601000c2c2a6601000c2c4464660020020064464a666016600e002264a6660200022a6601a00a2c26464a6660240022a6601e00e2c264a666026602a00426493198038038008a998080040b19299999980b00088008a998080040b0a998080040b0a998080040b0a998080040b1809800980980119299999980a0008a998070030b0a998070030b0a998070030b09bad001153300e006163011001300e37540042a666016600c0022a66601e601c6ea8008526153300c00416153300c00416300c37540026e1d2002370e90002491972656465656d65723a204c696e6b65644c6973743c496e743e0049010a646174756d3a20466f6f005734ae7155ceaab9e5573eae855d12ba41", + "hash": "ff1413d8a35753076ff26df84e7829fde430f9920208fe0ba8ae3c52", "definitions": { "Bool": { "title": "Bool", diff --git a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__tuples.snap b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__tuples.snap index 6eaba3b0..d2369ff1 100644 --- a/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__tuples.snap +++ b/crates/aiken-project/src/blueprint/snapshots/aiken_project__blueprint__validator__tests__tuples.snap @@ -16,8 +16,8 @@ description: "Code:\n\nvalidator {\n fn tuples(datum: (Int, ByteArray), redeeme "$ref": "#/definitions/Tuple$Int_Int_Int" } }, - "compiledCode": 
"5901fc0100003232323232323232323223232232322533300a4a22930a99805a491856616c696461746f722072657475726e65642066616c73650013656533300c00115330090061613232533300e001153300b00816132325333010001153300d00a1613253330113013002149854cc03802c58c94cccccc05000454cc03802c5854cc03802c5854cc03802c584dd68008a998070058b180880098088011929999998090008a998060048b0a998060048b0a998060048b09bad001153300c00916300f001300f002325333333010001153300a00716153300a00716153300a007161375a0022a6601400e2c601a002a66666601c0022a6601000a2c2a6601000a2c26eb000454cc0200145854cc02001458c8c94cccccc03800454cc0200185854cc0200185854cc0200185854cc020018584dd700098048011929999998068008a998038028b0a998038028b0a998038028b09bad0011533007005163007001533333300b0011533005003161533005003161325333009001153300600416132533300a0011533007005161533300a300c001133760601600460160022a6600e00a2c60160026eb000454cc01400c5854cc01400c592411972656465656d65723a2028496e742c20496e742c20496e742900490117646174756d3a2028496e742c2042797465417272617929005734ae7155ceaab9e5573eae855d12ba41", - "hash": "9f4b38854cc56274f9baee929c3de458a0375d56fd5b47e8fe36f063", + "compiledCode": "5901ed010000323232323232323223232232322533300a4a22930a99805a491856616c696461746f722072657475726e65642066616c73650013656533300a00115330090061613232533300c001153300b0081613232533300e001153300d00a16132533300f3011002149854cc03802c58c94cccccc04800454cc03802c5854cc03802c5854cc03802c584dd68008a998070058b180780098078011929999998080008a998060048b0a998060048b0a998060048b09bad001153300c00916300d001300d00232533333300e001153300a00716153300a00716153300a007161375a0022a6601400e2c6016002a6666660180022a6601000a2c2a6601000a2c26eb000454cc0200145854cc020014594ccc01c00454cc018010584c8c94ccc02400454cc020018584c94ccc028c03000852615330090071632533333300d0011533009007161533009007161533009007161533009007161375c0026014002601400464a6666660160022a6600e00a2c2a6600e00a2c2a6600e00a2c26eb400454cc01c01458c0200054cccccc02400454cc01400c5854cc01400c584dd60008a998028018b0a998028018b24811972656465656d65723a2028496e742c20496e742c20496e742900490117646174756d3a2028496e742c2042797465417272617929005734ae7155cfaba15744ae91", + "hash": "9d5de9c290eb221450ae3f039b1ce3b9057f06cff7498d761e223eee", "definitions": { "ByteArray": { "dataType": "bytes" diff --git a/crates/aiken-project/src/snapshots/aiken_project__export__tests__cannot_export_generics.snap b/crates/aiken-project/src/snapshots/aiken_project__export__tests__cannot_export_generics.snap index a94ac91f..69be98f0 100644 --- a/crates/aiken-project/src/snapshots/aiken_project__export__tests__cannot_export_generics.snap +++ b/crates/aiken-project/src/snapshots/aiken_project__export__tests__cannot_export_generics.snap @@ -9,7 +9,7 @@ Schema { Var { tipo: RefCell { value: Generic { - id: 33, + id: 35, }, }, alias: None, diff --git a/crates/aiken-project/src/snapshots/aiken_project__export__tests__recursive_types.snap b/crates/aiken-project/src/snapshots/aiken_project__export__tests__recursive_types.snap index 126f64f0..16e9cdff 100644 --- a/crates/aiken-project/src/snapshots/aiken_project__export__tests__recursive_types.snap +++ b/crates/aiken-project/src/snapshots/aiken_project__export__tests__recursive_types.snap @@ -18,8 +18,8 @@ description: "Code:\n\npub type Foo {\n Empty\n Bar(a, Foo)\n}\n\npub fn } } ], - "compiledCode": 
"5901c3010000323232323222323232323253330083002300937540062a666010600460126ea801052000001001132323232533300b3004300c375400c2646464a66601c600e601e6ea80284c8cdc019b80003375a60240026600c0046024602600260206ea8028010c040c044008dd6980780098069baa006001132533300b3005300c375400c2a666016600860186ea801c4c8cdc01bad300f001330034c103d8798000300f3010001300d375400e00200226466e00dd698070009980118071807800a60103d8798000300c375400a600200244464646464a66601e601260206ea800854ccc03cc024c040dd50018a4000002002264a66601e601060206ea80084c8c8c94ccc048c02cc04cdd500309919b80337000066eb4c058004ccc02c02c008c058c05c004c050dd5003002180a180a8011bad301300130113754004002264a66601e601260206ea800854ccc03cc020c040dd500189919b80375a6026002666010010980103d8798000301330140013011375400600200226466e00dd6980900099980380398091809800a60103d879800030103754002601c004601c00266ec0008004dc3a40046e1d20003006002300600133760004002ae6955ceaab9e5742ae89", - "hash": "4ce96c928b3be798496fca0ec3666d15d09004115df638801715b5e8", + "compiledCode": "5901d501000032323232323222323232323253330083002300937540062a666010600460126ea801052000001001132323232533300b3004300c375400c2646464a66601c600e601e6ea80284c8cdc019b80003375a60260026600c0046026602800260206ea8028010c044c048008dd6980800098069baa006001132533300b3005300c375400c2a666016600860186ea801c4c8cdc01bad3010001330034c103d879800030103011001300d375400e00200226466e00dd698078009980118079808000a60103d8798000300c375400a600200244464646464a66601e601260206ea800854ccc03cc024c040dd50018a4000002002264a66601e601060206ea80084c8c8c94ccc048c02cc04cdd500309919b80337000066eb4c05c004ccc02c02c008c05cc060004c050dd5003002180a980b0011bad301400130113754004002264a66601e601260206ea800854ccc03cc020c040dd500189919b80375a6028002666010010980103d8798000301430150013011375400600200226466e00dd698098009998038039809980a000a60103d8798000301037540026022004602060220026601c0046601c00297ae0370e90011b8748000c024008c020c024004cc018008cc0180052f5c0ae6955ceaab9e5740ae855d101", + "hash": "dca86b6e092019b67ef310ba8360682d7bf8284cc728c6b525fb0b0d", "definitions": { "Int": { "dataType": "integer" diff --git a/crates/aiken-project/src/test_framework.rs b/crates/aiken-project/src/test_framework.rs index f6dfe3d5..fe327ef8 100644 --- a/crates/aiken-project/src/test_framework.rs +++ b/crates/aiken-project/src/test_framework.rs @@ -70,6 +70,7 @@ impl Test { .map(|cst| (cst, side.tipo())) }; + // Assertion at this point is evaluated so it's not just a normal assertion Some(Assertion { bin_op, head: as_constant(generator, head.expect("cannot be Err at this point")), @@ -1128,10 +1129,16 @@ impl Assertion { .to_string() }; + // head did not map to a constant if self.head.is_err() { return red("program failed"); } + // any value in tail did not map to a constant + if self.tail.is_err() { + return red("program failed"); + } + fn fmt_side(side: &UntypedExpr, stream: Stream) -> String { let __ = "│".if_supports_color(stream, |s| s.red()); diff --git a/crates/aiken-project/src/tests/gen_uplc.rs b/crates/aiken-project/src/tests/gen_uplc.rs index 8291573f..37459078 100644 --- a/crates/aiken-project/src/tests/gen_uplc.rs +++ b/crates/aiken-project/src/tests/gen_uplc.rs @@ -680,10 +680,10 @@ fn acceptance_test_6_if_else() { } #[test] -fn acceptance_test_6_equals() { +fn acceptance_test_6_equals_pair() { let src = r#" test foo() { - (1, []) == (1, []) + Pair(1, []) == Pair(1, []) } "#; @@ -725,7 +725,46 @@ fn acceptance_test_6_equals() { } #[test] -fn acceptance_test_7_unzip() { +fn acceptance_test_6_equals_tuple() { + let src = r#" + test foo() { + (1, []) == (1, []) + } + 
"#; + + assert_uplc( + src, + Term::equals_data() + .apply( + Term::list_data().apply(Term::Constant( + Constant::ProtoList( + Type::Data, + vec![ + Constant::Data(Data::integer(1.into())), + Constant::Data(Data::list(vec![])), + ], + ) + .into(), + )), + ) + .apply( + Term::list_data().apply(Term::Constant( + Constant::ProtoList( + Type::Data, + vec![ + Constant::Data(Data::integer(1.into())), + Constant::Data(Data::list(vec![])), + ], + ) + .into(), + )), + ), + false, + ); +} + +#[test] +fn acceptance_test_7_unzip_tuple() { let src = r#" pub fn unzip(xs: List<(a, b)>) -> (List, List) { when xs is { @@ -744,6 +783,127 @@ fn acceptance_test_7_unzip() { } "#; + assert_uplc( + src, + Term::equals_data() + .apply( + Term::list_data().apply( + Term::var("unzip") + .lambda("unzip") + .apply(Term::var("unzip").apply(Term::var("unzip"))) + .lambda("unzip") + .apply( + Term::var("xs") + .delayed_choose_list( + Term::list_values(vec![ + Constant::Data(Data::list(vec![])), + Constant::Data(Data::list(vec![])), + ]), + Term::mk_cons() + .apply( + Term::list_data().apply( + Term::mk_cons() + .apply(Term::i_data().apply(Term::var("a"))) + .apply(Term::var("a_tail")), + ), + ) + .apply( + Term::mk_cons() + .apply( + Term::list_data().apply( + Term::mk_cons() + .apply( + Term::b_data() + .apply(Term::var("b")), + ) + .apply(Term::var("b_tail")), + ), + ) + .apply(Term::empty_list()), + ) + .lambda("b_tail") + .apply(Term::unlist_data().apply(Term::head_list().apply( + Term::tail_list().apply(Term::var("tail_tuple")), + ))) + .lambda("a_tail") + .apply(Term::unlist_data().apply( + Term::head_list().apply(Term::var("tail_tuple")), + )) + .lambda("tail_tuple") + .apply( + Term::var("unzip") + .apply(Term::var("unzip")) + .apply(Term::var("rest")), + ) + .lambda("b") + .apply(Term::un_b_data().apply(Term::head_list().apply( + Term::tail_list().apply(Term::var("head_tuple")), + ))) + .lambda("a") + .apply(Term::un_i_data().apply( + Term::head_list().apply(Term::var("head_tuple")), + )) + .lambda("rest") + .apply(Term::tail_list().apply(Term::var("xs"))) + .lambda("head_tuple") + .apply( + Term::unlist_data() + .apply(Term::head_list().apply(Term::var("xs"))), + ), + ) + .lambda("xs") + .lambda("unzip"), + ) + .apply(Term::var("x")), + ), + ) + .apply(Term::list_data().apply(Term::list_values(vec![ + Constant::Data(Data::list(vec![ + Data::integer(3.into()), + Data::integer(4.into()), + ])), + Constant::Data(Data::list(vec![ + Data::bytestring(vec![85]), + Data::bytestring(vec![119, 153]), + ])), + ]))) + .lambda("x") + .apply(Term::list_values(vec![ + Constant::Data(Data::list(vec![ + Data::integer(3.into()), + Data::bytestring(vec![85]), + ])), + Constant::Data(Data::list(vec![ + Data::integer(4.into()), + Data::bytestring(vec![119, 153]), + ])), + ])), + false, + ); +} + +#[test] +fn acceptance_test_7_unzip_pair() { + let src = r#" + type AList = List> + + pub fn unzip(xs: AList) -> Pair, List> { + when xs is { + [] -> Pair([], []) + [Pair(a, b), ..rest] -> { + let Pair(a_tail, b_tail) = unzip(rest) + Pair([a, ..a_tail], [b, ..b_tail]) + } + } + } + + test unzip1() { + let x = [Pair(3, #"55"), Pair(4, #"7799")] + + unzip(x) == Pair([3, 4], [#"55", #"7799"]) + } + "#; + assert_uplc( src, Term::equals_data() @@ -1413,16 +1573,16 @@ fn acceptance_test_14_list_creation() { #[test] fn acceptance_test_15_zero_arg() { let src = r#" - pub opaque type Map { - inner: List<(key, value)>, + pub opaque type AList { + inner: List>, } pub fn new() { - Map { inner: [] } + AList { inner: [] } } test new_1() { - new() == 
Map { inner: [] } + new() == AList { inner: [] } } "#; @@ -1720,9 +1880,9 @@ fn acceptance_test_19_map_wrap_void() { .apply( Term::mk_cons() .apply( - Term::var("f").apply(Term::var("a")).choose_unit( - Term::data(Data::constr(0, vec![])), - ), + Term::data(Data::constr(0, vec![])) + .lambda("_") + .apply(Term::var("f").apply(Term::var("a"))), ) .apply(Term::empty_list()), ) @@ -1956,48 +2116,51 @@ fn acceptance_test_22_filter_map() { #[test] fn acceptance_test_23_to_list() { let src = r#" - pub opaque type AssocList { - inner: List<(key, value)>, - } + pub type AList = + List> - pub fn new() -> AssocList { - AssocList { inner: [] } - } + pub opaque type AssocList { + inner: AList, + } - pub fn to_list(m: AssocList) -> List<(key, value)> { - m.inner - } + pub fn new() -> AssocList { + AssocList { inner: [] } + } - pub fn insert( - in m: AssocList, - key k: key, - value v: value, - ) -> AssocList { - AssocList { inner: do_insert(m.inner, k, v) } - } + pub fn to_list(m: AssocList) -> AList { + m.inner + } - fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> { - when elems is { - [] -> - [(k, v)] - [(k2, v2), ..rest] -> - if k == k2 { - [(k, v), ..rest] - } else { - [(k2, v2), ..do_insert(rest, k, v)] + pub fn insert( + in m: AssocList, + key k: key, + value v: value, + ) -> AssocList { + AssocList { inner: do_insert(m.inner, k, v) } + } + + fn do_insert(elems: AList, k: key, v: value) -> AList { + when elems is { + [] -> + [Pair(k, v)] + [Pair(k2, v2), ..rest] -> + if k == k2 { + [Pair(k, v), ..rest] + } else { + [Pair(k2, v2), ..do_insert(rest, k, v)] + } } } - } - fn fixture_1() { - new() - |> insert("foo", 42) - |> insert("bar", 14) - } + fn fixture_1() { + new() + |> insert("foo", 42) + |> insert("bar", 14) + } - test to_list_2() { - to_list(fixture_1()) == [("foo", 42), ("bar", 14)] - } + test to_list_2() { + to_list(fixture_1()) == [Pair("foo", 42), Pair("bar", 14)] + } "#; assert_uplc( @@ -2036,7 +2199,7 @@ fn acceptance_test_23_to_list() { } #[test] -fn acceptance_test_24_map2() { +fn acceptance_test_24_map_pair() { let src = r#" pub fn map2( opt_a: Option, @@ -2057,7 +2220,7 @@ fn acceptance_test_24_map2() { } test map2_3() { - map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) + map2(Some(14), Some(42), fn(a, b) { Pair(a, b) }) == Some(Pair(14, 42)) } "#; @@ -2178,6 +2341,129 @@ fn acceptance_test_24_map2() { ); } +#[test] +fn acceptance_test_24_map2() { + let src = r#" + pub fn map2( + opt_a: Option, + opt_b: Option, + f: fn(a, b) -> result, + ) -> Option { + when opt_a is { + None -> + None + Some(a) -> + when opt_b is { + None -> + None + Some(b) -> + Some(f(a, b)) + } + } + } + + test map2_3() { + map2(Some(14), Some(42), fn(a, b) { (a, b) }) == Some((14, 42)) + } + "#; + + assert_uplc( + src, + Term::equals_data() + .apply( + Term::var("map2") + .lambda("map2") + .apply( + Term::equals_integer() + .apply(Term::integer(1.into())) + .apply(Term::var("opt_a_index")) + .delayed_if_then_else( + Term::Constant(Constant::Data(Data::constr(1, vec![])).into()), + Term::equals_integer() + .apply(Term::integer(1.into())) + .apply(Term::var("opt_b_index")) + .delayed_if_then_else( + Term::Constant( + Constant::Data(Data::constr(1, vec![])).into(), + ), + Term::constr_data() + .apply(Term::integer(0.into())) + .apply( + Term::mk_cons() + .apply( + Term::list_data().apply( + Term::var("f") + .apply(Term::var("a")) + .apply(Term::var("b")), + ), + ) + .apply(Term::empty_list()), + ) + .lambda("b") + .apply(Term::un_i_data().apply( + 
Term::head_list().apply(Term::var("opt_b_fields")), + )) + .lambda("opt_b_fields") + .apply( + Term::var(CONSTR_FIELDS_EXPOSER) + .apply(Term::var("opt_b")), + ), + ) + .lambda("opt_b_index") + .apply( + Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("opt_b")), + ) + .lambda("a") + .apply( + Term::un_i_data().apply( + Term::head_list().apply(Term::var("opt_a_fields")), + ), + ) + .lambda("opt_a_fields") + .apply( + Term::var(CONSTR_FIELDS_EXPOSER).apply(Term::var("opt_a")), + ), + ) + .lambda("opt_a_index") + .apply(Term::var(CONSTR_INDEX_EXPOSER).apply(Term::var("opt_a"))) + .lambda("f") + .lambda("opt_b") + .lambda("opt_a"), + ) + .apply(Term::Constant( + Constant::Data(Data::constr(0, vec![Data::integer(14.into())])).into(), + )) + .apply(Term::Constant( + Constant::Data(Data::constr(0, vec![Data::integer(42.into())])).into(), + )) + .apply( + Term::mk_cons() + .apply(Term::i_data().apply(Term::var("a"))) + .apply( + Term::mk_cons() + .apply(Term::i_data().apply(Term::var("b"))) + .apply(Term::empty_list()), + ) + .lambda("b") + .lambda("a"), + ), + ) + .apply(Term::Constant( + Constant::Data(Data::constr( + 0, + vec![Data::list(vec![ + Data::integer(14.into()), + Data::integer(42.into()), + ])], + )) + .into(), + )) + .constr_fields_exposer() + .constr_index_exposer(), + false, + ); +} + #[test] fn acceptance_test_25_void_equal() { let src = r#" @@ -2708,25 +2994,25 @@ fn acceptance_test_28_unique_list() { } #[test] -fn acceptance_test_29_union() { +fn acceptance_test_29_union_pair() { let src = r#" pub opaque type AssocList { - inner: List<(key, value)>, + inner: AList, } pub fn new() -> AssocList { AssocList { inner: [] } } - pub fn from_list(xs: List<(key, value)>) -> AssocList { + pub fn from_list(xs: AList) -> AssocList { AssocList { inner: do_from_list(xs) } } - fn do_from_list(xs: List<(key, value)>) -> List<(key, value)> { + fn do_from_list(xs: AList) -> AList { when xs is { [] -> [] - [(k, v), ..rest] -> + [Pair(k, v), ..rest] -> do_insert(do_from_list(rest), k, v) } } @@ -2739,15 +3025,15 @@ fn acceptance_test_29_union() { AssocList { inner: do_insert(m.inner, k, v) } } - fn do_insert(elems: List<(key, value)>, k: key, v: value) -> List<(key, value)> { + fn do_insert(elems: AList, k: key, v: value) -> AList { when elems is { [] -> - [(k, v)] - [(k2, v2), ..rest] -> + [Pair(k, v)] + [Pair(k2, v2), ..rest] -> if k == k2 { - [(k, v), ..rest] + [Pair(k, v), ..rest] } else { - [(k2, v2), ..do_insert(rest, k, v)] + [Pair(k2, v2), ..do_insert(rest, k, v)] } } } @@ -2760,13 +3046,13 @@ fn acceptance_test_29_union() { } fn do_union( - left: List<(key, value)>, - right: List<(key, value)>, - ) -> List<(key, value)> { + left: AList, + right: AList, + ) -> AList { when left is { [] -> right - [(k, v), ..rest] -> + [Pair(k, v), ..rest] -> do_union(rest, do_insert(right, k, v)) } } @@ -2936,6 +3222,273 @@ fn acceptance_test_29_union() { ); } +#[test] +fn acceptance_test_29_union_tuple() { + let src = r#" + pub opaque type AssocList { + inner: List<(key, value)>, + } + + pub fn new() -> AssocList { + AssocList { inner: [] } + } + + pub fn from_list(xs: List<(key, value)>) -> AssocList { + AssocList { inner: do_from_list(xs) } + } + + fn do_from_list(xs: List<(key, value)>) -> List<(key, value)> { + when xs is { + [] -> + [] + [(k, v), ..rest] -> + do_insert(do_from_list(rest), k, v) + } + } + + pub fn insert( + in m: AssocList, + key k: key, + value v: value, + ) -> AssocList { + AssocList { inner: do_insert(m.inner, k, v) } + } + + fn do_insert(elems: List<(key, value)>, k: key, v: value) 
-> List<(key, value)> { + when elems is { + [] -> + [(k, v)] + [(k2, v2), ..rest] -> + if k == k2 { + [(k, v), ..rest] + } else { + [(k2, v2), ..do_insert(rest, k, v)] + } + } + } + + pub fn union( + left: AssocList, + right: AssocList, + ) -> AssocList { + AssocList { inner: do_union(left.inner, right.inner) } + } + + fn do_union( + left: List<(key, value)>, + right: List<(key, value)>, + ) -> List<(key, value)> { + when left is { + [] -> + right + [(k, v), ..rest] -> + do_union(rest, do_insert(right, k, v)) + } + } + + fn fixture_1() { + new() + |> insert("foo", 42) + |> insert("bar", 14) + } + + test union_1() { + union(fixture_1(), new()) == fixture_1() + } + + "#; + + assert_uplc( + src, + Term::equals_data() + .apply( + Term::list_data().apply( + Term::var("union") + .lambda("union") + .apply( + Term::var("do_union") + .apply(Term::var("left")) + .apply(Term::var("right")) + .lambda("right") + .lambda("left"), + ) + .lambda("do_union") + .apply(Term::var("do_union").apply(Term::var("do_union"))) + .lambda("do_union") + .apply( + Term::var("left") + .delayed_choose_list( + Term::var("right"), + Term::var("do_union") + .apply(Term::var("do_union")) + .apply(Term::var("rest")) + .apply( + Term::var("do_insert") + .apply(Term::var("right")) + .apply(Term::var("k")) + .apply(Term::var("v")), + ) + .lambda("v") + .apply( + Term::un_i_data().apply(Term::head_list().apply( + Term::tail_list().apply(Term::var("tuple")), + )), + ) + .lambda("k") + .apply( + Term::un_b_data() + .apply(Term::head_list().apply(Term::var("tuple"))), + ) + .lambda("rest") + .apply(Term::tail_list().apply(Term::var("left"))) + .lambda("tuple") + .apply( + Term::unlist_data() + .apply(Term::head_list().apply(Term::var("left"))), + ), + ) + .lambda("right") + .lambda("left") + .lambda("do_union"), + ) + .lambda("do_insert") + .apply( + Term::var("do_insert") + .apply(Term::var("do_insert")) + .apply(Term::var("elems")) + .lambda("do_insert") + .apply( + Term::var("elems") + .delayed_choose_list( + Term::mk_cons() + .apply( + Term::list_data().apply( + Term::mk_cons() + .apply( + Term::b_data() + .apply(Term::var("k")), + ) + .apply( + Term::mk_cons() + .apply( + Term::i_data() + .apply(Term::var("v")), + ) + .apply(Term::empty_list()), + ), + ), + ) + .apply(Term::empty_list()), + Term::equals_bytestring() + .apply(Term::var("k")) + .apply(Term::var("k2")) + .delayed_if_then_else( + Term::mk_cons() + .apply( + Term::list_data().apply( + Term::mk_cons() + .apply( + Term::b_data() + .apply(Term::var("k")), + ) + .apply( + Term::mk_cons() + .apply( + Term::i_data() + .apply( + Term::var( + "v", + ), + ), + ) + .apply( + Term::empty_list(), + ), + ), + ), + ) + .apply(Term::var("rest")), + Term::mk_cons() + .apply( + Term::list_data().apply( + Term::mk_cons() + .apply( + Term::b_data() + .apply(Term::var("k2")), + ) + .apply( + Term::mk_cons() + .apply( + Term::i_data() + .apply( + Term::var( + "v2", + ), + ), + ) + .apply( + Term::empty_list(), + ), + ), + ), + ) + .apply( + Term::var("do_insert") + .apply(Term::var("do_insert")) + .apply(Term::var("rest")), + ), + ) + .lambda("v2") + .apply(Term::un_i_data().apply( + Term::head_list().apply( + Term::tail_list().apply(Term::var("tuple")), + ), + )) + .lambda("k2") + .apply(Term::un_b_data().apply( + Term::head_list().apply(Term::var("tuple")), + )) + .lambda("rest") + .apply(Term::tail_list().apply(Term::var("elems"))) + .lambda("tuple") + .apply(Term::unlist_data().apply( + Term::head_list().apply(Term::var("elems")), + )), + ) + .lambda("elems") + .lambda("do_insert"), 
+ ) + .lambda("v") + .lambda("k") + .lambda("elems"), + ) + .apply(Term::list_values(vec![ + Constant::Data(Data::list(vec![ + Data::bytestring("foo".as_bytes().to_vec()), + Data::integer(42.into()), + ])), + Constant::Data(Data::list(vec![ + Data::bytestring("bar".as_bytes().to_vec()), + Data::integer(14.into()), + ])), + ])) + .apply(Term::empty_list()), + ), + ) + .apply(Term::data(Data::list(vec![ + Data::list(vec![ + Data::bytestring("foo".as_bytes().to_vec()), + Data::integer(42.into()), + ]), + Data::list(vec![ + Data::bytestring("bar".as_bytes().to_vec()), + Data::integer(14.into()), + ]), + ]))), + false, + ); +} + #[test] fn acceptance_test_30_abs() { let src = r#" @@ -3171,14 +3724,16 @@ fn when_tuple_deconstruction() { .lambda("other_clauses") .apply(Term::bool(true).delay()) .lambda("dat") - .apply(Term::fst_pair().apply(Term::var("pair_subject"))) + .apply(Term::head_list().apply(Term::var("pair_subject"))) .lambda("red") - .apply(Term::snd_pair().apply(Term::var("pair_subject"))) + .apply(Term::head_list().apply(Term::tail_list().apply(Term::var("pair_subject")))) .lambda("pair_subject") .apply( - Term::mk_pair_data() - .apply(Term::var("dat")) - .apply(Term::var("red")), + Term::mk_cons().apply(Term::var("dat")).apply( + Term::mk_cons() + .apply(Term::var("red")) + .apply(Term::empty_list()), + ), ) .delayed_if_then_else( Term::unit(), @@ -3462,19 +4017,22 @@ fn when_tuple_empty_lists() { .delay(), ) .lambda("bucket_tuple_snd") - .apply( - Term::unlist_data() - .apply(Term::snd_pair().apply(Term::var("bucket_tuple"))), - ) + .apply(Term::unlist_data().apply( + Term::head_list().apply(Term::tail_list().apply(Term::var("bucket_tuple"))), + )) .delay(), ) .lambda("bucket_tuple_fst") - .apply(Term::unlist_data().apply(Term::fst_pair().apply(Term::var("bucket_tuple")))) + .apply(Term::unlist_data().apply(Term::head_list().apply(Term::var("bucket_tuple")))) .lambda("bucket_tuple") .apply( - Term::mk_pair_data() + Term::mk_cons() .apply(Term::list_data().apply(Term::var("bucket1"))) - .apply(Term::list_data().apply(Term::var("bucket2"))), + .apply( + Term::mk_cons() + .apply(Term::list_data().apply(Term::var("bucket2"))) + .apply(Term::empty_list()), + ), ) .lambda("bucket2") .apply(Term::list_values(vec![ @@ -3832,7 +4390,7 @@ fn record_update_output_2_vals() { type Output { address: Address, - value: List<(ByteArray, List<(ByteArray, Int)>)>, + value: List>>>, datum: Datum, script_ref: Option, } @@ -3934,7 +4492,7 @@ fn record_update_output_1_val() { type Output { address: Address, - value: List<(ByteArray, List<(ByteArray, Int)>)>, + value: List>>>, datum: Datum, script_ref: Option, } @@ -4035,7 +4593,7 @@ fn record_update_output_first_last_val() { type Output { address: Address, - value: List<(ByteArray, List<(ByteArray, Int)>)>, + value: List>>>, datum: Datum, script_ref: Option, } @@ -5618,7 +6176,7 @@ fn opaque_value_in_datum() { } opaque type Dict { - inner: List<(ByteArray, v)> + inner: List> } type Dat { @@ -5631,9 +6189,9 @@ fn opaque_value_in_datum() { fn spend(dat: Dat, red: Data, ctx: Data) { let val = dat.a - expect [(_, amount)] = val.inner.inner + expect [Pair(_, amount)] = val.inner.inner - let final_amount = [(#"AA", 4)] |> Dict + let final_amount = [Pair(#"AA", 4)] |> Dict final_amount == amount @@ -5723,7 +6281,7 @@ fn opaque_value_in_datum() { src, Term::var("val") .delayed_choose_list( - Term::Error.delayed_trace(Term::var("expect[(_,amount)]=val.inner.inner")), + Term::Error.delayed_trace(Term::var("expect[Pair(_,amount)]=val.inner.inner")), 
Term::tail_list() .apply(Term::var("val")) .delayed_choose_list( @@ -5742,7 +6300,8 @@ fn opaque_value_in_datum() { Term::unmap_data() .apply(Term::snd_pair().apply(Term::var("tuple_item_0"))), ), - Term::Error.delayed_trace(Term::var("expect[(_,amount)]=val.inner.inner")), + Term::Error + .delayed_trace(Term::var("expect[Pair(_,amount)]=val.inner.inner")), ) .lambda("tuple_item_0") .apply(Term::head_list().apply(Term::var("val"))), @@ -5860,8 +6419,8 @@ fn opaque_value_in_datum() { ) .lambda("dat") .constr_fields_exposer() - .lambda("expect[(_,amount)]=val.inner.inner") - .apply(Term::string("expect [(_, amount)] = val.inner.inner")) + .lambda("expect[Pair(_,amount)]=val.inner.inner") + .apply(Term::string("expect [Pair(_, amount)] = val.inner.inner")) .lambda("dat:Dat") .apply(Term::string("dat: Dat")) .constr_index_exposer(), @@ -5877,7 +6436,7 @@ fn opaque_value_in_test() { } pub opaque type Dict { - inner: List<(ByteArray, v)> + inner: List> } pub type Dat { @@ -5886,7 +6445,7 @@ fn opaque_value_in_test() { } pub fn dat_new() -> Dat { - let v = Value { inner: Dict { inner: [("", [(#"aa", 4)] |> Dict)] } } + let v = Value { inner: Dict { inner: [Pair("", [Pair(#"aa", 4)] |> Dict)] } } Dat { c: 0, a: v @@ -5899,9 +6458,9 @@ fn opaque_value_in_test() { let val = dat.a - expect [(_, amount)] = val.inner.inner + expect [Pair(_, amount)] = val.inner.inner - let final_amount = [(#"AA", 4)] |> Dict + let final_amount = [Pair(#"AA", 4)] |> Dict final_amount == amount } @@ -5911,7 +6470,7 @@ fn opaque_value_in_test() { src, Term::var("val") .delayed_choose_list( - Term::Error.delayed_trace(Term::var("expect[(_,amount)]=val.inner.inner")), + Term::Error.delayed_trace(Term::var("expect[Pair(_,amount)]=val.inner.inner")), Term::tail_list() .apply(Term::var("val")) .delayed_choose_list( @@ -5930,7 +6489,8 @@ fn opaque_value_in_test() { Term::unmap_data() .apply(Term::snd_pair().apply(Term::var("tuple_item_0"))), ), - Term::Error.delayed_trace(Term::var("expect[(_,amount)]=val.inner.inner")), + Term::Error + .delayed_trace(Term::var("expect[Pair(_,amount)]=val.inner.inner")), ) .lambda("tuple_item_0") .apply(Term::head_list().apply(Term::var("val"))), @@ -5964,8 +6524,8 @@ fn opaque_value_in_test() { )])) .into(), )])) - .lambda("expect[(_,amount)]=val.inner.inner") - .apply(Term::string("expect [(_, amount)] = val.inner.inner")) + .lambda("expect[Pair(_,amount)]=val.inner.inner") + .apply(Term::string("expect [Pair(_, amount)] = val.inner.inner")) .constr_fields_exposer(), false, ); @@ -6006,8 +6566,8 @@ fn head_list_on_map() { use aiken/builtin test exp_none() { - let x = [(1, ""), (2, #"aa")] - builtin.head_list(x) == (1, "") + let x = [Pair(1, ""), Pair(2, #"aa")] + builtin.head_list(x) == Pair(1, "") } "#; @@ -6116,34 +6676,19 @@ fn tuple_2_match() { .lambda("x2") .apply( Term::un_i_data().apply( - Term::fst_pair().apply(Term::var("field_0_pair")), + Term::head_list().apply(Term::var("field_0_pair")), ), ) .lambda("y2") - .apply( - Term::un_i_data().apply( - Term::snd_pair().apply(Term::var("field_0_pair")), + .apply(Term::un_i_data().apply( + Term::head_list().apply( + Term::tail_list().apply(Term::var("field_0_pair")), ), - ) + )) .lambda("field_0_pair") - .apply( - Term::mk_pair_data() - .apply( - Term::head_list().apply(Term::var("__list_data")), - ) - .apply(Term::head_list().apply(Term::var("__tail"))) - .lambda("__tail") - .apply( - Term::tail_list().apply(Term::var("__list_data")), - ) - .lambda("__list_data") - .apply( - Term::unlist_data().apply( - Term::head_list() - 
.apply(Term::var("tuple_index_1_fields")), - ), - ), - ) + .apply(Term::unlist_data().apply( + Term::head_list().apply(Term::var("tuple_index_1_fields")), + )) .lambda("tuple_index_1_fields") .apply( Term::var(CONSTR_FIELDS_EXPOSER) @@ -6156,24 +6701,20 @@ fn tuple_2_match() { .lambda("x1") .apply( Term::un_i_data() - .apply(Term::fst_pair().apply(Term::var("field_0_pair"))), + .apply(Term::head_list().apply(Term::var("field_0_pair"))), ) .lambda("y1") .apply( - Term::un_i_data() - .apply(Term::snd_pair().apply(Term::var("field_0_pair"))), + Term::un_i_data().apply( + Term::head_list() + .apply(Term::tail_list().apply(Term::var("field_0_pair"))), + ), ) .lambda("field_0_pair") .apply( - Term::mk_pair_data() - .apply(Term::head_list().apply(Term::var("__list_data"))) - .apply(Term::head_list().apply(Term::var("__tail"))) - .lambda("__tail") - .apply(Term::tail_list().apply(Term::var("__list_data"))) - .lambda("__list_data") - .apply(Term::unlist_data().apply( - Term::head_list().apply(Term::var("tuple_index_0_fields")), - )), + Term::unlist_data().apply( + Term::head_list().apply(Term::var("tuple_index_0_fields")), + ), ) .lambda("tuple_index_0_fields") .apply( @@ -6237,14 +6778,16 @@ fn tuple_2_match() { .delay(), ) .lambda("tuple_index_0") - .apply(Term::fst_pair().apply(Term::var("input"))) + .apply(Term::head_list().apply(Term::var("input"))) .lambda("tuple_index_1") - .apply(Term::snd_pair().apply(Term::var("input"))) + .apply(Term::head_list().apply(Term::tail_list().apply(Term::var("input")))) .lambda("input") .apply( - Term::mk_pair_data() - .apply(Term::var("ec1")) - .apply(Term::var("ec2")), + Term::mk_cons().apply(Term::var("ec1")).apply( + Term::mk_cons() + .apply(Term::var("ec2")) + .apply(Term::empty_list()), + ), ) .lambda("ec2") .lambda("ec1"), diff --git a/crates/aiken/Cargo.toml b/crates/aiken/Cargo.toml index 26631d37..ad98b2af 100644 --- a/crates/aiken/Cargo.toml +++ b/crates/aiken/Cargo.toml @@ -13,6 +13,12 @@ authors = [ ] rust-version = "1.66.1" +[package.metadata.wix] +upgrade-guid = "288B160D-418A-4558-91B9-7C38CFD789C7" +path-guid = "4EB8FCD6-261B-4F6C-B7DB-CFA67B4E6960" +license = false +eula = false + [dependencies] clap = { version = "4.1.8", features = [ "derive", diff --git a/crates/aiken/wix/main.wxs b/crates/aiken/wix/main.wxs new file mode 100644 index 00000000..1e3b67cf --- /dev/null +++ b/crates/aiken/wix/main.wxs @@ -0,0 +1,228 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1 + 1 + + + + + + + + + + + + + + + + + + diff --git a/crates/uplc/Cargo.toml b/crates/uplc/Cargo.toml index c5c0e1c2..ca07006f 100644 --- a/crates/uplc/Cargo.toml +++ b/crates/uplc/Cargo.toml @@ -3,7 +3,7 @@ name = "uplc" description = "Utilities for working with Untyped Plutus Core" version = "1.0.26-alpha" edition = "2021" -repository = "https://github.com/aiken-lang/aiken/crates/uplc" +repository = "https://github.com/aiken-lang/aiken" homepage = "https://github.com/aiken-lang/aiken" license = "Apache-2.0" authors = ["Lucas Rosa ", "Kasey White "] diff --git a/examples/acceptance_tests/036/aiken.lock b/examples/acceptance_tests/036/aiken.lock index acecc43e..95389892 100644 --- a/examples/acceptance_tests/036/aiken.lock +++ b/examples/acceptance_tests/036/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569180, nanos_since_epoch = 895108000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] 
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005298, nanos_since_epoch = 528741000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/036/plutus.json b/examples/acceptance_tests/036/plutus.json index 43e78d9c..713d6e08 100644 --- a/examples/acceptance_tests/036/plutus.json +++ b/examples/acceptance_tests/036/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ @@ -42,8 +42,8 @@ "$ref": "#/definitions/Data" } }, - "compiledCode": "58d501000032323232323232222533300432330010013758601460166016600e6ea8c028c01cdd50011129998048008a501325333007333007533300a3253330083370e900118049baa00114bd6f7b63009bab300d300a375400264660020026eacc034c038c028dd518068019129998060008a60103d87a80001323232533300c3371e91105000000000000375c601a006266e95200033010374c00297ae01330050050023756601a0046020004601c00229445282504a229444cc00c00c004c030004526136565734aae7555cf2ab9f5740ae855d101", - "hash": "416db3eec35a0e94198a5123de948b773f98d4fcba87f581598a8068" + "compiledCode": "58ef01000032323232323232222533300432330010013758601460166016600e6ea8c028c01cdd50011129998048008a501325333007333007533300a3253330083370e900118049baa00114bd6f7b63009bab300d300a375400264660020026eacc034c038c028dd518068019129998060008a60103d87a8000132323232533300d33722911050000000000000021533300d3371e91010500000000000000213374a9000198089ba60014bd700a6103d87a80001330060060033756601c0066eb8c030008c040008c0380045288a504a094452889980180180098060008a4c26cacae6955ceaab9e5573eae815d0aba21", + "hash": "f56561e01063b11146809755d9907147e79d3166aa5c65fba4040fd1" } ], "definitions": { diff --git a/examples/acceptance_tests/047/plutus.json b/examples/acceptance_tests/047/plutus.json index c106e87a..13aef599 100644 --- a/examples/acceptance_tests/047/plutus.json +++ b/examples/acceptance_tests/047/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ diff --git a/examples/acceptance_tests/054/aiken.lock b/examples/acceptance_tests/054/aiken.lock index 97c93220..422eeec6 100644 --- a/examples/acceptance_tests/054/aiken.lock +++ b/examples/acceptance_tests/054/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 784240000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005296, nanos_since_epoch = 102490000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/055/aiken.lock b/examples/acceptance_tests/055/aiken.lock index 5b15affc..fb7e5edd 100644 --- a/examples/acceptance_tests/055/aiken.lock +++ b/examples/acceptance_tests/055/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569192, nanos_since_epoch = 806001000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005303, nanos_since_epoch = 72632000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/055/lib/tests.ak b/examples/acceptance_tests/055/lib/tests.ak index 8b906a5e..eba79737 100644 --- a/examples/acceptance_tests/055/lib/tests.ak +++ 
b/examples/acceptance_tests/055/lib/tests.ak @@ -85,10 +85,10 @@ const bar = #"626172" fn fixture_1() { dict.new() - |> dict.insert(fooz, 42, bytearray.compare) - |> dict.insert(bar, 14, bytearray.compare) + |> dict.insert(fooz, 42) + |> dict.insert(bar, 14) } test union_1() { - dict.union(fixture_1(), dict.new(), bytearray.compare) == fixture_1() + dict.union(fixture_1(), dict.new()) == fixture_1() } diff --git a/examples/acceptance_tests/061/aiken.lock b/examples/acceptance_tests/061/aiken.lock index 5013276d..67a2e006 100644 --- a/examples/acceptance_tests/061/aiken.lock +++ b/examples/acceptance_tests/061/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569192, nanos_since_epoch = 805967000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005305, nanos_since_epoch = 39479000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/061/lib/tests.ak b/examples/acceptance_tests/061/lib/tests.ak index ee0411cb..912aa64d 100644 --- a/examples/acceptance_tests/061/lib/tests.ak +++ b/examples/acceptance_tests/061/lib/tests.ak @@ -61,7 +61,7 @@ pub fn tx_1() -> Transaction { fee: value.zero(), mint: value.from_asset(#"000000", #"00", -1) |> value.to_minted_value, certificates: [], - withdrawals: dict.new(), + withdrawals: [], validity_range: Interval { lower_bound: IntervalBound { bound_type: PositiveInfinity, @@ -73,7 +73,7 @@ pub fn tx_1() -> Transaction { }, }, extra_signatories: [keyhash], - redeemers: dict.new(), + redeemers: [], datums: dict.new(), id: TransactionId { hash: #"" }, } diff --git a/examples/acceptance_tests/063/aiken.lock b/examples/acceptance_tests/063/aiken.lock index f742eba1..868431e6 100644 --- a/examples/acceptance_tests/063/aiken.lock +++ b/examples/acceptance_tests/063/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 348000000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 773339000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/067/aiken.lock b/examples/acceptance_tests/067/aiken.lock index 45b52933..7588b6c5 100644 --- a/examples/acceptance_tests/067/aiken.lock +++ b/examples/acceptance_tests/067/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569194, nanos_since_epoch = 888685000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005316, nanos_since_epoch = 645681000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/068/aiken.lock b/examples/acceptance_tests/068/aiken.lock index 658a7f0d..7a2dc344 100644 --- a/examples/acceptance_tests/068/aiken.lock +++ b/examples/acceptance_tests/068/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569186, nanos_since_epoch = 741545000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005314, nanos_since_epoch = 209079000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff 
--git a/examples/acceptance_tests/068/lib/tests.ak b/examples/acceptance_tests/068/lib/tests.ak index 3af1365d..6398dddf 100644 --- a/examples/acceptance_tests/068/lib/tests.ak +++ b/examples/acceptance_tests/068/lib/tests.ak @@ -24,9 +24,9 @@ pub fn from_asset( ) -> Value { let asset = dict.new() - |> dict.insert(asset_name, quantity, bytearray.compare) + |> dict.insert(asset_name, quantity) dict.new() - |> dict.insert(policy_id, asset, bytearray.compare) + |> dict.insert(policy_id, asset) |> Value } @@ -47,7 +47,6 @@ pub fn add(left v0: Value, right v1: Value) -> Value { Some(q) } }, - bytearray.compare, ) if dict.is_empty(result) { @@ -56,7 +55,6 @@ pub fn add(left v0: Value, right v1: Value) -> Value { Some(result) } }, - bytearray.compare, ) |> Value } diff --git a/examples/acceptance_tests/068/plutus.json b/examples/acceptance_tests/068/plutus.json index 051f63d3..0c8d332e 100644 --- a/examples/acceptance_tests/068/plutus.json +++ b/examples/acceptance_tests/068/plutus.json @@ -2,7 +2,11 @@ "preamble": { "title": "aiken-lang/acceptance_test_068", "version": "0.0.0", - "plutusVersion": "v2" + "plutusVersion": "v2", + "compiler": { + "name": "Aiken", + "version": "v1.0.26-alpha+a44ed4c" + } }, "validators": [] } \ No newline at end of file diff --git a/examples/acceptance_tests/069/aiken.lock b/examples/acceptance_tests/069/aiken.lock index 32ff065c..d777b64b 100644 --- a/examples/acceptance_tests/069/aiken.lock +++ b/examples/acceptance_tests/069/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 533207000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 708386000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/070/aiken.lock b/examples/acceptance_tests/070/aiken.lock index 0833cfa3..d92c3d03 100644 --- a/examples/acceptance_tests/070/aiken.lock +++ b/examples/acceptance_tests/070/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569178, nanos_since_epoch = 711093000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005301, nanos_since_epoch = 544359000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/071/aiken.lock b/examples/acceptance_tests/071/aiken.lock index 723780f0..a8c72e69 100644 --- a/examples/acceptance_tests/071/aiken.lock +++ b/examples/acceptance_tests/071/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569181, nanos_since_epoch = 605261000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005299, nanos_since_epoch = 594302000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/071/plutus.json b/examples/acceptance_tests/071/plutus.json index f60d5d28..16b170f1 100644 --- a/examples/acceptance_tests/071/plutus.json +++ b/examples/acceptance_tests/071/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ diff --git a/examples/acceptance_tests/072/aiken.lock 
b/examples/acceptance_tests/072/aiken.lock index 4e8216b5..73abdf6c 100644 --- a/examples/acceptance_tests/072/aiken.lock +++ b/examples/acceptance_tests/072/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569187, nanos_since_epoch = 164467000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 50660000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/073/aiken.lock b/examples/acceptance_tests/073/aiken.lock index 54983447..25702f67 100644 --- a/examples/acceptance_tests/073/aiken.lock +++ b/examples/acceptance_tests/073/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569187, nanos_since_epoch = 610647000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005309, nanos_since_epoch = 773612000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/074/aiken.lock b/examples/acceptance_tests/074/aiken.lock index 9a850837..89335bff 100644 --- a/examples/acceptance_tests/074/aiken.lock +++ b/examples/acceptance_tests/074/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569191, nanos_since_epoch = 273641000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005298, nanos_since_epoch = 330541000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/075/aiken.lock b/examples/acceptance_tests/075/aiken.lock new file mode 100644 index 00000000..6e350cda --- /dev/null +++ b/examples/acceptance_tests/075/aiken.lock @@ -0,0 +1,7 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +requirements = [] +packages = [] + +[etags] diff --git a/examples/acceptance_tests/075/aiken.toml b/examples/acceptance_tests/075/aiken.toml new file mode 100644 index 00000000..71fd3f18 --- /dev/null +++ b/examples/acceptance_tests/075/aiken.toml @@ -0,0 +1,3 @@ +name = 'aiken-lang/acceptance_test_075' +version = '0.0.0' +description = '' diff --git a/examples/acceptance_tests/075/lib/tests.ak b/examples/acceptance_tests/075/lib/tests.ak new file mode 100644 index 00000000..832dd0ed --- /dev/null +++ b/examples/acceptance_tests/075/lib/tests.ak @@ -0,0 +1,37 @@ +pub opaque type Dict { + inner: List>, +} + +pub fn constant(a: a) -> Fuzzer { + fn(s0) { Some((s0, a)) } +} + +pub fn map(fuzz_a: Fuzzer, f: fn(a) -> b) -> Fuzzer { + fn(s0) { + when fuzz_a(s0) is { + Some((s1, a)) -> Some((s1, f(a))) + None -> None + } + } +} + +// NOTE: Inlining `do_list` fixes the problem. 
But the indirection here causes: +// +// --> Type mismatch expected 'pair data data' got 'data' +pub fn list(fuzzer: Fuzzer) -> Fuzzer> { + do_list(fuzzer, []) +} + +fn do_list(fuzzer, xs) -> Fuzzer> { + let x <- map(fuzzer) + [x, ..xs] +} + +pub fn dict() -> Fuzzer> { + list(constant(Pair(1, True))) + |> map(fn(inner) { Dict { inner } }) +} + +test prop_dict_between(_d via dict()) { + True +} diff --git a/examples/acceptance_tests/077/aiken.lock b/examples/acceptance_tests/077/aiken.lock index ed29406c..f1c124f1 100644 --- a/examples/acceptance_tests/077/aiken.lock +++ b/examples/acceptance_tests/077/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569186, nanos_since_epoch = 96782000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005297, nanos_since_epoch = 729130000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/077/plutus.json b/examples/acceptance_tests/077/plutus.json index dec11497..a38eb0fe 100644 --- a/examples/acceptance_tests/077/plutus.json +++ b/examples/acceptance_tests/077/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ @@ -31,8 +31,25 @@ } } ], - "compiledCode": "5901ae010000323232323232322322232323225333009323232533300c3007300d3754002264646464a666026602c00426464a666024601a60266ea803854ccc048c8cc004004018894ccc05c004528099299980a99baf301a30173754603400402429444cc00c00c004c06800454ccc048c0300044cdc78010080a501616375a60260046eb8c04400458c050004cc88c94ccc044c02cc048dd50008a5eb7bdb1804dd5980b18099baa001323300100100322533301500114c0103d87a8000132323253330153371e00c6eb8c05800c4cdd2a4000660326e980052f5c026600a00a0046eacc058008c064008c05c004c8cc004004dd5980a180a980a980a980a8019129998098008a5eb7bdb1804c8c8c8c94ccc050cdc7a45000021003133018337606ea4008dd3000998030030019bab3015003375c6026004602e004602a0026eb8c04cc040dd50019bac3012001300e37540042c60206022004601e00260166ea80045261365632533300830030011533300b300a37540082930b0a99980418010008a99980598051baa00414985858c020dd50019b8748008dc3a40006eb80055cd2ab9d5573caae7d5d02ba15745", - "hash": "c537a66202fafb789b2e76c6a5430b53b0069e223ba8ad4b5b793d51" + "compiledCode": "5901cc010000323232323232322322232323225333009323232533300c3007300d3754002264646464a666026602c00426464a666024601a60266ea803854ccc048c8cc004004018894ccc05c004528099299980a99baf301a30173754603400402429444cc00c00c004c06800454ccc048c0300044cdc78010080a501616375a60260046eb8c04400458c050004c94ccc03cc024c040dd50008a5eb7bdb1804dd5980a18089baa00132323300100132330010013756602c602e602e602e602e00a44a66602a002297adef6c6013232323253330163372291100002153330163371e9101000021003100513301a337606ea4008dd3000998030030019bab3017003375c602a0046032004602e00244a666028002298103d87a800013232323253330153372200e0042a66602a66e3c01c0084cdd2a4000660326e980052f5c02980103d87a80001330060060033756602c0066eb8c050008c060008c058004dd7180998081baa00337586024002601c6ea800858c040c044008c03c004c02cdd50008a4c26cac64a66601060060022a66601660146ea8010526161533300830020011533300b300a37540082930b0b18041baa003370e90011b8748000dd7000ab9a5573aaae7955cfaba05742ae89", + "hash": "df50e06b40d42b2c399ebcec4c2a8e51a6b28ef99790d950cc251caa" + }, + { + "title": "spend2.backtrace", + "datum": { + "title": "_datum", + "schema": { + "$ref": "#/definitions/Void" + } + }, + "redeemer": { + "title": 
"_redeemer", + "schema": { + "$ref": "#/definitions/Void" + } + }, + "compiledCode": "58ad010000323232323232322323223225333007533300730053008375464660020026eb0c034c038c038c028dd5180698051baa00222533300c00114c0103d87a800013232533300b4a2266e9520003300f0024bd70099802002000980800118070008a511614984d958c94ccc018c010c01c00454ccc024c0200045261616375400264a6660086004600a0022a66600e600c0022930b0b1baa002370e90002b9a5573aaae7955cfaba05742ae881", + "hash": "aae5a1fcf239d541c67a7efb006436be41c5ee7f6f4a8fd7b39b97a8" } ], "definitions": { @@ -42,6 +59,17 @@ "Int": { "dataType": "integer" }, + "Void": { + "title": "Unit", + "description": "The nullary constructor.", + "anyOf": [ + { + "dataType": "constructor", + "index": 0, + "fields": [] + } + ] + }, "aiken/transaction/OutputReference": { "title": "OutputReference", "description": "An `OutputReference` is a unique reference to an output on-chain. The `output_index`\n corresponds to the position in the output list of the transaction (identified by its id)\n that produced that output", diff --git a/examples/acceptance_tests/077/validators/spend.ak b/examples/acceptance_tests/077/validators/spend.ak index 86ca3f22..d0615fa0 100644 --- a/examples/acceptance_tests/077/validators/spend.ak +++ b/examples/acceptance_tests/077/validators/spend.ak @@ -14,11 +14,11 @@ validator(token_name: ByteArray, utxo_ref: OutputReference) { let ScriptContext { transaction, purpose } = ctx expect tx.Mint(policy_id) = purpose let Transaction { inputs, mint, .. } = transaction - expect [(asset_name, amount)] = + expect [Pair(asset_name, amount)] = mint |> value.from_minted_value |> value.tokens(policy_id) - |> dict.to_list() + |> dict.to_alist() when rdmr is { Mint -> { expect diff --git a/examples/acceptance_tests/077/validators/spend2.ak b/examples/acceptance_tests/077/validators/spend2.ak index 6ce6daa6..1e84d996 100644 --- a/examples/acceptance_tests/077/validators/spend2.ak +++ b/examples/acceptance_tests/077/validators/spend2.ak @@ -1,18 +1,15 @@ -// use aiken/list -// use aiken/transaction.{Output, ScriptContext} +use aiken/list +use aiken/transaction.{Output, ScriptContext} -// validator { -// fn backtrace(_datum: Void, _redeemer: Void, context: ScriptContext) -> Bool { -// expect Some(_) = -// list.find(context.transaction.outputs, fn(_) { True }) -// let _ = -// find_stuff(context) -// True -// } -// } +validator { + fn backtrace(_datum: Void, _redeemer: Void, context: ScriptContext) -> Bool { + expect Some(_) = list.find(context.transaction.outputs, fn(_) { True }) + let _ = find_stuff(context) + True + } +} -// fn find_stuff(context) -> Output { -// expect Some(stuff) = -// list.find(context.transaction.outputs, fn(_) { True }) -// stuff -// } +fn find_stuff(context: ScriptContext) -> Output { + expect Some(stuff) = list.find(context.transaction.outputs, fn(_) { True }) + stuff +} diff --git a/examples/acceptance_tests/079/plutus.json b/examples/acceptance_tests/079/plutus.json index 53d75ee4..fe6fe2a6 100644 --- a/examples/acceptance_tests/079/plutus.json +++ b/examples/acceptance_tests/079/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ diff --git a/examples/acceptance_tests/082/aiken.lock b/examples/acceptance_tests/082/aiken.lock index bd046168..8b45e97e 100644 --- a/examples/acceptance_tests/082/aiken.lock +++ b/examples/acceptance_tests/082/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] 
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569178, nanos_since_epoch = 275562000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005301, nanos_since_epoch = 189023000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/083/aiken.lock b/examples/acceptance_tests/083/aiken.lock index 667a69df..a0b1f2bb 100644 --- a/examples/acceptance_tests/083/aiken.lock +++ b/examples/acceptance_tests/083/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569180, nanos_since_epoch = 807185000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005307, nanos_since_epoch = 694173000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/083/lib/tests.ak b/examples/acceptance_tests/083/lib/tests.ak index 3ec19658..6820bbc6 100644 --- a/examples/acceptance_tests/083/lib/tests.ak +++ b/examples/acceptance_tests/083/lib/tests.ak @@ -41,16 +41,16 @@ test dict_test1() { let (ask_map, asize, offer_map, osize) = ( - dict.from_list([(ask_input_ref, transaction.NoDatum)], compare_out_ref), + [Pair(ask_input_ref, transaction.NoDatum)], 1, - dict.from_list([(offer_input_ref, transaction.NoDatum)], compare_out_ref), + [Pair(offer_input_ref, transaction.NoDatum)], 1, ) (ask_map, asize, offer_map, osize) == ( - dict.from_list([(ask_input_ref, transaction.NoDatum)], compare_out_ref), + [Pair(ask_input_ref, transaction.NoDatum)], 1, - dict.from_list([(offer_input_ref, transaction.NoDatum)], compare_out_ref), + [Pair(offer_input_ref, transaction.NoDatum)], 1, ) } @@ -62,14 +62,9 @@ test dict_test2() { [(ask_input_ref, offer_input_ref)] let foo = - fn(pair: (OutputReference, OutputReference), acc: Dict) { + fn(pair: (OutputReference, OutputReference), acc: Dict) { let new_pay_map = - dict.insert( - acc, - value.zero(), - Address(VerificationKeyCredential("00"), None), - compare_value("", "", _, _), - ) + dict.insert(acc, "", Address(VerificationKeyCredential("00"), None)) new_pay_map } @@ -87,21 +82,17 @@ test dict_test3() { let (ask_map, asize, offer_map, osize) = ( - dict.from_list([(ask_input_ref, transaction.NoDatum)], compare_out_ref), + [Pair(ask_input_ref, transaction.NoDatum)], 1, - dict.from_list([(offer_input_ref, transaction.NoDatum)], compare_out_ref), + [Pair(offer_input_ref, transaction.NoDatum)], 1, ) + // TODO: Maybe passing Value to the key generic of dict shouldn't be possible let foo = fn(pair: (OutputReference, OutputReference), acc: Dict) { let new_pay_map = - dict.insert( - acc, - value.zero(), - Address(VerificationKeyCredential("00"), None), - compare_value("", "", _, _), - ) + dict.insert(acc, "", Address(VerificationKeyCredential("00"), None)) new_pay_map } diff --git a/examples/acceptance_tests/084/aiken.lock b/examples/acceptance_tests/084/aiken.lock index 7d09ce6b..eaae717a 100644 --- a/examples/acceptance_tests/084/aiken.lock +++ b/examples/acceptance_tests/084/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569180, nanos_since_epoch = 976274000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005296, nanos_since_epoch = 102522000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] 
diff --git a/examples/acceptance_tests/084/lib/tests.ak b/examples/acceptance_tests/084/lib/tests.ak index f3986b14..36ea112f 100644 --- a/examples/acceptance_tests/084/lib/tests.ak +++ b/examples/acceptance_tests/084/lib/tests.ak @@ -24,6 +24,6 @@ test tuple_when() { } test t() { - trace cbor.diagnostic(list.map([(#"", 20)], snd_pair)) + trace cbor.diagnostic(list.map([Pair(#"", 20)], snd_pair)) True } diff --git a/examples/acceptance_tests/086/aiken.lock b/examples/acceptance_tests/086/aiken.lock index 4f59fb82..c2c56121 100644 --- a/examples/acceptance_tests/086/aiken.lock +++ b/examples/acceptance_tests/086/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 891129000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005304, nanos_since_epoch = 885730000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/086/plutus.json b/examples/acceptance_tests/086/plutus.json index 8c3b22fd..04926d3a 100644 --- a/examples/acceptance_tests/086/plutus.json +++ b/examples/acceptance_tests/086/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ diff --git a/examples/acceptance_tests/087/aiken.lock b/examples/acceptance_tests/087/aiken.lock index 1e356af2..7428caa2 100644 --- a/examples/acceptance_tests/087/aiken.lock +++ b/examples/acceptance_tests/087/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569182, nanos_since_epoch = 746568000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005310, nanos_since_epoch = 646894000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/087/aiken.toml b/examples/acceptance_tests/087/aiken.toml index c2c9c00d..b4ae8714 100644 --- a/examples/acceptance_tests/087/aiken.toml +++ b/examples/acceptance_tests/087/aiken.toml @@ -3,6 +3,6 @@ version = "0.0.0" description = "" [[dependencies]] -name = 'aiken-lang/stdlib' -version = 'main' -source = 'github' +name = "aiken-lang/stdlib" +version = "main" +source = "github" diff --git a/examples/acceptance_tests/088/aiken.lock b/examples/acceptance_tests/088/aiken.lock index 2ef818e3..9afbb376 100644 --- a/examples/acceptance_tests/088/aiken.lock +++ b/examples/acceptance_tests/088/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569188, nanos_since_epoch = 217936000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005301, nanos_since_epoch = 48539000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/089/aiken.lock b/examples/acceptance_tests/089/aiken.lock index 76c029ed..d5390b36 100644 --- a/examples/acceptance_tests/089/aiken.lock +++ b/examples/acceptance_tests/089/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569179, nanos_since_epoch = 408850000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 
1715005312, nanos_since_epoch = 176254000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/089/plutus.json b/examples/acceptance_tests/089/plutus.json index 5bff548d..7821027d 100644 --- a/examples/acceptance_tests/089/plutus.json +++ b/examples/acceptance_tests/089/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ diff --git a/examples/acceptance_tests/090/plutus.json b/examples/acceptance_tests/090/plutus.json index c96bc088..575a026c 100644 --- a/examples/acceptance_tests/090/plutus.json +++ b/examples/acceptance_tests/090/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" } }, "validators": [ diff --git a/examples/acceptance_tests/096/plutus.json b/examples/acceptance_tests/096/plutus.json index 235c630b..4a6d5fca 100644 --- a/examples/acceptance_tests/096/plutus.json +++ b/examples/acceptance_tests/096/plutus.json @@ -6,7 +6,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + "version": "v1.0.26-alpha+c5227a2" }, "license": "Apache-2.0" }, diff --git a/examples/acceptance_tests/100/aiken.lock b/examples/acceptance_tests/100/aiken.lock new file mode 100644 index 00000000..6e350cda --- /dev/null +++ b/examples/acceptance_tests/100/aiken.lock @@ -0,0 +1,7 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +requirements = [] +packages = [] + +[etags] diff --git a/examples/acceptance_tests/100/aiken.toml b/examples/acceptance_tests/100/aiken.toml new file mode 100644 index 00000000..3d8a526c --- /dev/null +++ b/examples/acceptance_tests/100/aiken.toml @@ -0,0 +1,9 @@ +name = "aiken-lang/acceptance_test_100" +version = "0.0.0" +license = "Apache-2.0" +description = "Aiken contracts for project 'aiken-lang/100'" + +[repository] +user = "aiken-lang" +project = "100" +platform = "github" diff --git a/examples/acceptance_tests/100/lib/foo.ak b/examples/acceptance_tests/100/lib/foo.ak new file mode 100644 index 00000000..44889694 --- /dev/null +++ b/examples/acceptance_tests/100/lib/foo.ak @@ -0,0 +1,14 @@ +pub fn list(fuzzer: Option<a>) -> Option<List<a>> { + inner(fuzzer, []) +} + +fn inner(fuzzer, xs) -> Option<List<a>> { + when fuzzer is { + None -> Some(xs) + Some(x) -> Some([x, ..xs]) + } +} + +test foo() { + list(None) == Some([]) +} diff --git a/examples/acceptance_tests/101/aiken.lock b/examples/acceptance_tests/101/aiken.lock new file mode 100644 index 00000000..f9ae89d3 --- /dev/null +++ b/examples/acceptance_tests/101/aiken.lock @@ -0,0 +1,28 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/stdlib" +version = "main" +source = "github" + +[[requirements]] +name = "aiken-lang/fuzz" +version = "main" +source = "github" + +[[packages]] +name = "aiken-lang/stdlib" +version = "main" +requirements = [] +source = "github" + +[[packages]] +name = "aiken-lang/fuzz" +version = "main" +requirements = [] +source = "github" + +[etags] +"aiken-lang/fuzz@main" = [{ secs_since_epoch = 1715005297, nanos_since_epoch = 939440000 }, "d7aadd4a9b25589bd6d5e3bbedcd809cdf97fe3eddb365cf89cd6ac6bc829643"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005297, nanos_since_epoch = 681402000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git
a/examples/acceptance_tests/101/aiken.toml b/examples/acceptance_tests/101/aiken.toml new file mode 100644 index 00000000..3020ceb9 --- /dev/null +++ b/examples/acceptance_tests/101/aiken.toml @@ -0,0 +1,19 @@ +name = "aiken-lang/acceptance_test_101" +version = "0.0.0" +license = "Apache-2.0" +description = "Aiken contracts for project 'aiken-lang/101'" + +[repository] +user = "aiken-lang" +project = "101" +platform = "github" + +[[dependencies]] +name = "aiken-lang/stdlib" +version = "main" +source = "github" + +[[dependencies]] +name = "aiken-lang/fuzz" +version = "main" +source = "github" diff --git a/examples/acceptance_tests/101/lib/foo.ak b/examples/acceptance_tests/101/lib/foo.ak new file mode 100644 index 00000000..9f7abbf8 --- /dev/null +++ b/examples/acceptance_tests/101/lib/foo.ak @@ -0,0 +1,11 @@ +use aiken/fuzz + +test prop_option_distribution(opt via fuzz.option(fuzz.int())) { + fuzz.label( + when opt is { + None -> @"None" + Some(..) -> @"Some" + }, + ) + True +} diff --git a/examples/acceptance_tests/102/aiken.lock b/examples/acceptance_tests/102/aiken.lock new file mode 100644 index 00000000..d9ba2570 --- /dev/null +++ b/examples/acceptance_tests/102/aiken.lock @@ -0,0 +1,16 @@ +# This file was generated by Aiken +# You typically do not need to edit this file + +[[requirements]] +name = "aiken-lang/stdlib" +version = "main" +source = "github" + +[[packages]] +name = "aiken-lang/stdlib" +version = "main" +requirements = [] +source = "github" + +[etags] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715006768, nanos_since_epoch = 293270000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/102/aiken.toml b/examples/acceptance_tests/102/aiken.toml new file mode 100644 index 00000000..78db1db2 --- /dev/null +++ b/examples/acceptance_tests/102/aiken.toml @@ -0,0 +1,14 @@ +name = "aiken-lang/102" +version = "0.0.0" +license = "Apache-2.0" +description = "Aiken contracts for project 'aiken-lang/102'" + +[repository] +user = "aiken-lang" +project = "102" +platform = "github" + +[[dependencies]] +name = "aiken-lang/stdlib" +version = "main" +source = "github" diff --git a/examples/acceptance_tests/102/lib/tests.ak b/examples/acceptance_tests/102/lib/tests.ak new file mode 100644 index 00000000..a8889ab7 --- /dev/null +++ b/examples/acceptance_tests/102/lib/tests.ak @@ -0,0 +1,11 @@ +test panic_aiken() fail { + and { + True, + should_fail(1), + } +} + +fn should_fail(num) { + expect num == 0 + True +} diff --git a/examples/acceptance_tests/script_context/aiken.lock b/examples/acceptance_tests/script_context/aiken.lock index c1c66ebb..e722420b 100644 --- a/examples/acceptance_tests/script_context/aiken.lock +++ b/examples/acceptance_tests/script_context/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1711569203, nanos_since_epoch = 197638000 }, "b3c7a0d03fa7a9f454fccd45c69a09010858ec0b6c9a1f5c71ef2ebc36fc46bb"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1715005324, nanos_since_epoch = 924582000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/acceptance_tests/script_context/plutus.json b/examples/acceptance_tests/script_context/plutus.json index 81d1121b..8083af4d 100644 --- a/examples/acceptance_tests/script_context/plutus.json +++ b/examples/acceptance_tests/script_context/plutus.json @@ -5,7 +5,7 @@ "plutusVersion": "v2", "compiler": { "name": "Aiken", - "version": "v1.0.26-alpha+075668b" + 
"version": "v1.0.26-alpha+c5227a2" } }, "validators": [ @@ -23,8 +23,8 @@ "$ref": "#/definitions/Void" } }, - "compiledCode": "59035a0100003232323232323223232232253330073253330083330083375e601a601c601c601c601c601c601c601c601c601c601c601c60146ea8c004c028dd5001260126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff004a09444c94ccc024c94ccc028cdc3a400460166ea80044c94ccc02ccdd7980218069baa0014c0126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff0013009375a6006601a6ea8004528180798061baa001163001300b3754006264646464a66601a6464a6660240022c2a666024602a002264a66602066ebcc020c048dd50009ba6300448202a35ae41c54ccc040cdd7980498091baa30093012375400298122d8799f581c11111111111111111111111111111111111111111111111111111111ff00153330105333010300e30113754601060246ea8c024c048dd50008a5014a22a666020600a600660246ea80044cdd7980398091baa0014c0103d87a800014a029405280a503014001163758600260206ea8c01cc040dd500411809980a180a000899baf3004300f3754600c601e6ea801cdd31800a40a8294094ccc034c02c00452f5bded8c0264646600200297adef6c6022533301300113301433760981014000374c00697adef6c60132323232533301430093300a488100002133018337609801014000374c00e00a2a66602866e3d22100002133018337609801014000374c00e00626603066ec0dd48011ba6001330060060033756602a0066eb8c04c008c05c008c054004c8cc0040052f5bded8c044a66602400226602666ec13001014000375000697adef6c601323232325333013300833009488100002133017337609801014000375000e00a2a66602666e3d22100002133017337609801014000375000e00626602e66ec0dd48011ba800133006006003375a60280066eb8c048008c058008c050004dd7a60103d879800022533300c33720004002298103d87980001533300c3371e0040022980103d87a800014c103d87b80002300f30103010301000114a04601c601e00229408c034004526136563253330063004300700115333009300800114985858dd5000992999802180118028008a99980398030008a4c2c2c6ea8008dc3a4000ae6955ceaab9e5573eae815d0aba201", - "hash": "2c19b6a631b86636d34dec01ccc7929a4cd2ab0690f8b08cca44d735" + "compiledCode": 
"5903290100003232323232323223232232253330073253330083330083375e601a601c601c601c601c601c601c601c601c601c601c601c60146ea8c004c028dd5001260126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff004a09444c94ccc024c94ccc028cdc3a400460166ea80044c94ccc02ccdd7980218069baa0014c0126d8799f58200000000000000000000000000000000000000000000000000000000000000000ff0013009375a6006601a6ea8004528180798061baa001163001300b375400626464a6660166464a6660200022c2a6660206026002264a66601c66ebcc018c040dd50009ba6300448202a35ae41c54ccc038cdd7980398081baa30073010375400298122d8799f581c11111111111111111111111111111111111111111111111111111111ff001533300e533300e300c300f3754600c60206ea8c01cc040dd50008a5014a22a66601c66ebcc00cc040dd5000a60103d879800013375e600a60206ea800530103d87a800014a029405280a5030120011637586002601c6ea8c014c038dd50031180898091809000899baf3002300d37546008601a6ea8014dd31800a40a8294094ccc02cc02400452f5bded8c0264646600200297adef6c6022533301100113301233760981014000374c00697adef6c60132323232533301233720910100002133016337609801014000374c00e00a2a66602466e3d22100002133016337609801014000374c00e00626602c66ec0dd48011ba600133006006003375660260066eb8c044008c054008c04c004c8cc0040052f5bded8c044a66602000226602266ec13001014000375000697adef6c60132323232533301133720910100002133015337609801014000375000e00a2a66602266e3d22100002133015337609801014000375000e00626602a66ec0dd48011ba800133006006003375a60240066eb8c040008c050008c0480048c03cc040c040c0400045281180718078008a502300d00114984d958c94ccc018c010c01c00454ccc024c0200045261616375400264a6660086004600a0022a66600e600c0022930b0b1baa002370e90002b9a5573aaae7955cfaba05742ae881", + "hash": "c1f48659f124f9dcd673c40fef80ecfa5b42a17949ff12fab7f1c269" }, { "title": "deploy.spend", @@ -40,8 +40,8 @@ "$ref": "#/definitions/Data" } }, - "compiledCode": "59022b01000032323232323232222533300453330043375e00698103d879800013253330053323223253330093370e900018051baa00113232533300b3375e980106d8799f182aff0000213370e600a00890020a50533300a3370e900018059baa00113232533300f3012002149858dd6980800098061baa00116300e300b37540022c64646600200200644a66601c0022980103d87a80001323232533300e3371e00c6eb8c03c00c4cdd2a40006602400297ae0133005005002300f0023012002301000137286ecd30106d8799f182aff00300100122533300a0011480004cdc02400466004004601a0026eacc028c02cc02cc02cc02cc02cc02cc02cc02cc02cc02cc01cdd5180098039baa00213232533300a00116132533300b00116132533300932533300a3375e600a60186ea800530126d87a9f5820fcaa61fb85676101d9e3398a484674e71c45c3fd41b492682f3b0054f4cf3273ff0013375e601e60206020602060186ea800530122d8799f581ce37db487fbd58c45d059bcbf5cd6b1604d3bec16cf888f1395a4ebc4ff0014a0601c0062a66601266ebcc038c03cc02cdd5180298059baa0014c012ad8799fd8799fd8799f581c66666666666666666666666666666666666666666666666666666666ffffff00153330093370e900218051baa3004300b37540022944585280a50300d001300d0013758600260106ea8c008c020dd500191805980618060008a502300a00114a029309b2b2b9a5573aaae7955cfaba05742ae881", - "hash": "6aa6a0ba9b1706f85f3d06f8a026db77191ed91a019afddb63a5db26" + "compiledCode": 
"59023e01000032323232323232222533300453330043375e00698103d879800013253330053323223253330093370e900018051baa00113232533300b3375e980106d8799f182aff0000213370e600a00890020a50533300a3370e900018059baa00113232533300f3012002149858dd6980800098061baa00116300e300b37540022c64646600200200644a66601c0022980103d87a8000132323232533300f3372200e0042a66601e66e3c01c0084cdd2a40006602600297ae014c0103d87a80001330060060033010003375c601c004602400460200026e50dd9a6106d8799f182aff00300100122533300a0011480004cdc02400466004004601a0026eacc028c02cc02cc02cc02cc02cc02cc02cc02cc02cc02cc01cdd5180098039baa00213232533300a00116132533300b00116132533300932533300a3375e600a60186ea800530126d87a9f5820fcaa61fb85676101d9e3398a484674e71c45c3fd41b492682f3b0054f4cf3273ff0013375e601e60206020602060186ea800530122d8799f581ce37db487fbd58c45d059bcbf5cd6b1604d3bec16cf888f1395a4ebc4ff0014a0601c0062a66601266ebcc038c03cc02cdd5180298059baa0014c012ad8799fd8799fd8799f581c66666666666666666666666666666666666666666666666666666666ffffff00153330093370e900218051baa3004300b37540022944585280a50300d001300d0013758600260106ea8c008c020dd500191805980618060008a502300a00114a029309b2b2b9a5573aaae7955cfaba05742ae881", + "hash": "a77ca3bb8551334cbed20bafe31b3a2ac2d1862d065b7994864ebc5b" }, { "title": "mint.mint", @@ -51,8 +51,8 @@ "$ref": "#/definitions/Data" } }, - "compiledCode": "5902b4010000323232323232322253330033232323232533233009323232533300f3012002132533300d3005300e3754002266e3c008dd7180918079baa001163008300e37540142c6eb8c040004cc004004cc010c00cdd5980298061baa3007300c37540109110022533300e00114bd70099807980618080009980100118088008a999191980599912999806980298071baa002132533300e3005300f37540022c266e1cdd6980998081baa001483c850c8cc004004cc88c94ccc044c020c048dd50008a5eb7bdb1804dd5980b18099baa001323300100100322533301500114c0103d87a8000132323253330153371e00c6eb8c05800c4c02ccc064dd3000a5eb804cc014014008dd5980b001180c801180b80098039bab3009301037540046eb8c04cc040dd50019129998090008a6103d87a8000132323253330123371e91103666f6f00375c6026006260106602c6ea00052f5c026600a00a0046eb4c04c008c058008c05000458c01cc034dd5004980418069baa009132323232533300f3007301037540022a66601e66ebc038c050c044dd50008980318019bab300230113754601860226ea80345280b1991191980080080191299980a8008a6103d87a8000132323253330153375e00c602c006260166603200297ae01330050050023016002301900230170013756600260206ea8c02cc040dd5006180518081baa00c2301330143014301430143014301430143014301400130010012253330100011480004cdc0240046600400460260026e952000370e90010a50370e90000a5023300200148810022323300100100322533300d00114bd6f7b630099191919299980719b8f0070021003133012337606ea4008dd3000998030030019bab300f003375c601a0046022004601e0024601660186018601860180024601460160024601200229309b2b2b9a5573aaae7955cfaba05742ae89", - "hash": "148b7dd21a30a5176713f946d88d0a3617a9d10705473319f6e3c04a" + "compiledCode": 
"590300010000323232323232322253330033232323232533233009323232533300f3012002132533300d3005300e3754002266e3c008dd7180918079baa001163008300e37540142c6eb8c040004cc004004cc010c00cdd5980298061baa3007300c37540109110022533300e00114bd70099807980618080009980100118088008a999191980599912999806980298071baa002132533300e3005300f37540022c266e1cdd6980998081baa001483c850c8cc004004c94ccc03cc018c040dd50008a5eb7bdb1804dd5980a18089baa00132323300100130093756601660246ea8010894ccc0500045300103d87a800013232323253330153372200e0042a66602a66e3c01c0084c02ccc064dd3000a5eb80530103d87a80001330060060033756602c0066eb8c050008c060008c058004dd7180998081baa00322533301200114c103d87a800013232323253330133372291103666f6f00002153330133371e910103666f6f000021300933017375000297ae014c0103d87a8000133006006003375a60280066eb8c048008c058008c05000458c01cc034dd5004980418069baa009132323232533300f3007301037540022a66601e66ebc038c044c8cdd8180a800980a980b0009bac3014301137540022600c60046eacc010c044dd5180618089baa00d14a02c64660020026eacc010c044dd5180618089baa00d22533301300114c103d87a80001323253330123375e6026004601c60286ea80404c020cc058dd39980b18098011980b180a00125eb812f5c0266008008002602e004602a002600200244a66602200229000099b8048008cc008008c0500048c044c048c048c048c048c048c048c048c048c048004dd2a40006e1d200214a06e1d200014a046600400291010022323300100100322533300d00114bd6f7b630099191919299980719b910070021533300e3371e00e0042006200a26602466ec0dd48011ba6001330060060033756601e0066eb8c034008c044008c03c0048c02cc030c030c030c0300048c028c02c0048c024004526136565734aae7555cf2ab9f5740ae855d101", + "hash": "f557530d177449d0609a60cbe4f48b221c04a6ca7f9f235fcdc8c741" }, { "title": "withdrawals.spend", @@ -68,8 +68,8 @@ "$ref": "#/definitions/Void" } }, - "compiledCode": "5902010100003232323232323223223225333006323232533300932533300a3370e900118059baa0011613370e6eb4c03cc030dd5000a40a8660026eacc008c02cdd5180198059baa0044c0126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff001533300932533300a3370e900118059baa0011613370e6eb4c03cc030dd5000a4038660026eacc008c02cdd5180198059baa0044c126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff0013375e6e9cc8cc004004dd5980198061baa3004300c375400a44a66601c002297ae013300f300c3010001330020023011001374e6601a98126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff003300d4c126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff004bd700a5014a044646600200200644a66601e002298103d87a80001323232533300f3375e00c6020006266e95200033013375000297ae0133005005002375a6020004602600460220024601a601c601c601c601c601c601c0024601800229309b2b19299980299b8748000c01800454ccc020c01c0045261616375400264a66600666e1d2000300400115333006300500114985858dd5000ab9a5573aaae7955cfaba05742ae89", - "hash": "4d223be4711a6154da0c1e9ff7f158a6716d4e8eacde3e3922f3fae9" + "compiledCode": 
"5902310100003232323232323223223225333006323232533300932533300a3370e900118059baa0011613370e6eb4c030c8cdd81808000980818088009bac300f300c3754002902a198009bab3002300b3754600660166ea80108cdd79805800a6126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff001533300932533300a3370e900118059baa0011613370e6eb4c030c8cdd81808000980818088009bac300f300c3754002900e198009bab3002300b3754600660166ea80108cdd79805800a60126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff0013375e6e9cc8cc004004dd5980198061baa3004300c375400a44a66601c002297ae013300f300c3010001330020023011001374e6601a98126d8799fd8799f581c22222222222222222222222222222222222222222222222222222222ffff003300d4c126d8799fd87a9f581cafddc16c18e7d8de379fb9aad39b3d1b5afd27603e5ebac818432a72ffff004bd700a5014a044646600200200644a66601e002298103d87a800013232533300e300500213374a9000198091ba733012300f0023301230100024bd7025eb804cc010010004c04c008c0440048c034c038c038c038c038c038c0380048c030004526136563253330053370e900018030008a99980418038008a4c2c2c6ea8004c94ccc00ccdc3a400060080022a66600c600a0022930b0b1baa0015734aae7555cf2ab9f5740ae855d11", + "hash": "0ce02b31bca5fbbbb9e4e1f050781b2272590945dc6e1094dc3d2bde" } ], "definitions": { diff --git a/examples/acceptance_tests/script_context/validators/basic.ak b/examples/acceptance_tests/script_context/validators/basic.ak index 0a923df9..8f33ec40 100644 --- a/examples/acceptance_tests/script_context/validators/basic.ak +++ b/examples/acceptance_tests/script_context/validators/basic.ak @@ -1,5 +1,6 @@ use aiken/option -use aiken/transaction.{NoDatum, ScriptContext, Spend, TransactionId} +use aiken/transaction.{NoDatum, + ScriptContext, Spend, Transaction, TransactionId} use aiken/transaction/credential.{VerificationKeyCredential} use aiken/transaction/value @@ -14,7 +15,7 @@ validator { } } -fn assert_id(transaction) { +fn assert_id(transaction: Transaction) { transaction.id != TransactionId( #"0000000000000000000000000000000000000000000000000000000000000000", ) @@ -30,14 +31,13 @@ fn assert_purpose(purpose) { } } -fn assert_fee(transaction) { +fn assert_fee(transaction: Transaction) { transaction.fee == value.from_lovelace(42) } -fn assert_outputs(transaction) { +fn assert_outputs(transaction: Transaction) { when transaction.outputs is { - [output] -> - and { + [output] -> and { output.value == value.from_lovelace(1000000000), output.address.payment_credential == VerificationKeyCredential( #"11111111111111111111111111111111111111111111111111111111", diff --git a/examples/acceptance_tests/script_context/validators/deploy.ak b/examples/acceptance_tests/script_context/validators/deploy.ak index 1bdf78cd..7003d412 100644 --- a/examples/acceptance_tests/script_context/validators/deploy.ak +++ b/examples/acceptance_tests/script_context/validators/deploy.ak @@ -1,7 +1,7 @@ use aiken/builtin -use aiken/dict +use aiken/dict.{Dict} use aiken/hash.{blake2b_256} -use aiken/transaction.{DatumHash, InlineDatum, ScriptContext} +use aiken/transaction.{DatumHash, InlineDatum, Output, ScriptContext} use aiken/transaction/credential.{Inline, VerificationKeyCredential} validator { @@ -23,7 +23,7 @@ type MyDatum { MyDatum(Int) } -fn assert_datums(datums) { +fn assert_datums(datums: Dict) { let my_datum = MyDatum(42) expect Some(datum) = @@ -42,7 +42,7 @@ fn assert_outputs(outputs) { } } -fn assert_first_output(output) { +fn assert_first_output(output: Output) { and { output.datum == DatumHash( #"fcaa61fb85676101d9e3398a484674e71c45c3fd41b492682f3b0054f4cf3273", @@ -53,7 +53,7 @@ fn 
assert_first_output(output) { } } -fn assert_second_output(output) { +fn assert_second_output(output: Output) { and { output.address.stake_credential == Some( Inline( diff --git a/examples/acceptance_tests/script_context/validators/mint.ak b/examples/acceptance_tests/script_context/validators/mint.ak index a622d935..c2c28245 100644 --- a/examples/acceptance_tests/script_context/validators/mint.ak +++ b/examples/acceptance_tests/script_context/validators/mint.ak @@ -1,5 +1,6 @@ use aiken/dict -use aiken/transaction.{Mint, ScriptContext} +use aiken/list +use aiken/transaction.{Mint, ScriptContext, Transaction} use aiken/transaction/value validator { @@ -12,7 +13,7 @@ validator { } } -fn assert_purpose(ctx) { +fn assert_purpose(ctx: ScriptContext) { expect [my_policy_id] = ctx.transaction.mint |> value.from_minted_value @@ -24,7 +25,7 @@ fn assert_purpose(ctx) { my_policy_id == policy_id } -fn assert_mint(purpose, transaction) { +fn assert_mint(purpose, transaction: Transaction) { expect Mint(policy_id) = purpose let tokens = value.tokens(transaction.mint |> value.from_minted_value, policy_id) @@ -35,7 +36,8 @@ fn assert_mint(purpose, transaction) { } } -fn assert_redeemers(ctx, my_redeemer) { - expect Some(redeemer) = dict.get(ctx.transaction.redeemers, ctx.purpose) - my_redeemer == redeemer && dict.size(ctx.transaction.redeemers) == 1 +fn assert_redeemers(ctx: ScriptContext, my_redeemer) { + expect Some(Pair(_, redeemer)) = + list.find(ctx.transaction.redeemers, fn(kv) { kv.1st == ctx.purpose }) + my_redeemer == redeemer && list.length(ctx.transaction.redeemers) == 1 } diff --git a/examples/acceptance_tests/script_context/validators/withdrawals.ak b/examples/acceptance_tests/script_context/validators/withdrawals.ak index f9254fd3..c2931005 100644 --- a/examples/acceptance_tests/script_context/validators/withdrawals.ak +++ b/examples/acceptance_tests/script_context/validators/withdrawals.ak @@ -1,4 +1,4 @@ -use aiken/dict +use aiken/list use aiken/transaction.{ScriptContext} use aiken/transaction/credential.{ Inline, ScriptCredential, VerificationKeyCredential, @@ -21,15 +21,17 @@ validator { ) and { - when dict.get(ctx.transaction.withdrawals, alice) is { + when + list.find(ctx.transaction.withdrawals, fn(kv) { kv.1st == alice }) + is { None -> fail @"alice's withdrawal not found" - Some(value) -> value == 42 + Some(value) -> value.2nd == 42 }, - when dict.get(ctx.transaction.withdrawals, bob) is { + when list.find(ctx.transaction.withdrawals, fn(kv) { kv.1st == bob }) is { None -> fail @"bob's withdrawal not found" - Some(value) -> value == 14 + Some(value) -> value.2nd == 14 }, - dict.keys(ctx.transaction.withdrawals) == [alice, bob], + list.map(ctx.transaction.withdrawals, fn(kv) { kv.1st }) == [alice, bob], } } } diff --git a/examples/gift_card/aiken.lock b/examples/gift_card/aiken.lock index d9ce1ad3..2f7651e9 100644 --- a/examples/gift_card/aiken.lock +++ b/examples/gift_card/aiken.lock @@ -13,4 +13,4 @@ requirements = [] source = "github" [etags] -"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1696594381, nanos_since_epoch = 842483000 }, "a721cf2738274f806efefb5a33c6ff9ae049476f0d45a42049b71793949f4d1d"] +"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1714852860, nanos_since_epoch = 160588000 }, "5ee55dc5ccf269bb493f4cacb32096f0191a6adb2ef39d62a1f79b8c5a8fcc7f"] diff --git a/examples/gift_card/plutus.json b/examples/gift_card/plutus.json index 617e2805..88164468 100644 --- a/examples/gift_card/plutus.json +++ b/examples/gift_card/plutus.json @@ -6,7 +6,7 @@ "plutusVersion": "v2", 
"compiler": { "name": "Aiken", - "version": "v1.0.19-alpha+d56d518" + "version": "v1.0.26-alpha+b669db8" }, "license": "Apache-2.0" }, @@ -33,8 +33,8 @@ } } ], - "compiledCode": "59050a0100003232323232323232322322253330073370e90001803000899299980419191919299980619b8748000c02c0044c8c8c8c8c8c8c8c8c8c8c8c8c8c8c8c8c94ccc074cdc3a400000226464646464646464a66604a002264646600200201844a66605600229444c8c8c8c94ccc0b0cc0240288cdc78008018a999816191980080080d9129998188008a50132325333030323232323253330353375e00266e95200433039375201697ae013375e602a60660080202940c0e4004c0e4004c0e0008c0d8004c0b8008528899802002000981a80118198008a99981619b87001480084cc01801800c5280b0b1bad302d003375c6056004605e004605a00266e95200233029375203097ae014a066002014466e3c08400488c8cc00400400c894ccc0a800452809919299981498028010a51133004004001302e002375c60580026466600200200a97ae022253330253370e00490000800899980180199b8100248008cc0a4dd49b94337160040080026eccc004c07c0088c098004c090040dd69811800980d80c099198008008019129998110008a511323253330213370e6eb4c08c009200113300400400114a0604c0046048002603602e64664464a66603e66e1d20020011002137566048603a006603a004646600200200444a666042002298103d87a800013232323253330223371e02a004266e95200033026374c00297ae0133006006003375660460066eb8c084008c094008c08c0052f5bded8c0646600200200e44a666040002297adef6c6013232323253330213371e911000021003133025337606ea4008dd3000998030030019bab3022003375c6040004604800460440026eb0c07c004c07c004c078004c074004c070008dd5980d000980d000980c8011bac30170013017001301600237586028002601800a6eb8c048004c02800458c040004c040008c038004c018008526136563253330083370e900000089919299980698080010a4c2c6eb4c038004c01800c54ccc020cdc3a40040022a666016600c0062930b0b180300109911929998051919191919191919191919299980a99b8748008c0500204c8c8c94ccc060cdc3a4000602e0022646464646464646464a66604266e1d20023020001132323370e6464664464a66605066e1d200200110021375a605a604c006604c00464a66604c66e1d200200114c0103d87a8000132323300100100222533302c00114c103d87a8000132323232533302d3371e048004266e95200033031375000297ae0133006006003375a605c0066eb8c0b0008c0c0008c0b8004dd5981598120011812000a4000646600200200444a6660520022980103d87a8000132323232533302a3371e010004266e9520003302e374c00297ae0133006006003375660560066eb8c0a4008c0b4008c0ac004c8cc004004040894ccc0a000452f5bded8c0264646464a66605266e3d22100002100313302d337606ea4008dd3000998030030019bab302a003375c60500046058004605400290009bae3027001301f001163025001301d0013023001301b001302100130210013018001301e001301600116323300100100922533301c00114c103d87a800013232533301b3375e6040603200400a266e9520003301f0024bd700998020020009810001180f000980d80098098040b1bab30190013019001301800130170013016002375860280026018006602400260240046020002601000429309b2b1bae004300c3005001375c0024600a6ea80048c00cdd5000ab9a5573aaae7955cfaba05742ae881", - "hash": "e616488e0b79189f22b849f099aba2b2434f84f57beb587d55116bc3" + "compiledCode": 
"5904fc01000032323232323232232225323232333008300330093754008264a666012646464a666018600e601a6ea80044c8c8c8c8c8c8c8c94ccc050c03cc054dd50088991919299980b99800802919b8f016001132323300100100622533301d00114a2264646464a66603c66010012466e3c00400c54ccc078c8cc00400403c894ccc08c00452809929998109919299981199baf3028302930290023374a9002198139ba90074bd70099baf300f302537540020182940c09c004c08cdd518130010a5113300300300130260011533301e301800113300600600314a02c2c6eb4c07c00cdd7180e8011810801180f80099ba548008cc06cdd4805a5eb8052811191980080080191299980e8008a50132533301b3004375c604000429444cc00c00c004c080004c8c8ccc004004dd6980e180c9baa0144bd70111299980c980a0010800899980180199b8000248004cc074dd49b94337160040080026eccc004c05cdd5180d0039180d0008991980080080111299980c8008a5113253330173370e6eb4c064c070009200113300300300114a0603800264a666028601c602a6ea800452f5bded8c026eacc064c058dd5000991980080099198008009bab301a00422533301900114bd6f7b630099191919299980d19b91489000021533301a3371e9101000021003100513301e337606ea4008dd3000998030030019bab301b003375c6032004603a004603600244a666030002298103d87a800013232323253330193372201a0042a66603266e3c0340084c048cc074dd3000a5eb80530103d87a8000133006006003375660340066eb8c060008c070008c068004dd6180b980c180c180c180c000980b980b8011bac30150013015301500237586026002601e6ea800cdd7180898071baa0011630103011002300f001300b375400a29309b2b192999804980200089919299980718088010a4c2c6eb4c03c004c02cdd50030a99980498018008a99980618059baa00614985858c024dd5002899911299980599191919192999808180518089baa004132325333012300d30133754002264a666026601a60286ea80044cdc3999119299980b1808180b9baa0011480004dd6980d980c1baa0013253330163010301737540022980103d87a8000132330010013756603860326ea8008894ccc06c004530103d87a8000132323232533301c337220280042a66603866e3c0500084c054cc080dd4000a5eb80530103d87a8000133006006003375a603a0066eb8c06c008c07c008c074004c8cc00400400c894ccc0680045300103d87a8000132323232533301b3372200e0042a66603666e3c01c0084c050cc07cdd3000a5eb80530103d87a8000133006006003375660380066eb8c068008c078008c070004c8cc004004014894ccc06000452f5bded8c0264646464a66603266e4522100002153330193371e9101000021003100513301d337606ea4008dd3000998030030019bab301a003375c6030004603800460340026eb8c060c054dd5000a40022c602e60286ea8c05cc050dd5180b980c180a1baa3017301437540022c646600200200844a66602c002298103d87a80001323253330153375e6034602e6ea80080144c038cc0640092f5c026600800800260340046030002602a60246ea801058dd5980a180a980a980a980a8011bac3013001300f3754602400460226024002601a6ea800452613656375c00a601a60146ea8010dd2a40006e1d2002370e90001bae0015734aae7555cf2ab9f5740ae855d101", + "hash": "fd38243b432e2b4fd502294f6c5addc5c6979cf289e99f85916b503f" }, { "title": "multi.gift_card", @@ -52,8 +52,8 @@ } } ], - "compiledCode": 
"59050a0100003232323232323232322322253330073370e90001803000899299980419191919299980619b8748000c02c0044c8c8c8c8c8c8c8c8c8c8c8c8c8c8c8c8c94ccc074cdc3a400000226464646464646464a66604a002264646600200201844a66605600229444c8c8c8c94ccc0b0cc0240288cdc78008018a999816191980080080d9129998188008a50132325333030323232323253330353375e00266e95200433039375201697ae013375e602a60660080202940c0e4004c0e4004c0e0008c0d8004c0b8008528899802002000981a80118198008a99981619b87001480084cc01801800c5280b0b1bad302d003375c6056004605e004605a00266e95200233029375203097ae014a066002014466e3c08400488c8cc00400400c894ccc0a800452809919299981498028010a51133004004001302e002375c60580026466600200200a97ae022253330253370e00490000800899980180199b8100248008cc0a4dd49b94337160040080026eccc004c07c0088c098004c090040dd69811800980d80c099198008008019129998110008a511323253330213370e6eb4c08c009200113300400400114a0604c0046048002603602e64664464a66603e66e1d20020011002137566048603a006603a004646600200200444a666042002298103d87a800013232323253330223371e02a004266e95200033026374c00297ae0133006006003375660460066eb8c084008c094008c08c0052f5bded8c0646600200200e44a666040002297adef6c6013232323253330213371e911000021003133025337606ea4008dd3000998030030019bab3022003375c6040004604800460440026eb0c07c004c07c004c078004c074004c070008dd5980d000980d000980c8011bac30170013017001301600237586028002601800a6eb8c048004c02800458c040004c040008c038004c018008526136563253330083370e900000089919299980698080010a4c2c6eb4c038004c01800c54ccc020cdc3a40040022a666016600c0062930b0b180300109911929998051919191919191919191919299980a99b8748008c0500204c8c8c94ccc060cdc3a4000602e0022646464646464646464a66604266e1d20023020001132323370e6464664464a66605066e1d200200110021375a605a604c006604c00464a66604c66e1d200200114c0103d87a8000132323300100100222533302c00114c103d87a8000132323232533302d3371e048004266e95200033031375000297ae0133006006003375a605c0066eb8c0b0008c0c0008c0b8004dd5981598120011812000a4000646600200200444a6660520022980103d87a8000132323232533302a3371e010004266e9520003302e374c00297ae0133006006003375660560066eb8c0a4008c0b4008c0ac004c8cc004004040894ccc0a000452f5bded8c0264646464a66605266e3d22100002100313302d337606ea4008dd3000998030030019bab302a003375c60500046058004605400290009bae3027001301f001163025001301d0013023001301b001302100130210013018001301e001301600116323300100100922533301c00114c103d87a800013232533301b3375e6040603200400a266e9520003301f0024bd700998020020009810001180f000980d80098098040b1bab30190013019001301800130170013016002375860280026018006602400260240046020002601000429309b2b1bae004300c3005001375c0024600a6ea80048c00cdd5000ab9a5573aaae7955cfaba05742ae881", - "hash": "e616488e0b79189f22b849f099aba2b2434f84f57beb587d55116bc3" + "compiledCode": 
"5904fc01000032323232323232232225323232333008300330093754008264a666012646464a666018600e601a6ea80044c8c8c8c8c8c8c8c94ccc050c03cc054dd50088991919299980b99800802919b8f016001132323300100100622533301d00114a2264646464a66603c66010012466e3c00400c54ccc078c8cc00400403c894ccc08c00452809929998109919299981199baf3028302930290023374a9002198139ba90074bd70099baf300f302537540020182940c09c004c08cdd518130010a5113300300300130260011533301e301800113300600600314a02c2c6eb4c07c00cdd7180e8011810801180f80099ba548008cc06cdd4805a5eb8052811191980080080191299980e8008a50132533301b3004375c604000429444cc00c00c004c080004c8c8ccc004004dd6980e180c9baa0144bd70111299980c980a0010800899980180199b8000248004cc074dd49b94337160040080026eccc004c05cdd5180d0039180d0008991980080080111299980c8008a5113253330173370e6eb4c064c070009200113300300300114a0603800264a666028601c602a6ea800452f5bded8c026eacc064c058dd5000991980080099198008009bab301a00422533301900114bd6f7b630099191919299980d19b91489000021533301a3371e9101000021003100513301e337606ea4008dd3000998030030019bab301b003375c6032004603a004603600244a666030002298103d87a800013232323253330193372201a0042a66603266e3c0340084c048cc074dd3000a5eb80530103d87a8000133006006003375660340066eb8c060008c070008c068004dd6180b980c180c180c180c000980b980b8011bac30150013015301500237586026002601e6ea800cdd7180898071baa0011630103011002300f001300b375400a29309b2b192999804980200089919299980718088010a4c2c6eb4c03c004c02cdd50030a99980498018008a99980618059baa00614985858c024dd5002899911299980599191919192999808180518089baa004132325333012300d30133754002264a666026601a60286ea80044cdc3999119299980b1808180b9baa0011480004dd6980d980c1baa0013253330163010301737540022980103d87a8000132330010013756603860326ea8008894ccc06c004530103d87a8000132323232533301c337220280042a66603866e3c0500084c054cc080dd4000a5eb80530103d87a8000133006006003375a603a0066eb8c06c008c07c008c074004c8cc00400400c894ccc0680045300103d87a8000132323232533301b3372200e0042a66603666e3c01c0084c050cc07cdd3000a5eb80530103d87a8000133006006003375660380066eb8c068008c078008c070004c8cc004004014894ccc06000452f5bded8c0264646464a66603266e4522100002153330193371e9101000021003100513301d337606ea4008dd3000998030030019bab301a003375c6030004603800460340026eb8c060c054dd5000a40022c602e60286ea8c05cc050dd5180b980c180a1baa3017301437540022c646600200200844a66602c002298103d87a80001323253330153375e6034602e6ea80080144c038cc0640092f5c026600800800260340046030002602a60246ea801058dd5980a180a980a980a980a8011bac3013001300f3754602400460226024002601a6ea800452613656375c00a601a60146ea8010dd2a40006e1d2002370e90001bae0015734aae7555cf2ab9f5740ae855d101", + "hash": "fd38243b432e2b4fd502294f6c5addc5c6979cf289e99f85916b503f" }, { "title": "oneshot.gift_card", @@ -77,8 +77,8 @@ } } ], - "compiledCode": 
"5901f901000032323232323232323223222232533300932323232533300d3370e9000180600089919191919191919191919299980d980f0010991919299980d99b87480000044c94ccc070cdc3a40006036002264a66603a66e1c011200213371e00a0322940c06800458c8cc004004030894ccc080004530103d87a800013232533301f3375e6048603a004034266e952000330230024bd70099802002000981200118110008a99980d99b87002480044cdc780180b8a503019014375a60360046eb8c06400458c070004c8cc88c94ccc068cdc3a4004002200426eacc07cc06000cc060008c8cc004004008894ccc0700045300103d87a8000132323232533301d3371e01e004266e95200033021374c00297ae01330060060033756603c0066eb8c070008c080008c0780052f5bded8c0646600200200444a666036002297adef6c60132323232533301c3371e911000021003133020337606ea4008dd3000998030030019bab301d003375c6036004603e004603a0026eacc068004c068004c064004c060004c05c008dd6180a80098068029bae3013001300b0011630110013011002300f001300700214984d958c94ccc024cdc3a40000022a666018600e0062930b0a99980499b874800800454ccc030c01c00c52616163007002375c0024600a6ea80048c00cdd5000ab9a5573aaae7955cfaba05742ae89", - "hash": "e98009836402d3b8b30af400d67ed4c0d2ebed025a38a219cf1d30ad" + "compiledCode": "5901f5010000323232323232322322232323225333009323232533300c3007300d3754002264646464a666026602c00426464a666024601a60266ea803854ccc048c034c04cdd5191980080080311299980b8008a60103d87a80001323253330163375e603660306ea800804c4cdd2a40006603400497ae0133004004001301b002301900115333012300c00113371e00402029405854ccc048cdc3800a4002266e3c0080405281bad3013002375c60220022c602800264a66601e601260206ea800452f5bded8c026eacc050c044dd500099191980080099198008009bab3016301730173017301700522533301500114bd6f7b630099191919299980b19b91488100002153330163371e9101000021003100513301a337606ea4008dd3000998030030019bab3017003375c602a0046032004602e00244a666028002298103d87a800013232323253330153372200e0042a66602a66e3c01c0084cdd2a4000660326e980052f5c02980103d87a80001330060060033756602c0066eb8c050008c060008c058004dd7180998081baa00337586024002601c6ea800858c040c044008c03c004c02cdd50008a4c26cac64a66601060060022a66601660146ea8010526161533300830020011533300b300a37540082930b0b18041baa003370e90011b8748000dd7000ab9a5573aaae7955cfaba05742ae89", + "hash": "0c0d17d9095fe6b07a2727403e2c6f2dff8042ed7c300cb67a2577a2" }, { "title": "oneshot.redeem", @@ -108,8 +108,8 @@ } } ], - "compiledCode": "5901340100003232323232323232232232222533300932323232323232323232533301630190021323253330153370e0029000899b8f00201214a06eb4c058008dd7180a0008b180b80099199119299980a99b874800800440084dd5980d180a001980a9baa002323300100100222533301700114c0103d87a800013232323253330183371e026004266e9520003301c374c00297ae0133006006003375660320066eb8c05c008c06c008c0640052f5bded8c0646600200200444a66602c002297adef6c6013232323253330173371e91100002100313301b337606ea4008dd3000998030030019bab3018003375c602c004603400460300026eacc054004c054004c050004c04c004c048004c028004c03c004c02000452613656375c0026eb80048c010dd5000ab9a5573aaae7955cfaba05742ae881", - "hash": "fceef3e0b9e483291b55f9a1ce82f30a78910f5ab7658c51cb38a673" + "compiledCode": 
"5901320100003232323232323223223222253330083232533300d3010002132533300b3370e6eb4c034009200113371e0020122940dd718058008b180700099299980499b8748008c028dd50008a5eb7bdb1804dd5980718059baa001323300100132330010013756601e602060206020602060186ea8c03cc030dd50019129998070008a5eb7bdb1804c8c8c8c94ccc03ccdc8a45000021533300f3371e91010000210031005133013337606ea4008dd3000998030030019bab3010003375c601c0046024004602000244a66601a002298103d87a8000132323232533300e337220140042a66601c66e3c0280084cdd2a4000660246e980052f5c02980103d87a80001330060060033756601e0066eb8c034008c044008c03c00452613656375c0026eb80055cd2ab9d5573caae7d5d02ba157441", + "hash": "39faa048196bb6b30f50815475e9d16b22e7a0ef6de5935b408ca617" } ], "definitions": { diff --git a/examples/gift_card/validators/multi.ak b/examples/gift_card/validators/multi.ak index 5d227961..4eff81ad 100644 --- a/examples/gift_card/validators/multi.ak +++ b/examples/gift_card/validators/multi.ak @@ -1,5 +1,5 @@ +use aiken/builtin use aiken/bytearray -use aiken/cbor use aiken/dict use aiken/hash.{blake2b_256} use aiken/list @@ -60,13 +60,13 @@ validator(creator: ByteArray) { mint |> value.from_minted_value |> value.tokens(policy_id) - |> dict.to_list() + |> dict.to_alist() when rdmr is { Mint(total) -> { expect [input, ..] = inputs // Base is created from serializing a utxo ref being spent. Thus this guarantees a unique base - let base = cbor.serialise(input.output_reference) + let base = builtin.serialise_data(input.output_reference) // Create a list of expected token names let expected_minted_token_names = create_expected_minted_nfts(base, total, []) @@ -86,7 +86,7 @@ validator(creator: ByteArray) { list.all( minted_assets, fn(asset) { - let (_, amount) = asset + let Pair(_, amount) = asset amount == -1 }, ) @@ -111,14 +111,14 @@ fn insert(self: List, e: a, compare: fn(a, a) -> Ordering) -> List { // and has a corresponding ouput with datum containing token name. // Otherwise fail fn check_mint_and_outputs( - minted_assets: List<(ByteArray, Int)>, + minted_assets: AList, outputs: List, expected_assets: List, validator_cred: PaymentCredential, ) -> Bool { when minted_assets is { [] -> True - [(minted_asset_name, quantity), ..rest_assets] -> { + [Pair(minted_asset_name, quantity), ..rest_assets] -> { expect list.any( expected_assets, diff --git a/examples/gift_card/validators/oneshot.ak b/examples/gift_card/validators/oneshot.ak index 8bceee02..31cacf08 100644 --- a/examples/gift_card/validators/oneshot.ak +++ b/examples/gift_card/validators/oneshot.ak @@ -16,11 +16,11 @@ validator(token_name: ByteArray, utxo_ref: OutputReference) { let Transaction { inputs, mint, .. } = transaction - expect [(asset_name, amount)] = + expect [Pair(asset_name, amount)] = mint |> value.from_minted_value |> value.tokens(policy_id) - |> dict.to_list() + |> dict.to_alist() when rdmr is { Mint -> { @@ -39,11 +39,11 @@ validator(token_name: ByteArray, policy_id: ByteArray) { let Transaction { mint, .. 
} = transaction
 
-    expect [(asset_name, amount)] =
+    expect [Pair(asset_name, amount)] =
       mint
         |> value.from_minted_value
         |> value.tokens(policy_id)
-        |> dict.to_list()
+        |> dict.to_alist()
 
     amount == -1 && asset_name == token_name
   }
diff --git a/examples/hello_world/aiken.lock b/examples/hello_world/aiken.lock
index 985d9775..2ea540a4 100644
--- a/examples/hello_world/aiken.lock
+++ b/examples/hello_world/aiken.lock
@@ -13,4 +13,4 @@ requirements = []
 source = "github"
 
 [etags]
-"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1696594311, nanos_since_epoch = 857119000 }, "a721cf2738274f806efefb5a33c6ff9ae049476f0d45a42049b71793949f4d1d"]
+"aiken-lang/stdlib@main" = [{ secs_since_epoch = 1714410781, nanos_since_epoch = 118482000 }, "2a710731e0127ec3e21c6c3962a0254c98602e7428b33fc4fcaa67ab368ce1b1"]