Fix Int/BigInt pivot

We've been wrongly representing large ints as BigInt, causing them to
  behave differently in the VM through builtins like 'serialise_data'.

  Indeed, we expect anything that fits in 8 bytes to be encoded as CBOR
  major type 0 or 1, and only larger values to fall back to major type 6
  (tagged, PosBigInt, NegBigInt). But we were switching to the tagged
  encoding too early: anything outside the range of a signed 64-bit
  integer, i.e. [-2^63, 2^63 - 1], was treated as a big int (positive or
  negative), even though major types 0 and 1 cover [-2^64, 2^64 - 1].
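
  To make that boundary concrete, here is a small standalone Rust sketch
  (not part of this commit; the helper names are made up) of how a value
  needing the full 8-byte argument is laid out inline in CBOR. The expected
  bytes match the acceptance tests added at the end of this diff.

fn cbor_inline_u64(n: u64) -> Vec<u8> {
    // Header 0x1b = major type 0 (unsigned), additional info 27 (8-byte argument).
    let mut out = vec![0x1b];
    out.extend_from_slice(&n.to_be_bytes());
    out
}

fn cbor_inline_negative(n: u64) -> Vec<u8> {
    // Header 0x3b = major type 1 (negative), which stores -(argument + 1),
    // so -n is encoded with an argument of n - 1.
    let mut out = vec![0x3b];
    out.extend_from_slice(&(n - 1).to_be_bytes());
    out
}

fn main() {
    // Same values as the new acceptance tests below.
    assert_eq!(
        cbor_inline_u64(0xdead_beef_dead_beef),
        [0x1b, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xef]
    );
    assert_eq!(
        cbor_inline_negative(0xdead_beef_dead_beef),
        [0x3b, 0xde, 0xad, 0xbe, 0xef, 0xde, 0xad, 0xbe, 0xee]
    );
}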

  Why? Because we checked whether a value i would fit in an i64, and
  treated it as a big int if it didn't. But the real cut-off is more
  subtle, since CBOR can inline unsigned and negative integers up to a
  full 8-byte argument. Fortunately, Rust has i128, and the minicbor
  library implements TryFrom<i128> for its Int type, which enforces that
  the value fits in the range [-2^64, 2^64 - 1], so we're back on track
  easily.
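
  As a rough sketch of that pivot (assuming the num-bigint and num-traits
  crates; the enum and its names are illustrative, and a hand-rolled range
  check stands in for minicbor's TryFrom<i128>):

use num_bigint::{BigInt, Sign};
use num_traits::ToPrimitive;

// Illustrative stand-ins for the CBOR integer shapes; not the commit's types.
enum CborInt {
    Inline(i128),       // major type 0 or 1
    PosBigNum(Vec<u8>), // tag 2, big-endian magnitude
    NegBigNum(Vec<u8>), // tag 3, byte-level details elided here
}

fn pivot(n: &BigInt) -> CborInt {
    // i128 is wide enough to hold every value CBOR can inline,
    // so a failed conversion already means "big num".
    if let Some(i) = n.to_i128() {
        // Inline integers cover [-2^64, 2^64 - 1]; this check stands in for
        // the TryFrom<i128> conversion used in the actual fix below.
        if i >= -(1i128 << 64) && i < (1i128 << 64) {
            return CborInt::Inline(i);
        }
    }
    let (sign, bytes) = n.to_bytes_be();
    match sign {
        Sign::Minus => CborInt::NegBigNum(bytes),
        _ => CborInt::PosBigNum(bytes),
    }
}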
KtorZ 2024-02-25 12:16:07 +01:00 committed by Lucas
parent 8d59ba1c77
commit 46c357df7b
6 changed files with 33 additions and 9 deletions

View File

@@ -1,13 +1,14 @@
 # Changelog

-## v1.0.25-alpha -
+## v1.0.25-alpha - UNRELEASED

 ### Added

 - **aiken-lang**: Data now has a generic argument that can be used to specify the blueprint type. @KtorZ

 ### Fixed

+- **uplc**: `serialise_data` builtin wrongly encoding some larger ints as tagged CBOR bigints, instead of plain integers over 9 bytes. @KtorZ

 ### Changed

View File

@@ -281,9 +281,9 @@ impl Data {
         hex::encode(bytes)
     }
     pub fn integer(i: BigInt) -> PlutusData {
-        match i.to_i64() {
-            Some(i) => PlutusData::BigInt(alonzo::BigInt::Int(i.into())),
-            None => {
+        match i.to_i128().map(|n| n.try_into()) {
+            Some(Ok(i)) => PlutusData::BigInt(alonzo::BigInt::Int(i)),
+            _ => {
                 let (sign, bytes) = i.to_bytes_be();
                 match sign {
                     num_bigint::Sign::Minus => {

View File

@@ -410,10 +410,14 @@ pub fn from_pallas_bigint(n: &babbage::BigInt) -> BigInt {
 }

 pub fn to_pallas_bigint(n: &BigInt) -> babbage::BigInt {
-    if let Some(i) = n.to_i64() {
-        let pallas_int: pallas::codec::utils::Int = i.into();
-        babbage::BigInt::Int(pallas_int)
-    } else if n.is_positive() {
+    if let Some(i) = n.to_i128() {
+        if let Ok(i) = i.try_into() {
+            let pallas_int: pallas::codec::utils::Int = i;
+            return babbage::BigInt::Int(pallas_int);
+        }
+    }
+
+    if n.is_positive() {
         let (_, bytes) = n.to_bytes_be();
         babbage::BigInt::BigUInt(bytes.into())
     } else {

View File

@@ -0,0 +1,7 @@
+# This file was generated by Aiken
+# You typically do not need to edit this file
+
+requirements = []
+packages = []
+
+[etags]

View File

@@ -0,0 +1,3 @@
+name = "aiken-lang/acceptance_test_094"
+version = "0.0.0"
+description = ""

View File

@@ -0,0 +1,9 @@
+use aiken/builtin
+
+test u32_boundary_down() {
+  builtin.serialise_data(0xdeadbeefdeadbeef) == #"1bdeadbeefdeadbeef"
+}
+
+test u32_boundary_up() {
+  builtin.serialise_data(-0xdeadbeefdeadbeef) == #"3bdeadbeefdeadbeee"
+}