diff --git a/CHANGELOG.md b/CHANGELOG.md
index fb18ce7e..e6efd067 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,6 +18,7 @@
 - **aiken-lang**: `|>` operator can now be formatted as a single (short) line or forced over multiline in a flexible manner
 - **aiken-lang**: the compiler now provides better feedback for type holes (i.e. `_`) in type annotations
 - **aiken-lang**: assignment and clause guard are now always formatted on a new line
+- **aiken-lang**: unused let-bindings are now fully removed from generated code, and discarded let-bindings (i.e. `let _ = ...`) now raise a warning
 
 ## [v0.0.29] - 2023-MM-DD
 
diff --git a/crates/aiken-lang/src/ast.rs b/crates/aiken-lang/src/ast.rs
index 2dee3bb8..4ed4abef 100644
--- a/crates/aiken-lang/src/ast.rs
+++ b/crates/aiken-lang/src/ast.rs
@@ -795,14 +795,20 @@ pub enum Pattern {
     },
 
     /// The creation of a variable.
-    /// e.g. `assert [this_is_a_var, .._] = x`
+    /// e.g. `expect [this_is_a_var, .._] = x`
+    /// e.g. `let foo = 42`
     Var {
         location: Span,
         name: String,
     },
 
     /// A name given to a sub-pattern using the `as` keyword.
-    /// e.g. `assert (1, [_, _] as the_list) = x`
+    ///
+    /// ```aiken
+    /// when foo is {
+    ///    [_, _] as the_list -> ...
+    /// }
+    /// ```
     Assign {
         name: String,
         location: Span,
@@ -849,7 +855,6 @@ impl Pattern {
             | Pattern::List { location, .. }
             | Pattern::Discard { location, .. }
             | Pattern::Tuple { location, .. }
-            // | Pattern::Concatenate { location, .. }
             | Pattern::Constructor { location, .. } => *location,
         }
     }
diff --git a/crates/aiken-lang/src/tests/check.rs b/crates/aiken-lang/src/tests/check.rs
index 40227927..0bf369a7 100644
--- a/crates/aiken-lang/src/tests/check.rs
+++ b/crates/aiken-lang/src/tests/check.rs
@@ -1,6 +1,8 @@
 use crate::{
-    ast::{ModuleKind, Tracing, TypedModule, UntypedModule},
-    builtins, parser,
+    ast::{Definition, ModuleKind, Tracing, TypedModule, UntypedModule},
+    builtins,
+    expr::TypedExpr,
+    parser,
     tipo::error::{Error, Warning},
     IdGenerator,
 };
@@ -319,3 +321,55 @@ fn utf8_hex_literal_warning() {
         Warning::Utf8ByteArrayIsValidHexString { .. }
     ))
 }
+
+#[test]
+fn discarded_let_bindings() {
+    let source_code = r#"
+        fn foo() {
+            let result = when 42 is {
+                1 -> {
+                    let unused = "foo"
+                    Void
+                }
+                _ -> {
+                    Void
+                }
+            }
+
+            let _ = "foo"
+
+            result
+        }
+    "#;
+
+    let (warnings, ast) = check(parse(source_code)).unwrap();
+
+    assert!(matches!(warnings[0], Warning::UnusedVariable { ref name, .. } if name == "unused"));
+    assert!(matches!(warnings[1], Warning::UnusedVariable { ref name, .. } if name == "_"));
+
+    // Check that unused let-bindings have been erased from the transformed AST.
+    match ast.definitions.first() {
+        Some(Definition::Fn(def)) => match &def.body {
+            TypedExpr::Sequence { expressions, .. } => {
+                assert_eq!(expressions.len(), 2);
+                assert!(
+                    matches!(expressions[1], TypedExpr::Var { .. }),
+                    "last expression isn't return variable"
+                );
+                match &expressions[0] {
+                    TypedExpr::Assignment { value, .. } => match **value {
+                        TypedExpr::When { ref clauses, .. } => {
+                            assert!(
+                                matches!(clauses[0].then, TypedExpr::Sequence { ref expressions, ..} if expressions.len() == 1)
+                            )
+                        }
+                        _ => unreachable!("first expression isn't when/is"),
+                    },
+                    _ => unreachable!("first expression isn't assignment"),
+                }
+            }
+            _ => unreachable!("body isn't a Sequence"),
+        },
+        _ => unreachable!("ast isn't a Fn"),
+    }
+}
diff --git a/crates/aiken-lang/src/tipo/error.rs b/crates/aiken-lang/src/tipo/error.rs
index 21324ff7..fc7f3833 100644
--- a/crates/aiken-lang/src/tipo/error.rs
+++ b/crates/aiken-lang/src/tipo/error.rs
@@ -254,7 +254,7 @@ You can use '{discard}' and numbers to distinguish between similar names.
     #[error("I found a discarded expression not bound to a variable.\n")]
     #[diagnostic(code("implicit_discard"))]
     #[diagnostic(help(
-        "A function can contain a sequence of expressions. However, any expression but the last one must be assign to a variable using the {keyword_let} keyword. If you really wish to discard an expression that is unused, you can assign it to '{discard}'.",
+        "A function can contain a sequence of expressions. However, any expression but the last one must be assigned to a variable using the {keyword_let} keyword. If you really wish to discard an expression that is unused, you can assign it to '{discard}'.",
         keyword_let = "let".if_supports_color(Stdout, |s| s.yellow()),
         discard = "_".if_supports_color(Stdout, |s| s.yellow())
     ))]
@@ -1337,13 +1337,6 @@ pub enum Warning {
         name: String,
     },
 
-    #[error("I found a literal that is unused.\n")]
-    #[diagnostic(code("unused::literal"))]
-    UnusedLiteral {
-        #[label]
-        location: Span,
-    },
-
     #[error(
         "I found an unused private function: '{}'.\n",
         name.if_supports_color(Stderr, |s| s.purple()),
@@ -1389,9 +1382,18 @@ pub enum Warning {
     },
 
     #[error("I came across an unused variable.\n")]
-    #[diagnostic(help(
-        "No big deal, but you might want to remove it to get rid of that warning."
-    ))]
+    #[diagnostic(help("{}", formatdoc! {
+        r#"No big deal, but you might want to remove it to get rid of that warning.
+
+           You should also know that, unlike in typical imperative languages, unused let-bindings are {fully_ignored} in Aiken.
+           They will not produce any side-effect (such as error calls). Programs with or without unused variables are semantically equivalent.
+
+           If you do want to enforce some side-effects, use {keyword_expect} instead of {keyword_let}.
+        "#,
+        fully_ignored = "fully ignored".if_supports_color(Stderr, |s| s.bold()),
+        keyword_expect = "expect".if_supports_color(Stderr, |s| s.yellow()),
+        keyword_let = "let".if_supports_color(Stderr, |s| s.yellow()),
+    }))]
     #[diagnostic(code("unused::variable"))]
     UnusedVariable {
         #[label("unused")]
diff --git a/crates/aiken-lang/src/tipo/expr.rs b/crates/aiken-lang/src/tipo/expr.rs
index c7ccc52f..2042ae7a 100644
--- a/crates/aiken-lang/src/tipo/expr.rs
+++ b/crates/aiken-lang/src/tipo/expr.rs
@@ -912,8 +912,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
         annotation: &Option<Annotation>,
         location: Span,
     ) -> Result<TypedExpr, Error> {
-        let typed_value =
-            self.in_new_scope(|value_typer| value_typer.infer(untyped_value.clone()))?;
+        let typed_value = self.infer(untyped_value.clone())?;
 
         let mut value_typ = typed_value.tipo();
         let value_is_data = value_typ.is_data();
@@ -938,6 +937,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
                 untyped_pattern.clone(),
                 value_typ.clone(),
                 Some(ann_typ),
+                kind.is_let(),
             )?
         } else {
             if value_is_data && !untyped_pattern.is_var() && !untyped_pattern.is_discard() {
@@ -963,52 +963,58 @@
                 untyped_pattern.clone(),
                 value_typ.clone(),
                 None,
+                kind.is_let(),
             )?
         };
 
         // We currently only do limited exhaustiveness checking of custom types
         // at the top level of patterns.
         // Do not perform exhaustiveness checking if user explicitly used `assert`.
-        if kind != AssignmentKind::Expect {
-            if let Err(unmatched) = self.environment.check_exhaustiveness(
-                vec![pattern.clone()],
-                collapse_links(value_typ.clone()),
-                location,
-            ) {
-                return Err(Error::NotExhaustivePatternMatch {
-                    location,
-                    unmatched,
-                    is_let: true,
-                });
-            }
-        } else if !value_is_data
-            && !value_typ.is_list()
-            && self
-                .environment
-                .check_exhaustiveness(
+        match kind {
+            AssignmentKind::Let => {
+                if let Err(unmatched) = self.environment.check_exhaustiveness(
                     vec![pattern.clone()],
                     collapse_links(value_typ.clone()),
                     location,
-                )
-                .is_ok()
-        {
-            self.environment
-                .warnings
-                .push(Warning::SingleConstructorExpect {
-                    location: Span {
-                        start: location.start,
-                        end: location.start + kind.location_offset(),
-                    },
-                    pattern_location: untyped_pattern.location(),
-                    value_location: untyped_value.location(),
-                    sample: UntypedExpr::Assignment {
-                        location: Span::empty(),
-                        value: Box::new(untyped_value),
-                        pattern: untyped_pattern,
-                        kind: AssignmentKind::Let,
-                        annotation: None,
-                    },
-                })
+                ) {
+                    return Err(Error::NotExhaustivePatternMatch {
+                        location,
+                        unmatched,
+                        is_let: true,
+                    });
+                }
+            }
+
+            AssignmentKind::Expect => {
+                let is_exhaustive_pattern = self
+                    .environment
+                    .check_exhaustiveness(
+                        vec![pattern.clone()],
+                        collapse_links(value_typ.clone()),
+                        location,
+                    )
+                    .is_ok();
+
+                if !value_is_data && !value_typ.is_list() && is_exhaustive_pattern {
+                    self.environment
+                        .warnings
+                        .push(Warning::SingleConstructorExpect {
+                            location: Span {
+                                start: location.start,
+                                end: location.start + kind.location_offset(),
+                            },
+                            pattern_location: untyped_pattern.location(),
+                            value_location: untyped_value.location(),
+                            sample: UntypedExpr::Assignment {
+                                location: Span::empty(),
+                                value: Box::new(untyped_value),
+                                pattern: untyped_pattern,
+                                kind: AssignmentKind::Let,
+                                annotation: None,
+                            },
+                        });
+                }
+            }
         }
 
         Ok(TypedExpr::Assignment {
@@ -1097,22 +1103,17 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
             location,
         } = clause;
 
-        let (guard, then, typed_pattern, typed_alternatives) =
-            self.in_new_scope(|clause_typer| {
-                // Check the types
-                let (typed_pattern, typed_alternatives) = clause_typer.infer_clause_pattern(
-                    pattern,
-                    alternative_patterns,
-                    subjects,
-                    &location,
-                )?;
+        let (guard, then, typed_pattern, typed_alternatives) = self.in_new_scope(|scope| {
+            // Check the types
+            let (typed_pattern, typed_alternatives) =
+                scope.infer_clause_pattern(pattern, alternative_patterns, subjects, &location)?;
 
-                let guard = clause_typer.infer_optional_clause_guard(guard)?;
+            let guard = scope.infer_optional_clause_guard(guard)?;
 
-                let then = clause_typer.infer(then)?;
+            let then = scope.infer(then)?;
 
-                Ok::<_, Error>((guard, then, typed_pattern, typed_alternatives))
-            })?;
+            Ok::<_, Error>((guard, then, typed_pattern, typed_alternatives))
+        })?;
 
         Ok(Clause {
             location,
@@ -1416,7 +1417,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
             false,
         )?;
 
-        let body = self.in_new_scope(|body_typer| body_typer.infer(first.body.clone()))?;
+        let body = self.infer(first.body.clone())?;
 
         let tipo = body.tipo();
 
@@ -1436,7 +1437,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
                 false,
             )?;
 
-            let body = self.in_new_scope(|body_typer| body_typer.infer(branch.body.clone()))?;
+            let body = self.infer(branch.body.clone())?;
 
             self.unify(
                 tipo.clone(),
@@ -1452,7 +1453,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> {
             });
         }
 
-        let typed_final_else = self.in_new_scope(|body_typer| body_typer.infer(final_else))?;
+        let typed_final_else = self.infer(final_else)?;
 
         self.unify(
             tipo.clone(),
@@ -1502,30 +1503,28 @@
     ) -> Result<(Vec<TypedArg>, TypedExpr), Error> {
         self.assert_no_assignment(&body)?;
 
-        let (body_rigid_names, body_infer) = self.in_new_scope(|body_typer| {
-            for (arg, t) in args.iter().zip(args.iter().map(|arg| arg.tipo.clone())) {
-                match &arg.arg_name {
-                    ArgName::Named { name, .. } => {
-                        body_typer.environment.insert_variable(
-                            name.to_string(),
-                            ValueConstructorVariant::LocalVariable {
-                                location: arg.location,
-                            },
-                            t,
-                        );
+        for (arg, t) in args.iter().zip(args.iter().map(|arg| arg.tipo.clone())) {
+            match &arg.arg_name {
+                ArgName::Named { name, .. } => {
+                    self.environment.insert_variable(
+                        name.to_string(),
+                        ValueConstructorVariant::LocalVariable {
+                            location: arg.location,
+                        },
+                        t,
+                    );
 
-                        body_typer.environment.init_usage(
-                            name.to_string(),
-                            EntityKind::Variable,
-                            arg.location,
-                        );
-                    }
-                    ArgName::Discarded { .. } => (),
-                };
-            }
+                    self.environment.init_usage(
+                        name.to_string(),
+                        EntityKind::Variable,
+                        arg.location,
+                    );
+                }
+                ArgName::Discarded { .. } => (),
+            };
+        }
 
-            (body_typer.hydrator.rigid_names(), body_typer.infer(body))
-        });
+        let (body_rigid_names, body_infer) = (self.hydrator.rigid_names(), self.infer(body));
 
         let body = body_infer.map_err(|e| e.with_unify_error_rigid_names(&body_rigid_names))?;
 
@@ -1620,26 +1619,51 @@
     }
 
     fn infer_seq(&mut self, location: Span, untyped: Vec<UntypedExpr>) -> Result<TypedExpr, Error> {
-        let count = untyped.len();
+        let sequence = self.in_new_scope(|scope| {
+            let count = untyped.len();
 
-        let mut expressions = Vec::with_capacity(count);
+            let mut expressions = Vec::with_capacity(count);
 
-        for (i, expression) in untyped.into_iter().enumerate() {
-            match i.cmp(&(count - 1)) {
-                // When the expression is the last in a sequence, we enforce it is NOT
-                // an assignment (kind of treat assignments like statements).
-                Ordering::Equal => self.assert_no_assignment(&expression)?,
+            for (i, expression) in untyped.into_iter().enumerate() {
+                match i.cmp(&(count - 1)) {
+                    // When the expression is the last in a sequence, we enforce it is NOT
+                    // an assignment (kind of treat assignments like statements).
+                    Ordering::Equal => scope.assert_no_assignment(&expression)?,
 
-                // This isn't the final expression in the sequence, so it *must*
-                // be a let-binding; we do not allow anything else.
-                Ordering::Less => self.assert_assignment(&expression)?,
+                    // This isn't the final expression in the sequence, so it *must*
+                    // be a let-binding; we do not allow anything else.
+                    Ordering::Less => scope.assert_assignment(&expression)?,
 
-                // Can't actually happen
-                Ordering::Greater => (),
+                    // Can't actually happen
+                    Ordering::Greater => (),
+                }
+
+                expressions.push(scope.infer(expression)?);
             }
 
-            expressions.push(self.infer(expression)?);
-        }
+            Ok(expressions)
+        })?;
+
+        let unused = self
+            .environment
+            .warnings
+            .iter()
+            .filter_map(|w| match w {
+                Warning::UnusedVariable { location, .. } => Some(*location),
+                _ => None,
+            })
+            .collect::<Vec<_>>();
+
+        let expressions = sequence
+            .into_iter()
+            .filter(|expr| {
+                if let TypedExpr::Assignment { pattern, ..
} = expr { + !unused.contains(&pattern.location()) + } else { + true + } + }) + .collect::>(); Ok(TypedExpr::Sequence { location, @@ -1869,11 +1893,7 @@ impl<'a, 'b> ExprTyper<'a, 'b> { let return_type = self.new_unbound_var(); for subject in subjects { - let subject = self.in_new_scope(|subject_typer| { - let subject = subject_typer.infer(subject)?; - - Ok::<_, Error>(subject) - })?; + let subject = self.infer(subject)?; subject_types.push(subject.tipo()); diff --git a/crates/aiken-lang/src/tipo/pattern.rs b/crates/aiken-lang/src/tipo/pattern.rs index 3f596dd3..57f5ee86 100644 --- a/crates/aiken-lang/src/tipo/pattern.rs +++ b/crates/aiken-lang/src/tipo/pattern.rs @@ -145,79 +145,12 @@ impl<'a, 'b> PatternTyper<'a, 'b> { // Unify each pattern in the multi-pattern with the corresponding subject let mut typed_multi = Vec::with_capacity(multi_pattern.len()); for (pattern, subject_type) in multi_pattern.into_iter().zip(subjects) { - let pattern = self.unify(pattern, subject_type.clone(), None)?; + let pattern = self.unify(pattern, subject_type.clone(), None, false)?; typed_multi.push(pattern); } Ok(typed_multi) } - // fn infer_pattern_bit_string( - // &mut self, - // mut segments: Vec, - // location: Span, - // ) -> Result { - // let last_segment = segments.pop(); - - // let mut typed_segments: Vec<_> = segments - // .into_iter() - // .map(|s| self.infer_pattern_segment(s, false)) - // .try_collect()?; - - // if let Some(s) = last_segment { - // let typed_last_segment = self.infer_pattern_segment(s, true)?; - // typed_segments.push(typed_last_segment) - // } - - // Ok(TypedPattern::BitString { - // location, - // segments: typed_segments, - // }) - // } - - // fn infer_pattern_segment( - // &mut self, - // segment: UntypedPatternBitStringSegment, - // is_last_segment: bool, - // ) -> Result { - // let UntypedPatternBitStringSegment { - // location, - // options, - // value, - // .. - // } = segment; - - // let options: Vec<_> = options - // .into_iter() - // .map(|o| infer_bit_string_segment_option(o, |value, typ| self.unify(value, typ))) - // .try_collect()?; - - // let segment_type = bit_string::type_options_for_pattern(&options, !is_last_segment) - // .map_err(|error| Error::BitStringSegmentError { - // error: error.error, - // location: error.location, - // })?; - - // let typ = { - // match value.deref() { - // Pattern::Var { .. } if segment_type == string() => { - // Err(Error::BitStringSegmentError { - // error: bit_string::ErrorType::VariableUtfSegmentInPattern, - // location, - // }) - // } - // _ => Ok(segment_type), - // } - // }?; - // let typed_value = self.unify(*value, typ.clone())?; - - // Ok(BitStringSegment { - // location, - // value: Box::new(typed_value), - // options, - // type_: typ, - // }) - // } - /// When we have an assignment or a case expression we unify the pattern with the /// inferred type of the subject in order to determine what variables to insert /// into the environment (or to detect a type error). 
@@ -226,9 +159,17 @@ impl<'a, 'b> PatternTyper<'a, 'b> { pattern: UntypedPattern, tipo: Arc, ann_type: Option>, + is_assignment: bool, ) -> Result { match pattern { - Pattern::Discard { name, location } => Ok(Pattern::Discard { name, location }), + Pattern::Discard { name, location } => { + if is_assignment { + // Register declaration for the unused variable detection + self.environment + .init_usage(name.to_string(), EntityKind::Variable, location); + }; + Ok(Pattern::Discard { name, location }) + } Pattern::Var { name, location } => { self.insert_variable(&name, ann_type.unwrap_or(tipo), location, location)?; @@ -236,29 +177,6 @@ impl<'a, 'b> PatternTyper<'a, 'b> { Ok(Pattern::Var { name, location }) } - // Pattern::Concatenate { - // location, - // left_location, - // right_location, - // left_side_string, - // right_side_assignment, - // } => { - // // The entire concatenate pattern must be a string - // self.environment.unify(tipo, string(), location)?; - - // // The right hand side may assign a variable, which is the suffix of the string - // if let AssignName::Variable(right) = &right_side_assignment { - // self.insert_variable(right.as_ref(), string(), right_location, location)?; - // }; - - // Ok(Pattern::Concatenate { - // location, - // left_location, - // right_location, - // left_side_string, - // right_side_assignment, - // }) - // } Pattern::Assign { name, pattern, @@ -271,7 +189,7 @@ impl<'a, 'b> PatternTyper<'a, 'b> { pattern.location(), )?; - let pattern = self.unify(*pattern, tipo, ann_type)?; + let pattern = self.unify(*pattern, tipo, ann_type, false)?; Ok(Pattern::Assign { name, @@ -299,11 +217,11 @@ impl<'a, 'b> PatternTyper<'a, 'b> { let elements = elements .into_iter() - .map(|element| self.unify(element, tipo.clone(), None)) + .map(|element| self.unify(element, tipo.clone(), None, false)) .try_collect()?; let tail = match tail { - Some(tail) => Some(Box::new(self.unify(*tail, list(tipo), None)?)), + Some(tail) => Some(Box::new(self.unify(*tail, list(tipo), None, false)?)), None => None, }; @@ -336,7 +254,7 @@ impl<'a, 'b> PatternTyper<'a, 'b> { let mut patterns = vec![]; for (pattern, typ) in elems.into_iter().zip(type_elems) { - let typed_pattern = self.unify(pattern, typ.clone(), None)?; + let typed_pattern = self.unify(pattern, typ.clone(), None, false)?; patterns.push(typed_pattern); } @@ -358,7 +276,7 @@ impl<'a, 'b> PatternTyper<'a, 'b> { let mut patterns = vec![]; for (pattern, type_) in elems.into_iter().zip(elems_types) { - let typed_pattern = self.unify(pattern, type_, None)?; + let typed_pattern = self.unify(pattern, type_, None, false)?; patterns.push(typed_pattern); } @@ -498,7 +416,7 @@ impl<'a, 'b> PatternTyper<'a, 'b> { label, } = arg; - let value = self.unify(value, typ.clone(), None)?; + let value = self.unify(value, typ.clone(), None, false)?; Ok::<_, Error>(CallArg { value, diff --git a/examples/acceptance_tests/077/plutus.json b/examples/acceptance_tests/077/plutus.json index 4cacceb9..2454a18e 100644 --- a/examples/acceptance_tests/077/plutus.json +++ b/examples/acceptance_tests/077/plutus.json @@ -5,67 +5,52 @@ "plutusVersion": "v2" }, "validators": [ - { - "title": "spend2.backtrace", - "datum": { - "title": "Unit", - "description": "The nullary constructor.", - "schema": { - "anyOf": [ - { - "dataType": "constructor", - "index": 0, - "fields": [] - } - ] - } - }, - "redeemer": { - "title": "Unit", - "description": "The nullary constructor.", - "schema": { - "anyOf": [ - { - "dataType": "constructor", - "index": 0, - "fields": [] - } - ] - } 
- }, - "compiledCode": "59016e010000323232323232323232323222253330063232323232324a26464640026028002600c64a66601a66e1d20003010375400220022a6601e9212a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e00163300337586600a600c6600a600c0029000240084944018c010c94ccc02ccdc3a4000601c6ea8004400454cc03524012a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e001633001375866006600866006600800a9000240084944c0040048894ccc0400084cdd2a400497ae013232533300d300300213374a90001980980125eb804ccc01401400400cc05000cc04800888c8ccc0040052000003222333300c3370e008004026466600800866e0000d200230150010012300b37540022930b180080091129998048010a4c26600a600260160046660060066018004002ae695cdab9c5573aaae7955cfaba05742ae881", - "hash": "546dac827307d09062352c0f50a35bfd5bcfe64233ea06d791f754ea" - }, { "title": "spend.staking", "datum": { - "title": "Unit", - "description": "The nullary constructor.", + "title": "_datum", "schema": { - "anyOf": [ - { - "dataType": "constructor", - "index": 0, - "fields": [] - } - ] + "$ref": "#/definitions/Void" } }, "redeemer": { - "title": "Unit", - "description": "The nullary constructor.", + "title": "_redeemer", "schema": { - "anyOf": [ - { - "dataType": "constructor", - "index": 0, - "fields": [] - } - ] + "$ref": "#/definitions/Void" } }, "compiledCode": "5904a1010000323232323232323232323222253330063232323232323232323232323232323232323232323232323375e6e98008dd300099980a1bac33016301733016301701848001200422533301f3375e66030603200490000020998061bab3301830190024800800440052f5bded8c06660266eb0cc054c058cc054c05805d200048000894ccc078cdd79980b980c1980b980c0012400490000018998059bab33017301833017301800248009200200110014bd6f7b6301980a180a800a4000604200260420026024002603c002602064a66602e66e1d2000301a375400220022a660329212a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e00163232330013758660226024660226024026900024000466ebccc048c04c00520000043001001222533301e00213374a900125eb804c8c94ccc06cc00c0084cdd2a40006604200497ae0133300500500100330220033020002301c001300e3253330153370e9001180c1baa001100115330174912a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e00163300d300e00f4800888c8004cccc8888cccc03001000c008004008004888c94ccc064c94ccc07c0045288a5000113374a900125eb804cdd2a40006603e6e980052f5c066664444666601600800600400200400244464a66603866e1c005200013374a900125eb804cdd2a4000660446ea00052f5c066e0000800401800c894ccc050cdc8001000899ba5480012f5c02a66602866e3c0080044cdd2a400497ae013374a900225eb80c004004888894ccc068010400c4c8c8c8c8ccccc02402400cccccc02801c004008018014018014dd7180d8019bad301b002301e005301c0043001001222222533301900513301a337606ea4010dd4001a5eb7bdb1804c8c8c8c94ccc060cdd79980280400099ba5480012f5c026603c66ec0dd48041ba8007009153330183371e01000226464a66603466e1c00520001323233022337606ea4030dd40008039bad302200130140021005301c375400266600c01000e00426603c66ec0dd48009ba800233333300a00a003008007006005375c60340066eb4c068008c074018c06c014c004004888894ccc058010400c4c8c8c8c8ccccc02402400cccccc02801c004008018014018014dd7180b8019bab3017002301a005301800430010012222225333015005133016337606ea4010dd3001a5eb7bdb1804c8c8c8c94ccc050cdd79980280400099ba5480012f5c026603466ec0dd48041ba6007009153330143371e01000226464a66602c66e1c0052000132323301e337606ea4030dd30008039bab301e001301000210053018375400266600c01000e00426603466ec0dd48009ba600233333300a00a003008007006005375c602c0066eacc058008c064018c05c014c00400488894ccc04400c40044c8c8cc010008cccc01801800401000cc054010c04c00c88c8ccc0040052000003222333
300c3370e008004026466600800866e0000d200230150010012300b37540022930b180080091129998048010a4c26600a600260160046660060066018004002ae695cdab9c5573aaae7955cfaba05742ae881", "hash": "f6448fb18db20c4da7590f743682d806a7e1ab9cccee75848885b22a" + }, + { + "title": "spend2.backtrace", + "datum": { + "title": "_datum", + "schema": { + "$ref": "#/definitions/Void" + } + }, + "redeemer": { + "title": "_redeemer", + "schema": { + "$ref": "#/definitions/Void" + } + }, + "compiledCode": "590109010000323232323232323232323222253330063232324a2600464a66601266e1d2000300c375400220022a660169212a4578706563746564206f6e20696e636f727265637420636f6e7374727563746f722076617269616e742e0016323233001375866006600866006600800a9000240084944c0040048894ccc0400084cdd2a400497ae013232533300d300300213374a90001980980125eb804ccc01401400400cc05000cc04800888c8ccc0040052000003222333300c3370e008004026466600800866e0000d200230150010012300b37540022930b180080091129998048010a4c26600a600260160046660060066018004002ae695cdab9c5573aaae7955cfaba05742ae881", + "hash": "a48c257b952e82673f9832cba28bf43833fddb1b02ec0aeabe25e338" } - ] + ], + "definitions": { + "Void": { + "title": "Unit", + "description": "The nullary constructor.", + "anyOf": [ + { + "dataType": "constructor", + "index": 0, + "fields": [] + } + ] + } + } } \ No newline at end of file diff --git a/examples/acceptance_tests/run b/examples/acceptance_tests/run index 2bbe07b4..748cdf8c 100755 --- a/examples/acceptance_tests/run +++ b/examples/acceptance_tests/run @@ -16,7 +16,7 @@ TARGET="$WORKDIR/$(basename $1)" TMP=$(mktemp) VALIDATORS=$(find $TARGET -type f -path "*validators/*.ak") -if [ -z $VALIDATORS ]; then +if [ -z "$VALIDATORS" ]; then RESULT=$(cargo run --quiet -- check $TARGET 2>$TMP) else RESULT=$(cargo run --quiet -- build $TARGET 2>$TMP)
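
---

As a side note for reviewers, here is a small illustrative Aiken sketch of the behaviour this change introduces. It is not part of the patch: the `example` function, its `List<Int>` argument, and the bindings are invented for this note, mirroring the shapes used in the new `discarded_let_bindings` test and the updated `Pattern` doc comments.

```aiken
fn example(xs: List<Int>) -> Int {
  // Unused let-binding: it now raises an `unused::variable` warning and is
  // fully removed from the generated code, so it can never have a runtime effect.
  let unused = "foo"

  // A discarded let-binding (`let _ = ...`) is unused by definition and now warns too.
  let _ = "foo"

  // To actually enforce a pattern (and its potential failure) at runtime,
  // use `expect` instead of `let`.
  expect [head, .._] = xs

  head
}
```

Per the new `UnusedVariable` help text, programs with or without the first two bindings are semantically equivalent; only the `expect` assignment keeps a runtime obligation.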