From c2f14980767cb0fb2ff3b605989aaeaa07294c49 Mon Sep 17 00:00:00 2001
From: jared <>
Date: Wed, 29 May 2024 14:28:57 -0600
Subject: [PATCH 1/7] Draft of milestone 3 report

---
 docs/aiken-integration.md | 516 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 516 insertions(+)
 create mode 100644 docs/aiken-integration.md

diff --git a/docs/aiken-integration.md b/docs/aiken-integration.md
new file mode 100644
index 00000000..0d61aae8
--- /dev/null
+++ b/docs/aiken-integration.md
@@ -0,0 +1,516 @@
+# Aiken Integration Report
+This document describes LambdaBuffers integration with Aiken.
+It appears that, at Aiken's best,
+while it is possible to provide limited support for a subset of LambdaBuffers' features,
+integration with LambdaBuffers would provide a poor user experience that conflicts with either the major language features of Aiken or the key functionalities provided by LambdaBuffers.
+As such, this document describes the challenges of integrating LambdaBuffers with Aiken,
+and proposes alternative Milestone 4 outputs to better foster adoption of LambdaBuffers within the Cardano ecosystem.
+
+## Aiken limitations
+
+This section describes limitations with Aiken.
+
+### Aiken has no type class support
+
+A key feature of LambdaBuffers is to provide both types and type class instances.
+Aiken has no support for type classes, so one must implement the type class system oneself.
+In other words, one must provide:
+
+* A calling convention for functions with type classes.
+
+* Functions to create instance dictionaries for instances of the type class.
+
+The former is straightforward to implement.
+One can explicitly pass an instance dictionary to each function that requires a type class, as per the usual compilation of type class code to type class free code.
+
+The latter requirement poses trouble for Aiken.
+Aiken does not allow one to create instance dictionaries (product types of the instance's methods) since composite types in Aiken cannot contain a function type.
+
+For example, the following Aiken type
+
+```rust
+type EqDict<a> {
+  eq: fn(a,a) -> Bool,
+  neq: fn(a,a) -> Bool,
+}
+```
+
+would produce an error like
+
+```bash
+$ aiken build
+    Compiling me/package 0.0.0 (/aikentest/package)
+    Compiling aiken-lang/stdlib 1.8.0 (/aikentest/package/build/packages/aiken-lang-stdlib)
+        Error aiken::check::illegal::function_in_type
+
+  × While trying to make sense of your code...
+  ╰─▶ I found a type definition that has a function type in it. This is not allowed.
+
+    ╭─[/aikentest/package/validators/myvalidator.ak:3:1]
+  3 │ type EqDict<a> {
+  4 │   eq: fn(a,a) -> Bool,
+    ·   ───────────────────
+  5 │   neq: fn(a,a) -> Bool,
+    ╰────
+  help: Data-types can't hold functions. If you want to define method-like functions, group the type definition and the methods under a common namespace in a standalone
+        module.
+
+      Summary 1 error, 0 warnings
+```
+
+This makes it impossible to pass instance dictionaries via Aiken's builtin types for type classes.
+
+Alternatively, one could try to sidestep Aiken's builtin types by creating a type which is a Church encoded tuple
+(i.e., implementing a tuple type via function types),
+but doing so requires higher ranked types which again Aiken does not support.
+Moreover, it appears that Aiken does not provide any "back doors" to the type system (e.g. TypeScript's `any` type) to trick the type system that using a Church encoded tuple and with its projections are well typed.
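+
+To spell out the obstruction, the following Haskell sketch (purely illustrative, not code that LambdaBuffers generates) shows what a Church encoded `Eq` dictionary would look like; the `forall r.` quantifier is exactly the higher ranked type that Aiken lacks.
+
+```haskell
+{-# LANGUAGE RankNTypes #-}
+
+-- A Church encoded Eq dictionary: the product of the two methods is
+-- represented purely with function types, avoiding a composite type.
+newtype EqDict a = EqDict (forall r. ((a -> a -> Bool) -> (a -> a -> Bool) -> r) -> r)
+
+-- Creating a dictionary packs the methods behind a continuation.
+mkEqDict :: (a -> a -> Bool) -> (a -> a -> Bool) -> EqDict a
+mkEqDict eq neq = EqDict (\k -> k eq neq)
+
+-- The projections instantiate `r` at the method's own type.
+eqOf :: EqDict a -> (a -> a -> Bool)
+eqOf (EqDict k) = k (\eq _neq -> eq)
+
+neqOf :: EqDict a -> (a -> a -> Bool)
+neqOf (EqDict k) = k (\_eq neq -> neq)
+```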
+It's clear now that having an explicit type for an instance dictionary is not feasible in Aiken,
+so owing to the fact that an instance dictionary is a product type of functions, one can replace all instance dictionaries as an argument with multiple arguments with each method, and replace the functions to create an instance dictionary with multiple functions to create each method in the type class.
+This is indeed possible in Aiken, and to demonstrate this technique, consider the following translation.
+
+```rust
+use aiken/builtin as builtin
+use mypackage/lb_prelude/types_/int as lb_prelude_int // this would have to be implemented in an lb-prelude runtime
+
+pub type MyOption<t> {
+  MyJust(t)
+  MyNothing
+}
+
+// The would-be `Eq` instance: the element type's method is passed
+// explicitly in place of an instance dictionary.
+pub fn eq(eqt : fn(t,t) -> Bool) -> fn(MyOption<t>,MyOption<t>) -> Bool {
+  fn(a,b) {
+    when a is {
+      MyJust(aJust) -> when b is {
+        MyJust(bJust) -> eqt(aJust,bJust)
+        MyNothing -> False
+      }
+      MyNothing -> when b is {
+        MyJust(_) -> False
+        MyNothing -> True
+      }
+    }
+  }
+}
+
+// Decoding from `Data`, given a decoder for the element type.
+pub fn fromData(fromDataT : fn(Data) -> t) -> fn(Data) -> MyOption<t> {
+  fn(theData) {
+    let answer =
+      builtin.choose_data(
+        theData,
+        // The constructor case: inspect the tag to pick the variant.
+        (fn(theData) {
+          let constr = builtin.un_constr_data(theData)
+          let tag = constr.1st
+          let fields = constr.2nd
+          when tag is {
+            0 -> when fields is {
+              [ justValue ] -> MyJust(fromDataT(justValue))
+              _ -> error @"Bad parse"
+            }
+            1 -> when fields is {
+              [] -> MyNothing
+              _ -> error @"Bad parse"
+            }
+            _ -> error @"Bad parse"
+          }
+        })(theData),
+        // The map, list, integer, and bytestring cases never parse.
+        (fn(_theData) { error @"Bad parse" })(theData),
+        (fn(_theData) { error @"Bad parse" })(theData),
+        (fn(_theData) { error @"Bad parse" })(theData),
+        (fn(_theData) { error @"Bad parse" })(theData))
+    answer
+  }
+}
+
+// Encoding to `Data`, given an encoder for the element type.
+pub fn toData(toDataT : fn(t) -> Data) -> fn(MyOption<t>) -> Data {
+  fn(theOption) {
+    when theOption is {
+      MyJust(justValue) -> builtin.constr_data(0, [toDataT(justValue)])
+      MyNothing -> builtin.constr_data(1, [])
+    }
+  }
+}
+
+// Example usages:
+
+test from_nothing_test() {
+  fromData(lb_prelude_int.fromData)(builtin.constr_data(1, [])) == MyNothing
+}
+
+test from_just_test() {
+  fromData(lb_prelude_int.fromData)(builtin.constr_data(0, [builtin.i_data(69)])) == MyJust(69)
+}
+
+test from_to_nothing_test() {
+  toData(lb_prelude_int.toData)(fromData(lb_prelude_int.fromData)(builtin.constr_data(1, []))) == builtin.constr_data(1, [])
+}
+
+test from_to_just_test() {
+  toData(lb_prelude_int.toData)(fromData(lb_prelude_int.fromData)(builtin.constr_data(0, [builtin.i_data(69)]))) == builtin.constr_data(0, [builtin.i_data(69)])
+}
+```
+
+This translation of type classes has some limitations, such as:
+
+* All type class instances must be defined in the same module that the type is defined in, i.e., orphan instances are forbidden.
+
+* Only Haskell2010 type classes would be supported.
+
+### Aiken's encoding of its data is different from LambdaBuffers' encoding
+
+All onchain scripts must be compiled to UPLC, which must in some manner represent higher level language constructs like the data types of the original language.
+Often, data types in a higher level language are translated to UPLC's builtin `Data` type which supports types like lists, constructors, integers, bytestrings, and maps.
+Note that data which will exist as a datum or redeemer must admit a representation with this `Data` type.
+
+LambdaBuffers chooses a particularly efficient encoding of its data types to `Data` for its target languages that compile to UPLC.
+For example, a record like
+
+```purescript
+record MyRecord = { a : Integer, b : Integer }
+```
+
+would be translated to
+
+```purescript
+[a, b]
+```
+
+i.e., records are lists of all record components[^recordsSpecialCases].
+
+[^recordsSpecialCases]: Singleton records are encoded as just the single element.
+
+If LambdaBuffers compiled `MyRecord` to a [record in Aiken](https://aiken-lang.org/language-tour/custom-types) as follows
+
+```rust
+type MyRecord {
+  a : Int,
+  b : Int
+}
+```
+
+we know that Aiken will internally represent this as the following `Data` type
+
+```purescript
+Constr 0 [a, b]
+```
+
+where we note that Aiken includes a useless `Constr 0` tag meaning Aiken's encoding is less efficient than LambdaBuffers' encoding.
+
+In general, Aiken's documentation for the encoding from Aiken's types to UPLC `Data` is unclear,
+but one can inspect the generated UPLC to verify that Aiken would encode the data as mentioned above.
+
+For example, given the following Aiken module
+
+```rust
+pub type MyRecord { a: Int, b: Int }
+
+validator {
+  pub fn hello_world(_redeemer: Data, _scriptContext: Data) {
+    let theRecord = MyRecord(69, -69)
+
+    theRecord.a == 420 && theRecord.b == -420
+  }
+}
+```
+
+One can compile and inspect the UPLC as follows
+
+```shell
+$ aiken build --uplc
+...
+$ cat artifacts/myvalidator.hello_world.uplc
+(program
+  1.0.0
+  [
+    (lam
+      i_0
+      [
+        (lam
+          i_1
+          [
+            (lam
+              i_2
+              [
+                (lam
+                  i_3
+                  (lam
+                    i_4
+                    (lam
+                      i_5
+                      (force
+                        [
+                          [
+                            [
+                              i_3
+                              (force
+                                [
+                                  [
+                                    [
+                                      i_3
+                                      [
+                                        [
+                                          (builtin equalsInteger)
+                                          [
+                                            (builtin unIData)
+                                            [
+                                              i_1
+                                              [
+                                                i_2
+                                                [
+                                                  (builtin unConstrData)
+                                                  (con
+                                                    data (Constr 0 [I 69, I -69])
+                                                  )
+                                                ]
+                                              ]
+                                            ]
+                                          ]
+                                        ]
+                                        (con integer 420)
+                                      ]
+                                    ]
+                                    (delay
+                                      [
+                                        [
+                                          (builtin equalsInteger)
+                                          [
+                                            (builtin unIData)
+                                            [
+                                              i_1
+                                              [
+                                                i_0
+                                                [
+                                                  i_2
+                                                  [
+                                                    (builtin unConstrData)
+                                                    (con
+                                                      data (Constr 0 [I 69, I -69])
+                                                    )
+                                                  ]
+                                                ]
+                                              ]
+                                            ]
+                                          ]
+                                        ]
+                                        (con integer -420)
+                                      ]
+                                    )
+                                  ]
+                                  (delay (con bool False))
+                                ]
+                              )
+                            ]
+                            (delay (con unit ()))
+                          ]
+                          (delay [ (error ) (force (error )) ])
+                        ]
+                      )
+                    )
+                  )
+                )
+                (force (builtin ifThenElse))
+              ]
+            )
+            (force (force (builtin sndPair)))
+          ]
+        )
+        (force (builtin headList))
+      ]
+    )
+    (force (builtin tailList))
+  ]
+)
+```
+
+In particular, the following lines are evidence that the record is encoded inefficiently as `Constr 0 [I 69, I -69]`.
+
+```haskell
+                          [
+                            (builtin unConstrData)
+                            (con
+                              data (Constr 0 [I 69, I -69])
+                            )
+                          ]
+```
+
+This is awkward for LambdaBuffers since when Aiken works with the `MyRecord` type,
+it is represented with the `Constr` tag meaning that LambdaBuffers' efficient encoding would be translated to Aiken's inefficient encoding.
+Ideally, one would want to change how Aiken encodes its data types internally so that Aiken can use LambdaBuffers' efficient encoding everywhere.
+Thus, we lose all benefits of LambdaBuffers' efficient encoding when working with Aiken's mechanisms to define types because LambdaBuffers is forced to take an extra step to translate to Aiken's inefficient encoding.
+As such, Aiken's opinionated way of encoding its data is at odds with LambdaBuffers.
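+
+To make the mismatch concrete, the following Haskell sketch (with a pared down stand-in for the builtin `Data` type, purely for illustration) shows the two encodings of `MyRecord { a = 69, b = -69 }` side by side.
+
+```haskell
+-- A pared down stand-in for UPLC's builtin Data type.
+data Data = Constr Integer [Data] | List [Data] | I Integer
+  deriving (Show, Eq)
+
+-- LambdaBuffers' encoding of the record: a bare list of its fields.
+lbEncoding :: Data
+lbEncoding = List [I 69, I (-69)]
+
+-- Aiken's encoding of the same record: the same fields wrapped in a
+-- redundant `Constr 0` tag.
+aikenEncoding :: Data
+aikenEncoding = Constr 0 [I 69, I (-69)]
+```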
+ +To resolve the mismatch in the encoding of data between the two, one could alternatively sidestep all of Aiken's methods for defining types and instead use Aiken's opaque types to alias `Data` and provide ones own constructors / record accesses as follows + +```rust +use aiken/builtin as builtin + +pub opaque type MyRecord { data: Data } + +pub fn myRecord(a: Int) -> MyRecord { + MyRecord{ data : builtin.list_data([builtin.i_data(a)]) } +} + +pub fn myRecord_a(value : MyRecord) -> Int { + builtin.un_i_data(builtin.head_list(builtin.un_list_data(value))) +} + +// Example program: +validator { + pub fn hello_world(_redeemer: Data, _scriptContext: Data) { + let theRecord = myRecord(69) + + myRecord_a(theRecord) == 420 + } +} +``` + +Interested readers may inspect the compiled UPLC to verify that the data encoding of `MyRecord` agrees with LambdaBuffers' encoding. + +```purescript +(program + 1.0.0 + [ + (lam + i_0 + [ + (lam + i_1 + [ + (lam + i_2 + (lam + i_3 + (lam + i_4 + (force + [ + [ + [ + i_2 + [ + [ + (builtin equalsInteger) + [ + (builtin unIData) + [ + i_0 + [ + [ i_1 (con data (I 69)) ] + (con (list data) []) + ] + ] + ] + ] + (con integer 420) + ] + ] + (delay (con unit ())) + ] + (delay [ (error ) (force (error )) ]) + ] + ) + ) + ) + ) + (force (builtin ifThenElse)) + ] + ) + (force (builtin mkCons)) + ] + ) + (force (builtin headList)) + ] +) +``` + +Note that this would most likely offer a poor user experience as this would essentially replace a large part of Aiken's language construct with our own generated functions for constructing, deconstructing, serialization / deserialization to `Data`, etc. + +### Aiken's packages only support fetching dependencies remotely + +LambdaBuffers is more than just a code generator. +In addition to generating code for sharing types and semantics, its Nix tools augment a set of packages for a project with a package of the generated LambdaBuffers code. + +Aiken does support having packages, but it appears that it only officially supports fetching packages from either Github, GitLab, or BitBucket i.e., it's unclear how to create a local package set augmented with LambdaBuffers' packages. + +For example, the following `aiken.toml` file + +```toml +name = "me/package" +version = "0.0.0" +plutus = "v2" +license = "Apache-2.0" +description = "Aiken contracts for project 'package'" + +[repository] +user = "me" +project = "package" +platform = "github" + +[[dependencies]] +name = "aiken-lang/stdlib" +version = "1.8.0" + +source = "github" +[[dependencies]] +name = "me/otherpackage" +version = "0.0.0" +source = "what do I put here if I have my own local package?" +``` + +would produce an error like + +```bash +$ aiken build + Error aiken::loading::toml + + × TOML parse error at line 20, column 10 + │ | + │ 20 | source = "what do I put here if I have my own local package?" + │ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + │ unknown variant `what do I put here if I have my own local package?`, expected one of `github`, `gitlab`, `bitbucket` + │ + ╭─[/home/jared/Documents/Work/aikentest/mypackage/aiken.toml:19:1] + 19 │ version = "0.0.0" + 20 │ source = "what do I put here if I have my own local package?" + · ──────────────────────────────────────────────────── + 21 │ + ╰──── +``` + +where the error message makes it clear that it only expects the source of dependencies to be from either GitHub, GitLab, or BitBucket. 
+ +As such, it's unclear how to augment the local set of packages with a LambdaBuffers package, as the Nix tools would provide a local package. +Indeed, it's most likely possible to trick Aiken into thinking that a LambdaBuffers package is already installed, +but this delves into implementation specific details of Aiken that may break between releases. + +## Alternative milestone 4 outputs + +Seeing the aforementioned incompatibilities between Aiken and LambdaBuffers, +instead of hacking around the foundational design decisions of Aiken and LambdaBuffers to create an Aiken backend with limited support and a poor user experience, +we strongly believe that milestone 4 would be better spent to improve the existing LambdaBuffers stack. +In particular, LambdaBuffers has seen use in other projects such as [DeNS](https://github.com/mlabs-haskell/DeNS/tree/main), OrcFax, etc. +and we've received feedback to better the LambdaBuffers existing facilities so addressing this feedback would aid in fostering adoption of LambdaBuffers in other projects. + +As such, for milestone 4, we propose to provide the following instead: + +Bugs: + +* Haskell backend bugs. + + * [Generated Haskell code is invalid](https://github.com/mlabs-haskell/lambda-buffers/issues/197) + + * [Missing dependencies from the generated files](https://github.com/mlabs-haskell/lambda-buffers/issues/124) + +* Plutarch backend bugs. + + * [Generated Plutarch code is invalid](https://github.com/mlabs-haskell/lambda-buffers/issues/148) + +* [Optimizing the LambdaBuffers compiler performance](https://github.com/mlabs-haskell/lambda-buffers/issues/76) + +Features: + +* [Completing the Plutus `.lbf` schemas to include all Plutus Ledger API types](https://github.com/mlabs-haskell/lambda-buffers/issues/175) + +* [Creating a versioning scheme](https://github.com/mlabs-haskell/lambda-buffers/issues/220) + +* [Separate the PlutusTx backend from a Haskell Plutus backend](https://github.com/mlabs-haskell/lambda-buffers/issues/221) + +* [Optimizing slow nix build times](https://github.com/mlabs-haskell/lambda-buffers/pull/193#issuecomment-1942114795) + +* [Improving error messages for better editor integration](https://github.com/mlabs-haskell/lambda-buffers/issues/152) From 8e191aee41f4a99ca5596d0e1115765df6244778 Mon Sep 17 00:00:00 2001 From: jared <> Date: Fri, 31 May 2024 00:18:36 -0600 Subject: [PATCH 2/7] PR feedback: - Fixed typos - Changed wording from "we know" to the tone of "we observe" - Added Aiken version --- docs/aiken-integration.md | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/aiken-integration.md b/docs/aiken-integration.md index 0d61aae8..26d3494e 100644 --- a/docs/aiken-integration.md +++ b/docs/aiken-integration.md @@ -10,6 +10,13 @@ and proposes alternative Milestone 4 outputs to better foster adoption of Lambda This section describes limitations with Aiken. +All testing / observations with Aiken were done with the following version. + +```bash +$ aiken --version +aiken v1.0.28-alpha+c9a1519 +``` + ### Aiken has no type class support A key feature of LambdaBuffers is to provide both types and type class instances. @@ -172,9 +179,9 @@ would be translated to i.e., records are lists of all record components[^recordsSpecialCases]. -[^recordsSpecialCases]: Singleton records are encoded as just the single element. +[^recordsSpecialCases]: There are some special cases for the encoding in LambdaBuffers. For example, singleton records are encoded as just the single element. 
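+
+To illustrate the singleton special case, here is a small Haskell sketch (again with a pared down stand-in for `Data`, assuming the encoding rules described above) contrasting a two field record with a singleton record.
+
+```haskell
+-- A pared down stand-in for UPLC's builtin Data type.
+data Data = List [Data] | I Integer
+  deriving (Show, Eq)
+
+-- `record Two = { a : Integer, b : Integer }` encodes as the list of its fields.
+encodeTwo :: Integer -> Integer -> Data
+encodeTwo a b = List [I a, I b]
+
+-- `record One = { a : Integer }` is a singleton record, so it encodes as
+-- just the single element, with no list wrapper around it.
+encodeOne :: Integer -> Data
+encodeOne a = I a
+```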
-If LambdaBuffers compiled `MyRecord` to a [record in Aiken](https://aiken-lang.org/language-tour/custom-types) as follows +If LambdaBuffers compiled `MyRecord` to a [record in Aiken](https://aiken-lang.org/language-tour/custom-types) as follows. ```rust type MyRecord { @@ -183,7 +190,7 @@ type MyRecord { } ``` -we know that Aiken will internally represent this as the following `Data` type +Then, one can observe that Aiken will internally represent this as the following `Data` type ```purescript Constr 0 [a, b] @@ -476,8 +483,9 @@ $ aiken build where the error message makes it clear that it only expects the source of dependencies to be from either GitHub, GitLab, or BitBucket. As such, it's unclear how to augment the local set of packages with a LambdaBuffers package, as the Nix tools would provide a local package. -Indeed, it's most likely possible to trick Aiken into thinking that a LambdaBuffers package is already installed, +Indeed, it's possible to trick Aiken into thinking that a LambdaBuffers package is already installed by preparing Aiken's build directory with the dependencies already included, but this delves into implementation specific details of Aiken that may break between releases. +An example of this technique is [here](https://github.com/mlabs-haskell/uplc-benchmark/blob/master/nix/aiken/lib.nix#L83). ## Alternative milestone 4 outputs From d757112e9aa3fb0ada1cf333502562a6b24a5782 Mon Sep 17 00:00:00 2001 From: jared <> Date: Tue, 4 Jun 2024 22:46:33 -0600 Subject: [PATCH 3/7] General prose improvements --- docs/aiken-integration.md | 162 ++++++++++++++++++++++++++++++-------- 1 file changed, 130 insertions(+), 32 deletions(-) diff --git a/docs/aiken-integration.md b/docs/aiken-integration.md index 26d3494e..26dc43d2 100644 --- a/docs/aiken-integration.md +++ b/docs/aiken-integration.md @@ -70,11 +70,37 @@ This makes it impossible to pass instance dictionaries via Aiken's builtin types Alternatively, one could try to sidestep Aiken's builtin types by creating a type which is a Church encoded tuple (i.e., implementing a tuple type via function types), but doing so requires higher ranked types which again Aiken does not support. -Moreover, it appears that Aiken does not provide any "back doors" to the type system (e.g. TypeScript's `any` type) to trick the type system that using a Church encoded tuple and with its projections are well typed. +Moreover, it appears that Aiken does not provide any "back doors" to the type system (e.g. TypeScript's `any` type) to trick the type system that using a Church encoded tuple and its projections are well typed. It's clear now that having an explicit type for an instance dictionary is not feasible in Aiken, -so owing to the fact that an instance dictionary is a product type of functions, one can replace all instance dictionaries as an argument with multiple arguments with each method, and replace the functions to create an instance dictionary with multiple functions to create each method in the type class. -This is indeed possible in Aiken, and to demonstrate this technique, consider the following translation. +so owing to the fact that an instance dictionary is a product type of functions, one can achieve type classes via dictionary passing by replacing all instance dictionaries as multiple arguments of each method in the type class, and replace the function to create an instance dictionary with multiple functions to create each method in the type class. 
+This is indeed possible in Aiken, and to demonstrate this technique, consider the following Haskell code (which loosely models code generated from LambdaBuffers) + +```haskell +class Eq a where + eq :: a -> a -> Bool + +class PlutusData a where + toData :: a -> Data + fromData :: Data -> a + +data MyOption a = MyJust a | MyNothing + +instance Eq a => Eq (MyOption a) where + eq (MyJust s) (MyJust t) = s == t + eq MyNothing MyNothing = True + eq _ _ = False + +instance PlutusData a => PlutusData (MyOption a) where + toData (MyJust s) = Constr 0 [toData s] + toData MyNothing = Constr 1 [] + + fromData (Constr 0 [s]) = MyJust (fromData s) + fromData (Constr 1 []) = MyNothing + fromData _ = error "bad parse" +``` + +A translation to type class free code in Aiken is as follows. ```rust use aiken/builtin as builtin @@ -342,27 +368,33 @@ Ideally, one would want to change how Aiken encodes its data types internally so Thus, we lose all benefits of LambdaBuffers' efficient encoding when working with Aiken's mechanisms to define types because LambdaBuffers is forced to take an extra step to translate to Aiken's inefficient encoding. As such, Aiken's opinionated way of encoding its data is at odds with LambdaBuffers. -To resolve the mismatch in the encoding of data between the two, one could alternatively sidestep all of Aiken's methods for defining types and instead use Aiken's opaque types to alias `Data` and provide ones own constructors / record accesses as follows +To resolve the mismatch in the encoding of data between the two, one could alternatively sidestep all of Aiken's methods for defining types and instead use Aiken's opaque types to alias `Data` and provide ones own constructors / record accesses as follows. ```rust use aiken/builtin as builtin pub opaque type MyRecord { data: Data } -pub fn myRecord(a: Int) -> MyRecord { - MyRecord{ data : builtin.list_data([builtin.i_data(a)]) } +// Constructor for `MyRecord` +pub fn myRecord(a: Int, b: Int) -> MyRecord { + MyRecord{ data : builtin.list_data([builtin.i_data(a), builtin.i_data(b)]) } } +// Projection for the field `a` of `MyRecord` pub fn myRecord_a(value : MyRecord) -> Int { builtin.un_i_data(builtin.head_list(builtin.un_list_data(value))) } -// Example program: +// Projection for the field `b` of `MyRecord` +pub fn myRecord_b(value : MyRecord) -> Int { + builtin.un_i_data(builtin.head_list(builtin.tail_list(builtin.un_list_data(value)))) +} + validator { pub fn hello_world(_redeemer: Data, _scriptContext: Data) { - let theRecord = myRecord(69) + let theRecord = myRecord(69, -69) - myRecord_a(theRecord) == 420 + myRecord_a(theRecord) == 420 && myRecord_b(theRecord) == -420 } } ``` @@ -381,47 +413,93 @@ Interested readers may inspect the compiled UPLC to verify that the data encodin [ (lam i_2 - (lam - i_3 + [ (lam - i_4 - (force - [ - [ + i_3 + (lam + i_4 + (lam + i_5 + (force [ - i_2 [ [ - (builtin equalsInteger) + i_3 [ - (builtin unIData) + (lam + i_6 + (force + [ + [ + [ + i_3 + [ + [ + (builtin equalsInteger) + [ + (builtin unIData) + [ + i_1 + [ (builtin unListData) i_6 ] + ] + ] + ] + (con integer 420) + ] + ] + (delay + [ + [ + (builtin equalsInteger) + [ + (builtin unIData) + [ + i_1 + [ + i_0 + [ (builtin unListData) i_6 ] + ] + ] + ] + ] + (con integer -420) + ] + ) + ] + (delay (con bool False)) + ] + ) + ) [ - i_0 + (builtin listData) [ - [ i_1 (con data (I 69)) ] - (con (list data) []) + [ i_2 (con data (I 69)) ] + [ + [ i_2 (con data (I -69)) ] + (con (list data) []) + ] ] ] ] ] - (con integer 420) + (delay (con unit 
())) ] + (delay [ (error ) (force (error )) ]) ] - (delay (con unit ())) - ] - (delay [ (error ) (force (error )) ]) - ] + ) + ) ) ) - ) + (force (builtin ifThenElse)) + ] ) - (force (builtin ifThenElse)) + (force (builtin mkCons)) ] ) - (force (builtin mkCons)) + (force (builtin headList)) ] ) - (force (builtin headList)) + (force (builtin tailList)) ] ) ``` @@ -482,11 +560,31 @@ $ aiken build where the error message makes it clear that it only expects the source of dependencies to be from either GitHub, GitLab, or BitBucket. -As such, it's unclear how to augment the local set of packages with a LambdaBuffers package, as the Nix tools would provide a local package. -Indeed, it's possible to trick Aiken into thinking that a LambdaBuffers package is already installed by preparing Aiken's build directory with the dependencies already included, +As such, it's unclear how to augment the local set of packages with a LambdaBuffers package. +Indeed, it's possible to trick Aiken into thinking that a LambdaBuffers package is already installed by preparing Aiken's build directory with the dependencies copied in ahead of time, but this delves into implementation specific details of Aiken that may break between releases. An example of this technique is [here](https://github.com/mlabs-haskell/uplc-benchmark/blob/master/nix/aiken/lib.nix#L83). +## Summary of Aiken limitations + +This section summarizes the Aiken limitations and incompatibilities with LambdaBuffers. + +1. Aiken has no type classes, but LambdaBuffers requires type classes. As such, LambdaBuffers support for Aiken would require its own implementation of type classes. + Unfortunately, implementing type classes is awkward in Aiken because composite data types in Aiken cannot store functions. + It can be argued that this awkwardness creates a poor user experience for an Aiken developer, but this can be mitigated by the fact that the only type classes LambdaBuffers generates are relatively simplistic. + +2. Aiken's PlutusData representation of its data types is different from LambdaBuffers' representation of PlutusData. + This means that we have a choice of either: + + * Translating LambdaBuffers types to Aiken's builtin composite types which would lead to inefficient code in the already constrained onchain code environment since this would be "massaging" PlutusData representations when we would really want Aiken to use LambdaBuffers PlutusData encoding directly. + + * Translating LambdaBuffers types to a opaque type alias in Aiken which would then require us to generate supporting functions for constructors and destructors which would make Aiken's major language features obsolete, and so have a poor user experience. + + To put this more explicitly, we either have inefficient code with a nice user experience for an Aiken developer, or efficient code with an awful user experience for an Aiken developer. + +3. Creating local package sets in Aiken is unclear, but creating such local package sets is a principle feature of LambdaBuffers. + Indeed, there are tricks one can do to work around this, but this depends on internal implementation details of Aiken that may break between releases. 
+
 
 ## Alternative milestone 4 outputs
 
 Seeing the aforementioned incompatibilities between Aiken and LambdaBuffers,

From 5e41069396ee559c9c908e8b0c43628b8509e039 Mon Sep 17 00:00:00 2001
From: jared <>
Date: Thu, 6 Jun 2024 03:50:15 -0600
Subject: [PATCH 4/7] Draft of Slack feedback

---
 docs/aiken-integration.md | 211 ++++++++++++++++++++++++++------------
 1 file changed, 143 insertions(+), 68 deletions(-)

diff --git a/docs/aiken-integration.md b/docs/aiken-integration.md
index 26dc43d2..8ac304bc 100644
--- a/docs/aiken-integration.md
+++ b/docs/aiken-integration.md
@@ -1,10 +1,8 @@
-# Aiken Integration Report
-This document describes LambdaBuffers integration with Aiken.
-It appears that, at Aiken's best,
-while it is possible to provide limited support for a subset of LambdaBuffers' features,
-integration with LambdaBuffers would provide a poor user experience that conflicts with either the major language features of Aiken or the key functionalities provided by LambdaBuffers.
-As such, this document describes the challenges of integrating LambdaBuffers with Aiken,
-and proposes alternative Milestone 4 outputs to better foster adoption of LambdaBuffers within the Cardano ecosystem.
+# Lambda Buffers: Aiken Research Document
+
+The Lambda Buffers team has deeply researched the Aiken programming language with the intention of finding a technical path to integrating it with Lambda Buffers alongside the already integrated languages Plutus, Haskell, Rust, and JavaScript.
+The conclusion of this research phase is that, while it would indeed be possible for Lambda Buffers to provide limited support for Aiken, doing so would result in a poor user experience, in conflict either with the major language features of Aiken or with the key functionalities provided by Lambda Buffers.
+This document presents in detail the challenges found and their impact on the feasibility and practicality of undertaking the Aiken integration.
 
 ## Aiken limitations
 
@@ -19,7 +17,7 @@ aiken v1.0.28-alpha+c9a1519
 
 ### Aiken has no type class support
 
-A key feature of LambdaBuffers is to provide both types and type class instances.
+A key feature of Lambda Buffers is to provide both types and type class instances.
 Aiken has no support for type classes, so one must implement the type class system oneself.
 In other words, one must provide:
 
@@ -74,7 +72,7 @@ It's clear now that having an explicit type for an instance dictionary is not feasible in Aiken,
 so owing to the fact that an instance dictionary is a product type of functions, one can achieve type classes via dictionary passing by replacing all instance dictionaries as multiple arguments of each method in the type class, and replace the function to create an instance dictionary with multiple functions to create each method in the type class.
-This is indeed possible in Aiken, and to demonstrate this technique, consider the following Haskell code (which loosely models code generated from LambdaBuffers)
+This is indeed possible in Aiken, and to demonstrate this technique, consider the following Haskell code (which loosely models code generated from Lambda Buffers)
@@ -182,13 +182,52 @@ This translation of type classes has some limitations, such as:
 
 * All type class instances must be defined in the same module that the type is defined in, i.e., orphan instances are forbidden.
 
 * Only Haskell2010 type classes would be supported.
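+
+For instance, a multi parameter type class such as the following hypothetical Haskell class (not a class that Lambda Buffers generates) requires the `MultiParamTypeClasses` extension, placing it outside of Haskell2010 and hence outside of what this translation scheme would support.
+
+```haskell
+{-# LANGUAGE MultiParamTypeClasses #-}
+
+-- Instance selection here is driven by a pair of types rather than a
+-- single type, which Haskell2010 type classes do not allow.
+class Convert a b where
+  convert :: a -> b
+```
+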
-### Aiken's encoding of its data is different from LambdaBuffers encoding +While the above has demonstrated how one can translate type class instances in Lambda Buffers to type class free code in Aiken, +this unfortunately leads to a bad user experience for the "builtin" PlutusData type class in Aiken. +Aiken by default "generates" its own PlutusData instances for all composite types. +As such, Aiken provides some nice syntactic features to make writing smart contracts particularly readable. + +A common pattern to write a validator in Aiken is as follows. + +```rust +pub type MyRecord {a : t, b : Int } + +validator { + pub fn hello_world(_redeemer: MyRecord, _scriptContext: Data) { + // ^~~~ this will automatically use Aiken's builtin PlutusData instances + ... + } +} +``` + +Unfortunately, with the type class system described in this section, +an Aiken developer will no longer be able to write this (since Lambda Buffers would generate its own PlutusData instances that are not used by Aiken) +and instead must write the validator more verbosely as follows. + +```rust +pub type MyRecord {a : t, b : Int } + +validator { + pub fn hello_world(redeemer: Data, _scriptContext: Data) { + let actualRedeemer = myRecordFromData(intFromData)(redeemer) + // ^~~~ Aiken users need to write more code in order to use Lambda + // Buffers so that it will use Lambda Buffers' encoding for the + // validator. Note that this sample assumes that `myRecordFromData :: (Data -> t) -> Data -> MyRecord` + // exists as would be generated by Lambda Buffers. + ... + } +} +``` + +Clearly, this increase in code bloat to express the same simple idea contradicts the promises of making smart contracts easy to write on Aiken. + +### Aiken's encoding of its data is different from Lambda Buffers encoding All onchain scripts must be compiled to UPLC which must in some method represent the higher level language constructs like data types in the original language. Often, data types in a higher level language are translated to UPLC's builtin `Data` type which supports types like lists, constructors, integers, bytestrings, and maps. Note that data which will exist as a datum or redeemer must admit a representation with this `Data` type. -LambdaBuffers chooses a particularly efficient encoding of its data types to `Data` mapping to its target languages that map to UPLC. +Lambda Buffers chooses a particularly efficient encoding of its data types to `Data` mapping to its target languages that map to UPLC. For example, a record like ```purescript @@ -205,9 +242,9 @@ would be translated to i.e., records are lists of all record components[^recordsSpecialCases]. -[^recordsSpecialCases]: There are some special cases for the encoding in LambdaBuffers. For example, singleton records are encoded as just the single element. +[^recordsSpecialCases]: There are some special cases for the encoding in Lambda Buffers. For example, singleton records are encoded as just the single element. -If LambdaBuffers compiled `MyRecord` to a [record in Aiken](https://aiken-lang.org/language-tour/custom-types) as follows. +If Lambda Buffers compiled `MyRecord` to a [record in Aiken](https://aiken-lang.org/language-tour/custom-types) as follows. ```rust type MyRecord { @@ -222,7 +259,7 @@ Then, one can observe that Aiken will internally represent this as the following Constr 0 [a, b] ``` -where we note that Aiken includes a useless `Constr 0` tag meaning Aiken's encoding is less efficient than LambdaBuffers' encoding. 
+where we note that Aiken includes a useless `Constr 0` tag meaning Aiken's encoding is less efficient than Lambda Buffers' encoding. In general, Aiken's documentation for the encoding from Aiken's types to UPLC `Data` is unclear, but one can inspect the generated UPLC to verify that Aiken would encode the data as mentioned above. @@ -362,11 +399,11 @@ In particular, the following lines are evidence to support that the record is en ] ``` -This is awkward for LambdaBuffers since when Aiken works with the `MyRecord` type, -it is represented with the `Constr` tag meaning that LambdaBuffers' efficient encoding would be translated to Aiken's inefficient encoding. -Ideally, one would want to change how Aiken encodes its data types internally so that Aiken can use LambdaBuffers' efficient encoding everywhere. -Thus, we lose all benefits of LambdaBuffers' efficient encoding when working with Aiken's mechanisms to define types because LambdaBuffers is forced to take an extra step to translate to Aiken's inefficient encoding. -As such, Aiken's opinionated way of encoding its data is at odds with LambdaBuffers. +This is awkward for Lambda Buffers since when Aiken works with the `MyRecord` type, +it is represented with the `Constr` tag meaning that Lambda Buffers' efficient encoding would be translated to Aiken's inefficient encoding. +Ideally, one would want to change how Aiken encodes its data types internally so that Aiken can use Lambda Buffers' efficient encoding everywhere. +Thus, we lose all benefits of Lambda Buffers' efficient encoding when working with Aiken's mechanisms to define types because Lambda Buffers is forced to take an extra step to translate to Aiken's inefficient encoding. +As such, Aiken's opinionated way of encoding its data is at odds with Lambda Buffers. To resolve the mismatch in the encoding of data between the two, one could alternatively sidestep all of Aiken's methods for defining types and instead use Aiken's opaque types to alias `Data` and provide ones own constructors / record accesses as follows. @@ -399,7 +436,7 @@ validator { } ``` -Interested readers may inspect the compiled UPLC to verify that the data encoding of `MyRecord` agrees with LambdaBuffers' encoding. +Interested readers may inspect the compiled UPLC to verify that the data encoding of `MyRecord` agrees with Lambda Buffers' encoding. ```purescript (program @@ -506,12 +543,44 @@ Interested readers may inspect the compiled UPLC to verify that the data encodin Note that this would most likely offer a poor user experience as this would essentially replace a large part of Aiken's language construct with our own generated functions for constructing, deconstructing, serialization / deserialization to `Data`, etc. +In either case, +to mediate the Data serialization / deserialization mismatch of Aiken and Lambda Buffers, +it puts a bulkier mental overhead on the Aiken developer. +As in the previous section, an Aiken developer would expect to write a validator as follows. + +```rust +pub type MyRecord {a : Int, b : Int } + +validator { + pub fn hello_world(_redeemer: MyRecord, _scriptContext: Data) { + // ^~~~ this will automatically use Aiken's builtin Data serialization and deserialization + ... + } +} +``` + +But, any of the solutions to mediate the Data encoding mismatch of Aiken and Lambda Buffers would force an Aiken developer to instead write a more verbose validator as follows. 
+ +```rust +pub type MyRecord {a : Int, b : Int } + +validator { + pub fn hello_world(redeemer: Data, _scriptContext: Data) { + let actualRedeemer = myRecordFromData(redeemer) + // ^~~~ Assume that Lambda Buffers would generate `myRecordFromData :: Data -> MyRecord` + ... + } +} +``` + +Again, it's clear this contradicts Aiken's goal of making writing smart contracts easy as Lambda Buffers integration would increase the mental overhead of working with all generated data types. + ### Aiken's packages only support fetching dependencies remotely -LambdaBuffers is more than just a code generator. -In addition to generating code for sharing types and semantics, its Nix tools augment a set of packages for a project with a package of the generated LambdaBuffers code. +Lambda Buffers is more than just a code generator. +In addition to generating code for sharing types and semantics, its Nix tools augment a set of packages for a project with a package of the generated Lambda Buffers code. -Aiken does support having packages, but it appears that it only officially supports fetching packages from either Github, GitLab, or BitBucket i.e., it's unclear how to create a local package set augmented with LambdaBuffers' packages. +Aiken does support having packages, but it appears that it only officially supports fetching packages from either Github, GitLab, or BitBucket i.e., it's unclear how to create a local package set augmented with Lambda Buffers' packages. For example, the following `aiken.toml` file @@ -560,63 +629,69 @@ $ aiken build where the error message makes it clear that it only expects the source of dependencies to be from either GitHub, GitLab, or BitBucket. -As such, it's unclear how to augment the local set of packages with a LambdaBuffers package. -Indeed, it's possible to trick Aiken into thinking that a LambdaBuffers package is already installed by preparing Aiken's build directory with the dependencies copied in ahead of time, +As such, it's unclear how to augment the local set of packages with a Lambda Buffers package. +Indeed, it's possible to trick Aiken into thinking that a Lambda Buffers package is already installed by preparing Aiken's build directory with the dependencies copied in ahead of time, but this delves into implementation specific details of Aiken that may break between releases. An example of this technique is [here](https://github.com/mlabs-haskell/uplc-benchmark/blob/master/nix/aiken/lib.nix#L83). ## Summary of Aiken limitations -This section summarizes the Aiken limitations and incompatibilities with LambdaBuffers. +This section summarizes the Aiken limitations and incompatibilities with Lambda Buffers. -1. Aiken has no type classes, but LambdaBuffers requires type classes. As such, LambdaBuffers support for Aiken would require its own implementation of type classes. +1. Aiken has no type classes, but Lambda Buffers requires type classes. As such, Lambda Buffers support for Aiken would require its own implementation of type classes. Unfortunately, implementing type classes is awkward in Aiken because composite data types in Aiken cannot store functions. - It can be argued that this awkwardness creates a poor user experience for an Aiken developer, but this can be mitigated by the fact that the only type classes LambdaBuffers generates are relatively simplistic. 
+ While there are work arounds to implement type classes in Aiken, + this fundamentally will create a poor user experience for an Aiken developer as using Lambda Buffers' generated type classes such as PlutusData would be at odds with the builtin syntactic goodies of Aiken's default PlutusData type class instances. -2. Aiken's PlutusData representation of its data types is different from LambdaBuffers' representation of PlutusData. +2. Aiken's PlutusData representation of its data types is different from Lambda Buffers' representation of PlutusData. This means that we have a choice of either: - * Translating LambdaBuffers types to Aiken's builtin composite types which would lead to inefficient code in the already constrained onchain code environment since this would be "massaging" PlutusData representations when we would really want Aiken to use LambdaBuffers PlutusData encoding directly. + * Translating Lambda Buffers types to Aiken's builtin composite types which would lead to inefficient code in the already constrained onchain code environment since this would be "massaging" PlutusData representations when we would really want Aiken to use Lambda Buffers PlutusData encoding directly. - * Translating LambdaBuffers types to a opaque type alias in Aiken which would then require us to generate supporting functions for constructors and destructors which would make Aiken's major language features obsolete, and so have a poor user experience. + * Translating Lambda Buffers types to a opaque type alias in Aiken which would then require us to generate supporting functions for constructors and destructors which would make Aiken's major language features obsolete, and so have a poor user experience. - To put this more explicitly, we either have inefficient code with a nice user experience for an Aiken developer, or efficient code with an awful user experience for an Aiken developer. + To put this more explicitly, we either have inefficient code with a somewhat nice user experience for an Aiken developer, or efficient code with an awful user experience for an Aiken developer. -3. Creating local package sets in Aiken is unclear, but creating such local package sets is a principle feature of LambdaBuffers. +3. Creating local package sets in Aiken is unclear, but creating such local package sets is a principle feature of Lambda Buffers. Indeed, there are tricks one can do to work around this, but this depends on internal implementation details of Aiken that may break between releases. -## Alternative milestone 4 outputs - -Seeing the aforementioned incompatibilities between Aiken and LambdaBuffers, -instead of hacking around the foundational design decisions of Aiken and LambdaBuffers to create an Aiken backend with limited support and a poor user experience, -we strongly believe that milestone 4 would be better spent to improve the existing LambdaBuffers stack. -In particular, LambdaBuffers has seen use in other projects such as [DeNS](https://github.com/mlabs-haskell/DeNS/tree/main), OrcFax, etc. -and we've received feedback to better the LambdaBuffers existing facilities so addressing this feedback would aid in fostering adoption of LambdaBuffers in other projects. - -As such, for milestone 4, we propose to provide the following instead: - -Bugs: - -* Haskell backend bugs. 
- - * [Generated Haskell code is invalid](https://github.com/mlabs-haskell/lambda-buffers/issues/197) - - * [Missing dependencies from the generated files](https://github.com/mlabs-haskell/lambda-buffers/issues/124) - -* Plutarch backend bugs. - - * [Generated Plutarch code is invalid](https://github.com/mlabs-haskell/lambda-buffers/issues/148) - -* [Optimizing the LambdaBuffers compiler performance](https://github.com/mlabs-haskell/lambda-buffers/issues/76) - -Features: - -* [Completing the Plutus `.lbf` schemas to include all Plutus Ledger API types](https://github.com/mlabs-haskell/lambda-buffers/issues/175) - -* [Creating a versioning scheme](https://github.com/mlabs-haskell/lambda-buffers/issues/220) - -* [Separate the PlutusTx backend from a Haskell Plutus backend](https://github.com/mlabs-haskell/lambda-buffers/issues/221) - -* [Optimizing slow nix build times](https://github.com/mlabs-haskell/lambda-buffers/pull/193#issuecomment-1942114795) - -* [Improving error messages for better editor integration](https://github.com/mlabs-haskell/lambda-buffers/issues/152) +All in all, at the moment it's clear that while it may be possible to integrate Aiken with Lambda Buffers, such integration would have + +* limited support for Lambda Buffers' key features; and + +* a poor user experience for Aiken developers that use Lambda Buffers. + +So, the extra effort needed to mitigate these challenges appear to be counter productive with Aiken's and Lambda Buffers' project goals. +Moreover, Aiken is still in an alpha release and is rapidly changing, so the effort to mitigate these challenges would be squandered away as Aiken evolves. +Thus, given these challenges, it's clear that it would be unwise to undertake the Aiken implementation currently, +and it would be wiser to revisit this later and focus on matters of pressing importance today to better foster adoption of Lambda Buffers. + +Lambda Buffers has fortunately seen industry use in other projects such as [DeNS](https://github.com/mlabs-haskell/DeNS/tree/main), OrcFax, etc., +and there's been feedback to improve the existing facilities in Lambda Buffers which would aid in fostering the adoption of Lambda Buffers in the greater Cardano community. +Some of these issues include the following. + +* Bugs: + + * Haskell backend bugs. + + * [Generated Haskell code is invalid](https://github.com/mlabs-haskell/lambda-buffers/issues/197) + + * [Missing dependencies from the generated files](https://github.com/mlabs-haskell/lambda-buffers/issues/124) + + * Plutarch backend bugs. 
+ + * [Generated Plutarch code is invalid](https://github.com/mlabs-haskell/lambda-buffers/issues/148) + + * [Optimizing the Lambda Buffers compiler performance](https://github.com/mlabs-haskell/lambda-buffers/issues/76) + +* Features: + + * [Completing the Plutus `.lbf` schemas to include all Plutus Ledger API types](https://github.com/mlabs-haskell/lambda-buffers/issues/175) + + * [Creating a versioning scheme](https://github.com/mlabs-haskell/lambda-buffers/issues/220) + + * [Separate the PlutusTx backend from a Haskell Plutus backend](https://github.com/mlabs-haskell/lambda-buffers/issues/221) + + * [Optimizing slow nix build times](https://github.com/mlabs-haskell/lambda-buffers/pull/193#issuecomment-1942114795) + + * [Improving error messages for better editor integration](https://github.com/mlabs-haskell/lambda-buffers/issues/152) From fe43d302fc6aac73c916f69aa78a8bcabf601d01 Mon Sep 17 00:00:00 2001 From: jared <> Date: Fri, 7 Jun 2024 01:49:20 -0600 Subject: [PATCH 5/7] Correct typos + improved wording. --- docs/aiken-integration.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/aiken-integration.md b/docs/aiken-integration.md index 8ac304bc..bf4306ab 100644 --- a/docs/aiken-integration.md +++ b/docs/aiken-integration.md @@ -40,7 +40,7 @@ type EqDict { } ``` -would produce an error like +would produce an error as follows. ```bash $ aiken build @@ -405,7 +405,9 @@ Ideally, one would want to change how Aiken encodes its data types internally so Thus, we lose all benefits of Lambda Buffers' efficient encoding when working with Aiken's mechanisms to define types because Lambda Buffers is forced to take an extra step to translate to Aiken's inefficient encoding. As such, Aiken's opinionated way of encoding its data is at odds with Lambda Buffers. -To resolve the mismatch in the encoding of data between the two, one could alternatively sidestep all of Aiken's methods for defining types and instead use Aiken's opaque types to alias `Data` and provide ones own constructors / record accesses as follows. +To resolve the mismatch in the encoding of data between the two, +one could alternatively sidestep all of Aiken's methods for defining types +and instead use Aiken's opaque types to alias `Data` and provide ones own constructors / record accesses as follows. ```rust use aiken/builtin as builtin @@ -546,7 +548,7 @@ Note that this would most likely offer a poor user experience as this would esse In either case, to mediate the Data serialization / deserialization mismatch of Aiken and Lambda Buffers, it puts a bulkier mental overhead on the Aiken developer. -As in the previous section, an Aiken developer would expect to write a validator as follows. +For example, as in the previous section, an Aiken developer would expect to write a validator as follows. ```rust pub type MyRecord {a : Int, b : Int } @@ -666,7 +668,7 @@ Moreover, Aiken is still in an alpha release and is rapidly changing, so the eff Thus, given these challenges, it's clear that it would be unwise to undertake the Aiken implementation currently, and it would be wiser to revisit this later and focus on matters of pressing importance today to better foster adoption of Lambda Buffers. 
-Lambda Buffers has fortunately seen industry use in other projects such as [DeNS](https://github.com/mlabs-haskell/DeNS/tree/main), OrcFax, etc., +Lambda Buffers has fortunately seen industry use in other projects such as [DeNS](https://github.com/mlabs-haskell/DeNS/tree/main), [TripHut DAO](https://github.com/yaadlabs/DAO-Off-Chain), etc., and there's been feedback to improve the existing facilities in Lambda Buffers which would aid in fostering the adoption of Lambda Buffers in the greater Cardano community. Some of these issues include the following. From a3ad26d32de349c2dcf1eb8da6747a60f9476a10 Mon Sep 17 00:00:00 2001 From: jared <> Date: Fri, 7 Jun 2024 01:52:45 -0600 Subject: [PATCH 6/7] principle --> principal --- docs/aiken-integration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/aiken-integration.md b/docs/aiken-integration.md index bf4306ab..fc0077e5 100644 --- a/docs/aiken-integration.md +++ b/docs/aiken-integration.md @@ -654,7 +654,7 @@ This section summarizes the Aiken limitations and incompatibilities with Lambda To put this more explicitly, we either have inefficient code with a somewhat nice user experience for an Aiken developer, or efficient code with an awful user experience for an Aiken developer. -3. Creating local package sets in Aiken is unclear, but creating such local package sets is a principle feature of Lambda Buffers. +3. Creating local package sets in Aiken is unclear, but creating such local package sets is a principal feature of Lambda Buffers. Indeed, there are tricks one can do to work around this, but this depends on internal implementation details of Aiken that may break between releases. All in all, at the moment it's clear that while it may be possible to integrate Aiken with Lambda Buffers, such integration would have From 8c3c08440d4f2a025da3139bb5f4140f82f3c2d5 Mon Sep 17 00:00:00 2001 From: jared <> Date: Mon, 10 Jun 2024 12:20:51 -0600 Subject: [PATCH 7/7] Added Aiken Research Document to the mdbook --- docs/SUMMARY.md | 2 ++ docs/book.toml | 2 +- docs/catalyst10-reports/milestone-3.md | 32 +++++++++++--------------- 3 files changed, 17 insertions(+), 19 deletions(-) diff --git a/docs/SUMMARY.md b/docs/SUMMARY.md index fa41913e..64421464 100644 --- a/docs/SUMMARY.md +++ b/docs/SUMMARY.md @@ -21,6 +21,7 @@ - [Codegen](codegen.md) - [Command line interface](command-line-interface.md) - [Comparison matrix](comparison-matrix.md) +- [Aiken Research Document](aiken-integration.md) - [Catalyst reports](catalyst-reports.md) - [Catalyst 9 reports](catalyst09-reports/README.md) - [Milestone 1: Research](catalyst09-reports/milestone-1.md) @@ -30,3 +31,4 @@ - [Catalyst 10 reports](catalyst10-reports/README.md) - [Milestone 1: Rust support](catalyst10-reports/milestone-1.md) - [Milestone 2: Javascript/Typescript support](catalyst10-reports/milestone-2.md) + - [Milestone 3: Aiken integration research and development](catalyst10-reports/milestone-3.md) diff --git a/docs/book.toml b/docs/book.toml index 32204690..90fad0fa 100644 --- a/docs/book.toml +++ b/docs/book.toml @@ -4,4 +4,4 @@ authors = [ language = "en" multilingual = false src = "." 
-title = "Lambda Buffers documentation" \ No newline at end of file +title = "Lambda Buffers documentation" diff --git a/docs/catalyst10-reports/milestone-3.md b/docs/catalyst10-reports/milestone-3.md index 475e0ddc..f7f1bcbb 100644 --- a/docs/catalyst10-reports/milestone-3.md +++ b/docs/catalyst10-reports/milestone-3.md @@ -1,30 +1,26 @@ - # Catalyst milestone 3: Aiken integration research and development ## Outputs -- [ ] Document describing Aiken integration and how PlutusData encodings can be provided by the LambdaBuffers toolkit. - NOTE: We cannot ignore the fact that the outcome of the research done in Milestone 3 could point to incompatibilities that make an Aiken integration with LambdaBuffers infeasible. Should that be the case, an in-depth technical explanation of the limitations will be provided. +- [x] Document describing Aiken integration and how PlutusData encodings can be provided by the Lambda Buffers toolkit. + NOTE: We cannot ignore the fact that the outcome of the research done in Milestone 3 could point to incompatibilities that make an Aiken integration with Lambda Buffers infeasible. Should that be the case, an in-depth technical explanation of the limitations will be provided. + - The document can be found [here](../aiken-integration.md). ## Acceptance Criteria -- [ ] The generated document describes all the technical components relevant to the integration of Aiken with Lambda Buffers, showing a good understanding of them and how they would be relevant to the intended integration. -- [ ] The generated document describes a clear technical path to integrate Aiken with Lambda buffers and, in addition or alternatively, an in-depth analysis of any limitations found. +- [x] The generated document describes all the technical components relevant to the integration of Aiken with Lambda Buffers, showing a good understanding of them and how they would be relevant to the intended integration. + - The document discusses relevant technical components of Aiken integration with Lambda Buffers. +- [x] The generated document describes a clear technical path to integrate Aiken with Lambda buffers and, in addition or alternatively, an in-depth analysis of any limitations found. + - The document discusses limitations of Aiken and its incompatibilities with Lambda Buffers along with workarounds when possible. ## Evidence of Milestone Completion -- [ ] The completed and reviewed "LambdaBuffers for Aiken - PlutusData encoding implementation provider" documentation is available. -- [ ] Alternatively, if Aiken integration proves to be infeasible: -- [ ] The completed and reviewed documentation describing the Aiken limitations and their impact is available. - -## Demo recordings - -- [ ] - -Demo files: - -- [ ] +- [x] The completed and reviewed "Lambda Buffers for Aiken - PlutusData encoding implementation provider" documentation is available. + - Methods to workaround the incompatibilities of Aiken and Lambda Buffers where possible are provided in the document. +- [x] Alternatively, if Aiken integration proves to be infeasible: +- [x] The completed and reviewed documentation describing the Aiken limitations and their impact is available. + - The discussion of the limitations and whether they are worthwhile are also included in the document. -## References +## Demo files -- [ ] +- [x] The document *[Aiken Research Document](../aiken-integration.md)* addresses these outputs.