Merge remote-tracking branch 'origin/dev' into rinderknecht-dev
This commit is contained in: commit c7ffa6d563
@ -40,6 +40,39 @@ title: Cheat Sheet
|Transactions|<pre><code>const payment : operation = transaction(unit, amount, receiver);</code></pre>|
|Exception/Failure|`failwith("Your descriptive error message for the user goes here.")`|

<!--CameLIGO-->

|Primitive |Example|
|--- |---|
|Strings | `"Tezos"`|
|Characters | `"t"`|
|Integers | `42`, `7`|
|Natural numbers | `42n`, `7n`|
|Unit| `unit`|
|Boolean|<pre><code>let has_drivers_license: bool = false<br/>let adult: bool = true</code></pre> |
|Boolean Logic|<pre><code>(not true) = false = (false && true) = (false || false)</code></pre>|
|Mutez (micro tez)| `42mutez`, `7mutez` |
|Address | `("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address)`, `("KT1JepfBfMSqkQyf9B1ndvURghGsSB8YCLMD": address)`|
|Addition |`3 + 4`, `3n + 4n`|
|Multiplication & Division| `3 * 4`, `3n * 4n`, `10 / 5`, `10n / 5n`|
|Modulo| `10 mod 3`|
|Tuples| <pre><code>type name = (string * string)<br/>let winner: name = "John", "Doe"<br/>let first_name: string = winner.0<br/>let last_name: string = winner.1</code></pre>|
|Types|`type age = int`, `type name = string` |
|Includes|```#include "library.mligo"```|
|Functions |<pre><code>let add (a : int) (b : int) : int = a + b </code></pre>|
| If Statement | <pre><code>let new_id: int = if age < 16 <br/> then failwith("Too young to drive.") <br/> else prev_id + 1</code></pre>|
|Options|<pre><code>type middle_name = string option<br/>let middle_name : middle_name = Some "Foo"<br/>let middle_name : middle_name = None</code></pre>|
|Variable Binding | ```let age: int = 5```|
|Type Annotations| ```("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx" : address)```|
|Variants|<pre><code>type action =<br/>| Increment of int<br/>| Decrement of int</code></pre>|
|Variant *(pattern)* matching|<pre><code>let a: action = Increment 5<br/>match a with<br/>| Increment n -> n + 1<br/>| Decrement n -> n - 1<br/></code></pre>|
|Records|<pre><code>type person = {<br/> age: int ;<br/> name: string ;<br/>}<br/><br/>let john : person = {<br/> age = 18;<br/> name = "John Doe";<br/>}<br/><br/>let name: string = john.name</code></pre>|
|Maps|<pre><code>type prices = (nat, tez) map<br/><br/>let prices : prices = Map.literal [<br/> (10n, 60mutez);<br/> (50n, 30mutez);<br/> (100n, 10mutez)<br/>]<br/><br/>let price: tez option = Map.find 50n prices<br/><br/>let prices: prices = Map.update 200n 5mutez prices</code></pre>|
|Contracts & Accounts|<pre><code>let destination_address : address = "tz1..."<br/>let contract : unit contract = <br/> Operation.get_contract destination_address</code></pre>|
|Transactions|<pre><code>let payment : operation = <br/> Operation.transaction (unit, receiver, amount)</code></pre>|
|Exception/Failure|`failwith("Your descriptive error message for the user goes here.")`|

<!--END_DOCUSAURUS_CODE_TABS-->
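The table above lists the CameLIGO constructs in isolation. As a quick orientation, here is a minimal sketch (not part of the original cheat sheet; the `user` type, `alice` value, and `is_adult` predicate are illustrative) that combines a type alias, a record, record access, and a comparison:

```cameligo
(* Illustrative only: a type alias, a record type, a record value, and a predicate *)
type age = int
type user = { name: string; age: age }

let alice: user = { name = "Alice"; age = 42 }

(* record access plus an integer comparison *)
let is_adult (u: user) : bool = u.age >= 18
```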
@ -15,7 +15,7 @@ Here's how to define a boolean:
const a: bool = True;
const b: bool = False;
```
<!--Cameligo-->
<!--CameLIGO-->
```cameligo
let a: bool = true
let b: bool = false
@ -43,7 +43,7 @@ const b: string = "Alice";
// True
const c: bool = (a = b);
```
<!--Cameligo-->
<!--CameLIGO-->
```cameligo
let a: string = "Alice"
let b: string = "Alice"
@ -54,7 +54,7 @@ let c: bool = (a = b)
```reasonligo
let a: string = "Alice";
let b: string = "Alice";
// true
/* true */
let c: bool = (a == b);
```
<!--END_DOCUSAURUS_CODE_TABS-->
@ -74,7 +74,7 @@ const f: bool = (a <= b);
|
||||
const g: bool = (a >= b);
|
||||
const h: bool = (a =/= b);
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let a: int = 5
|
||||
let b: int = 4
|
||||
@ -111,7 +111,7 @@ const a: tez = 5mutez;
|
||||
const b: tez = 10mutez;
|
||||
const c: bool = (a = b);
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let a: tez = 5mutez
|
||||
let b: tez = 10mutez
|
||||
@ -122,7 +122,7 @@ let c: bool = (a = b)
|
||||
```reasonligo
|
||||
let a: tez = 5mutez;
|
||||
let b: tez = 10mutez;
|
||||
// false
|
||||
/* false */
|
||||
let c: bool = (a == b);
|
||||
```
|
||||
<!--END_DOCUSAURUS_CODE_TABS-->
|
||||
@ -161,7 +161,7 @@ function is_adult(const age: nat): bool is
|
||||
> ligo run-function -s pascaligo src/if-else.ligo is_adult 21n
|
||||
> ```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let min_age: nat = 16n
|
||||
|
||||
|
@ -55,7 +55,7 @@ Instead, you can inline the necessary logic directly, like this:
function add(const a: int; const b: int): int is a + b
```

<!--Cameligo-->
<!--CameLIGO-->

Functions in CameLIGO are defined using the `let` keyword, like value bindings.
The difference is that after the value name a list of function parameters is provided,
@ -80,7 +80,7 @@ along with a return type.
Here's how you define a basic function that accepts two `ints` and returns an `int` as well:

```reasonligo
let add (a: int, b: int) : int = a + b;
let add = (a: int, b: int) : int => a + b;
```

The function body is a series of expressions, which are evaluated to give the return
@ -110,7 +110,7 @@ const increment : (int -> int) = (function (const i : int) : int is i + 1);
const a: int = increment(1);
```

<!--Cameligo-->
<!--CameLIGO-->
```cameligo
let increment : (int -> int) = fun (i: int) -> i + 1
```
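For reference, a minimal CameLIGO sketch of the curried function definition described above; the `eight` call site is illustrative and not part of the original docs:

```cameligo
(* a curried two-argument function, as described above *)
let add (a : int) (b : int) : int = a + b

(* illustrative call site: both arguments applied at once *)
let eight : int = add 5 3
```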
@ -17,7 +17,7 @@ Here's how a custom map type is defined:
|
||||
type ledger is map(address, tez);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type ledger = (address, tez) map
|
||||
```
|
||||
@ -44,7 +44,7 @@ end
|
||||
>
|
||||
> `("<string value>": address)` means that we type-cast a string into an address.
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
let ledger: ledger = Map.literal
|
||||
@ -82,7 +82,7 @@ If we want to access a balance from our ledger above, we can use the `[]` operat
|
||||
const balance: option(tez) = ledger[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
let balance: tez option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger
|
||||
@ -106,7 +106,7 @@ Accessing a value in a map yields an option, however you can also get the value
|
||||
const balance: tez = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), ledger);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
let balance: tez = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger
|
||||
@ -139,7 +139,7 @@ function iter_op (const m : ledger) : unit is
|
||||
} with map_iter(aggregate, m) ;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let iter_op (m : ledger) : unit =
|
||||
let assert_eq = fun (i: address) (j: tez) -> assert (j > 100)
|
||||
@ -166,7 +166,7 @@ function map_op (const m : ledger) : ledger is
|
||||
} with map_map(increment, m) ;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let map_op (m : ledger) : ledger =
|
||||
let increment = fun (_: address) (j: tez) -> j+1
|
||||
@ -200,7 +200,7 @@ function fold_op (const m : ledger) : tez is
|
||||
} with map_fold(aggregate, m , 10)
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let fold_op (m : ledger) : ledger =
|
||||
let aggregate = fun (ignore: address) (j: tez * tez) -> j.0 + j.1
|
||||
@ -234,7 +234,7 @@ type user is record
|
||||
end
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type user = {
|
||||
id: nat;
|
||||
@ -266,7 +266,7 @@ const user: user = record
|
||||
end
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let user: user = {
|
||||
id = 1n;
|
||||
@ -296,7 +296,7 @@ If we want to obtain a value from a record for a given key, we can do the follow
|
||||
const is_admin: bool = user.is_admin;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let is_admin: bool = user.is_admin
|
||||
```
|
||||
|
@ -36,7 +36,7 @@ const g: int = 1_000_000;
|
||||
>const g: int = 1_000_000;
|
||||
>```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
// int + int produces int
|
||||
@ -63,18 +63,18 @@ let g: int = 1_000_000
|
||||
<!--ReasonLIGO-->
|
||||
|
||||
```reasonligo
|
||||
// int + int produces int
|
||||
/* int + int produces int */
|
||||
let a: int = 5 + 10;
|
||||
// nat + int produces int
|
||||
/* nat + int produces int */
|
||||
let b: int = 5n + 10;
|
||||
// tez + tez produces tez
|
||||
/* tez + tez produces tez */
|
||||
let c: tez = 5mutez + 10mutez;
|
||||
// you can't add tez + int or tez + nat, this won't compile
|
||||
// let d: tez = 5mutez + 10n;
|
||||
// two nats produce a nat
|
||||
/* you can't add tez + int or tez + nat, this won't compile */
|
||||
/* let d: tez = 5mutez + 10n; */
|
||||
/* two nats produce a nat */
|
||||
let e: nat = 5n + 10n;
|
||||
// nat + int produces an int, this won't compile
|
||||
// let f: nat = 5n + 10;
|
||||
/* nat + int produces an int, this won't compile */
|
||||
/* let f: nat = 5n + 10; */
|
||||
let g: int = 1_000_000;
|
||||
```
|
||||
|
||||
@ -103,7 +103,7 @@ const b: int = 5n - 2n;
|
||||
const d: tez = 5mutez - 1mt;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let a: int = 5 - 10
|
||||
// subtraction of two nats, yields an int
|
||||
@ -116,10 +116,10 @@ let d: tez = 5mutez - 1mt
|
||||
<!--ReasonLIGO-->
|
||||
```reasonligo
|
||||
let a: int = 5 - 10;
|
||||
// subtraction of two nats, yields an int
|
||||
/* subtraction of two nats, yields an int */
|
||||
let b: int = 5n - 2n;
|
||||
// won't compile, result is an int, not a nat
|
||||
// let c: nat = 5n - 2n;
|
||||
/* won't compile, result is an int, not a nat */
|
||||
/* let c: nat = 5n - 2n; */
|
||||
let d: tez = 5mutez - 1mt;
|
||||
```
|
||||
|
||||
@ -140,7 +140,7 @@ const b: nat = 5n * 5n;
|
||||
const c: tez = 5n * 5mutez;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let a: int = 5 * 5
|
||||
let b: nat = 5n * 5n
|
||||
@ -152,7 +152,7 @@ let c: tez = 5n * 5mutez
|
||||
```reasonligo
|
||||
let a: int = 5 * 5;
|
||||
let b: nat = 5n * 5n;
|
||||
// you can also multiply `nat` and `tez`
|
||||
/* you can also multiply `nat` and `tez` */
|
||||
let c: tez = 5n * 5mutez;
|
||||
```
|
||||
|
||||
@ -173,7 +173,7 @@ const b: nat = 10n / 3n;
|
||||
const c: nat = 10mutez / 3mutez;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let a: int = 10 / 3
|
||||
let b: nat = 10n / 3n
|
||||
|
@ -24,7 +24,7 @@ const my_set: int_set = set
|
||||
end
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type int_set = int set
|
||||
let my_set: int_set =
|
||||
@ -48,7 +48,7 @@ let my_set: int_set =
|
||||
const my_set: int_set = set end;
|
||||
const my_set_2: int_set = set_empty;
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let my_set: int_set = (Set.empty: int set)
|
||||
```
|
||||
@ -68,7 +68,7 @@ const contains_three: bool = my_set contains 3;
|
||||
const contains_three_fn: bool = set_mem(3, my_set);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let contains_three: bool = Set.mem 3 my_set
|
||||
```
|
||||
@ -87,7 +87,7 @@ let contains_three: bool = Set.mem(3, my_set);
|
||||
const set_size: nat = size(my_set);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let set_size: nat = Set.size my_set
|
||||
```
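Putting the set operations above together, a minimal sketch assuming the `int_set` alias defined earlier on this page; the chained `Set.add` calls and bindings are illustrative:

```cameligo
type int_set = int set

(* build a set, then query it with the operations shown above *)
let my_set: int_set = Set.add 3 (Set.add 2 (Set.add 1 (Set.empty: int set)))
let has_three: bool = Set.mem 3 my_set
let set_size: nat = Set.size my_set
```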
|
||||
@ -108,7 +108,7 @@ const larger_set: int_set = set_add(4, my_set);
|
||||
const smaller_set: int_set = set_remove(3, my_set);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
let larger_set: int_set = Set.add 4 my_set
|
||||
@ -134,7 +134,7 @@ function sum(const result: int; const i: int): int is result + i;
|
||||
const sum_of_a_set: int = set_fold(sum, my_set, 0);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let sum (result: int) (i: int) : int = result + i
|
||||
let sum_of_a_set: int = Set.fold sum my_set 0
|
||||
@ -166,7 +166,7 @@ const my_list: int_list = list
|
||||
end
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type int_list = int list
|
||||
let my_list: int_list = [1; 2; 3]
|
||||
@ -190,7 +190,7 @@ const larger_list: int_list = cons(4, my_list);
|
||||
const even_larger_list: int_list = 5 # larger_list;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let larger_list: int_list = 4 :: my_list
|
||||
(* CameLIGO doesn't have a List.cons *)
|
||||
@ -217,7 +217,7 @@ function increment(const i: int): int is block { skip } with i + 1;
|
||||
const incremented_list: int_list = list_map(increment, even_larger_list);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
let increment (i: int) : int = i + 1
|
||||
@ -246,7 +246,7 @@ function sum(const result: int; const i: int): int is block { skip } with result
|
||||
const sum_of_a_list: int = list_fold(sum, my_list, 0);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
let sum (result: int) (i: int) : int = result + i
|
||||
@ -258,7 +258,7 @@ let sum_of_a_list: int = List.fold sum my_list 0
|
||||
|
||||
```reasonligo
|
||||
let sum = (result: int, i: int): int => result + i;
|
||||
// Outputs 6
|
||||
/* Outputs 6 */
|
||||
let sum_of_a_list: int = List.fold(sum, my_list, 0);
|
||||
```
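As a recap of the list operations above, a minimal CameLIGO sketch reusing the `increment` and `sum` helpers shown earlier; the combined `total` binding is illustrative, and it assumes `List.map` takes the function first, as in the examples above:

```cameligo
let my_list: int list = [1; 2; 3]

let increment (i: int) : int = i + 1
let sum (result: int) (i: int) : int = result + i

(* map then fold: [2; 3; 4] summed gives 9 *)
let total: int = List.fold sum (List.map increment my_list) 0
```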
|
||||
|
||||
@ -292,7 +292,7 @@ type full_name is string * string;
|
||||
const full_name: full_name = ("Alice", "Johnson");
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type full_name = string * string
|
||||
(* The parentheses here are optional *)
|
||||
@ -324,7 +324,7 @@ Tuple elements are one-indexed and accessed like so:
|
||||
const first_name: string = full_name.1;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let first_name: string = full_name.1
|
||||
```
|
||||
|
@ -12,12 +12,12 @@ Strings are defined using the built-in `string` type like this:
|
||||
```
|
||||
const a: string = "Hello Alice";
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```
|
||||
let a: string = "Hello Alice"
|
||||
```
|
||||
<!--ReasonLIGO-->
|
||||
```
|
||||
```reasonligo
|
||||
let a: string = "Hello Alice";
|
||||
```
|
||||
<!--END_DOCUSAURUS_CODE_TABS-->
|
||||
@ -37,7 +37,7 @@ const full_greeting: string = greeting ^ " " ^ name;
|
||||
// Hello Alice! (alternatively)
|
||||
const full_greeting_exclamation: string = string_concat(full_greeting, "!");
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
Strings can be concatenated using the `^` operator.
|
||||
|
||||
```cameligo
|
||||
@ -67,7 +67,7 @@ const name: string = "Alice";
|
||||
// slice = "A"
|
||||
const slice: string = string_slice(0n, 1n, name);
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let name: string = "Alice"
|
||||
let slice: string = String.slice 0n 1n name
|
||||
@ -92,7 +92,7 @@ const name: string = "Alice";
|
||||
// length = 5
|
||||
const length: nat = size(name);
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let name: string = "Alice"
|
||||
let length: nat = String.size name
|
||||
|
@ -20,7 +20,7 @@ type animalBreed is string;
|
||||
const dogBreed : animalBreed = "Saluki";
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
```cameligo
|
||||
type animal_breed = string
|
||||
@ -48,7 +48,7 @@ const ledger: accountBalances = map
|
||||
end
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
// account_balances is a simple type, a map of address <-> tez
|
||||
type account_balances = (address, tez) map
|
||||
@ -57,9 +57,9 @@ let ledger: account_balances = Map.literal
|
||||
[(("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address), 10mutez)]
|
||||
```
|
||||
|
||||
<!--Reasonligo-->
|
||||
<!--ReasonLIGO-->
|
||||
```reasonligo
|
||||
// account_balances is a simple type, a map of address <-> tez
|
||||
/* account_balances is a simple type, a map of address <-> tez */
|
||||
type account_balances = map(address, tez);
|
||||
|
||||
let ledger: account_balances =
|
||||
@ -100,7 +100,7 @@ const ledger: accountBalances = map
|
||||
end
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
(* alias two types *)
|
||||
type account = address
|
||||
@ -122,7 +122,7 @@ let ledger: account_balances = Map.literal
|
||||
)]
|
||||
```
|
||||
|
||||
<!--Reasonligo-->
|
||||
<!--ReasonLIGO-->
|
||||
```reasonligo
|
||||
/* alias two types */
|
||||
type account = address;
|
||||
@ -135,8 +135,8 @@ type account_data = {
|
||||
/* our ledger / account_balances is a map of account <-> account_data */
|
||||
type account_balances = map(account, account_data);
|
||||
|
||||
// pseudo-JSON representation of our map
|
||||
// {"tz1...": {balance: 10mutez, number_of_transactions: 5n}}
|
||||
/* pseudo-JSON representation of our map */
|
||||
/* {"tz1...": {balance: 10mutez, number_of_transactions: 5n}} */
|
||||
let ledger: account_balances =
|
||||
Map.literal([
|
||||
(
|
||||
|
@ -19,7 +19,7 @@ Here's how they're defined:
|
||||
const n: unit = Unit;
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let n: unit = ()
|
||||
```
|
||||
@ -50,7 +50,7 @@ const u: user = Admin(1000n);
|
||||
const g: user = Guest(Unit);
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type id = nat
|
||||
type user =
|
||||
@ -93,7 +93,7 @@ const p1: dinner = None;
|
||||
const p2: dinner = Some("Hamburgers")
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type dinner = string option
|
||||
|
||||
@ -129,7 +129,7 @@ function is_hungry(const dinner: dinner): bool is block { skip }
|
||||
)
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
type dinner = string option
|
||||
let is_hungry (d: dinner) : bool =
|
||||
|
@ -5,8 +5,6 @@ title: Constants & Variables
|
||||
|
||||
The next building block after types are constants and variables.
|
||||
|
||||
pleh.
|
||||
|
||||
## Constants
|
||||
|
||||
Constants are immutable by design, which means their values can't be reassigned.
|
||||
@ -23,7 +21,7 @@ You can evaluate the constant definition above using the following CLI command:
|
||||
ligo evaluate-value -s pascaligo gitlab-pages/docs/language-basics/src/variables-and-constants/const.ligo age
|
||||
# Outputs: 25
|
||||
```
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
```cameligo
|
||||
let age: int = 25
|
||||
```
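To make the immutability point concrete, a minimal CameLIGO sketch; the `age_next_year` binding is illustrative, not from the original docs:

```cameligo
let age: int = 25

(* 'age' cannot be reassigned; deriving a new value means introducing a new binding *)
let age_next_year: int = age + 1
```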
|
||||
@ -80,7 +78,7 @@ ligo run-function -s pascaligo gitlab-pages/docs/language-basics/src/variables-a
|
||||
# Outputs: 2
|
||||
```
|
||||
|
||||
<!--Cameligo-->
|
||||
<!--CameLIGO-->
|
||||
|
||||
As expected from a functional language, CameLIGO uses value-binding
|
||||
for variables rather than assignment. Variables are changed by replacement,
|
||||
|
@ -939,3 +939,18 @@ a:hover {
  width: 15em;
  }
}

/* ReasonLIGO specific syntax highlighting */
.language-reasonligo .hljs-operator {
  color: #a626a4;
}
.language-reasonligo .hljs-character {
  color: #50a14f;
}
.language-reasonligo .hljs-module-identifier {
  color: #00f;
}
.language-reasonligo .hljs-constructor {
  color: #a31515;
}
@ -7,9 +7,7 @@ dry_run_output=$(./scripts/ligo_ci.sh dry-run src/test/contracts/website2.ligo m

expected_compiled_parameter="(Right 1)";
expected_compiled_storage=1;
expected_dry_run_output="tuple[ list[]
2
]";
expected_dry_run_output="( [] , 2 )";

if [ "$compiled_storage" != "$expected_compiled_storage" ]; then
echo "Expected $expected_compiled_storage as compile-storage output, got $compiled_storage instead";
@ -47,6 +47,14 @@ let req_syntax n =
|
||||
info ~docv ~doc [] in
|
||||
required @@ pos n (some string) None info
|
||||
|
||||
let init_file =
|
||||
let open Arg in
|
||||
let info =
|
||||
let docv = "INIT_FILE" in
|
||||
let doc = "$(docv) is the path to the .ligo or .mligo file to be used for context initialization." in
|
||||
info ~docv ~doc ["init-file"] in
|
||||
value @@ opt (some string) None info
|
||||
|
||||
let amount =
|
||||
let open Arg in
|
||||
let info =
|
||||
@ -153,7 +161,7 @@ let compile_parameter =
|
||||
let%bind simplified_param = Compile.Of_source.compile_expression v_syntax expression in
|
||||
let%bind (typed_param,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_param in
|
||||
let%bind mini_c_param = Compile.Of_typed.compile_expression typed_param in
|
||||
let%bind compiled_param = Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg (Expression mini_c_param) [] in
|
||||
let%bind compiled_param = Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg mini_c_param in
|
||||
let%bind value = Run.evaluate_expression compiled_param.expr compiled_param.expr_ty in
|
||||
ok @@ Format.asprintf "%a\n" (Main.Display.michelson_pp michelson_format) value
|
||||
in
|
||||
@ -163,6 +171,35 @@ let compile_parameter =
|
||||
let doc = "Subcommand: compile parameters to a michelson expression. The resulting michelson expression can be passed as an argument in a transaction which calls a contract." in
|
||||
(term , Term.info ~doc cmdname)
|
||||
|
||||
let interpret =
|
||||
let f expression init_file syntax amount sender source display_format =
|
||||
toplevel ~display_format @@
|
||||
let%bind (decl_list,state,env) = match init_file with
|
||||
| Some init_file ->
|
||||
let%bind simplified = Compile.Of_source.compile init_file (Syntax_name syntax) in
|
||||
let%bind typed_prg,state = Compile.Of_simplified.compile simplified in
|
||||
let%bind mini_c_prg = Compile.Of_typed.compile typed_prg in
|
||||
let env = Ast_typed.program_environment typed_prg in
|
||||
ok (mini_c_prg,state,env)
|
||||
| None -> ok ([],Typer.Solver.initial_state,Ast_typed.Environment.full_empty) in
|
||||
|
||||
let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) init_file in
|
||||
let%bind simplified_exp = Compile.Of_source.compile_expression v_syntax expression in
|
||||
let%bind (typed_exp,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_exp in
|
||||
let%bind mini_c_exp = Compile.Of_typed.compile_expression typed_exp in
|
||||
let%bind compiled_exp = Compile.Of_mini_c.aggregate_and_compile_expression decl_list mini_c_exp in
|
||||
let%bind options = Run.make_dry_run_options {amount ; sender ; source } in
|
||||
let%bind value = Run.run ~options compiled_exp.expr compiled_exp.expr_ty in
|
||||
let%bind simplified_output = Uncompile.uncompile_expression typed_exp.type_annotation value in
|
||||
ok @@ Format.asprintf "%a\n" Ast_simplified.PP.expression simplified_output
|
||||
in
|
||||
let term =
|
||||
Term.(const f $ expression "EXPRESSION" 0 $ init_file $ syntax $ amount $ sender $ source $ display_format ) in
|
||||
let cmdname = "interpret" in
|
||||
let doc = "Subcommand: interpret the expression in the context initialized by the provided source file." in
|
||||
(term , Term.info ~doc cmdname)
|
||||
|
||||
|
||||
let compile_storage =
|
||||
let f source_file entry_point expression syntax display_format michelson_format =
|
||||
toplevel ~display_format @@
|
||||
@ -235,10 +272,11 @@ let run_function =
|
||||
let%bind mini_c_prg = Compile.Of_typed.compile typed_prg in
|
||||
|
||||
let%bind simplified_param = Compile.Of_source.compile_expression v_syntax parameter in
|
||||
let%bind (typed_param,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_param in
|
||||
let%bind compiled_param = Compile.Of_typed.compile_expression typed_param in
|
||||
let%bind app = Compile.Of_simplified.apply entry_point simplified_param in
|
||||
let%bind (typed_app,_) = Compile.Of_simplified.compile_expression ~env ~state app in
|
||||
let%bind compiled_applied = Compile.Of_typed.compile_expression typed_app in
|
||||
|
||||
let%bind michelson = Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg (Entry_name entry_point) [compiled_param] in
|
||||
let%bind michelson = Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg compiled_applied in
|
||||
let%bind options = Run.make_dry_run_options {amount ; sender ; source } in
|
||||
let%bind michelson_output = Run.run ~options michelson.expr michelson.expr_ty in
|
||||
let%bind simplified_output = Uncompile.uncompile_typed_program_entry_function_result typed_prg entry_point michelson_output in
|
||||
@ -257,7 +295,7 @@ let evaluate_value =
|
||||
let%bind typed_prg,_ = Compile.Of_simplified.compile simplified in
|
||||
let%bind mini_c = Compile.Of_typed.compile typed_prg in
|
||||
let%bind (exp,_) = Mini_c.get_entry mini_c entry_point in
|
||||
let%bind compiled = Compile.Of_mini_c.aggregate_and_compile_expression mini_c (Expression exp) [] in
|
||||
let%bind compiled = Compile.Of_mini_c.aggregate_and_compile_expression mini_c exp in
|
||||
let%bind options = Run.make_dry_run_options {amount ; sender ; source } in
|
||||
let%bind michelson_output = Run.run ~options compiled.expr compiled.expr_ty in
|
||||
let%bind simplified_output = Uncompile.uncompile_typed_program_entry_expression_result typed_prg entry_point michelson_output in
|
||||
@ -295,6 +333,7 @@ let run ?argv () =
|
||||
compile_parameter ;
|
||||
compile_storage ;
|
||||
compile_expression ;
|
||||
interpret ;
|
||||
dry_run ;
|
||||
run_function ;
|
||||
evaluate_value ;
|
||||
|
@ -37,6 +37,10 @@ let%expect_test _ =
|
||||
evaluate-value
|
||||
Subcommand: evaluate a given definition.
|
||||
|
||||
interpret
|
||||
Subcommand: interpret the expression in the context initialized by
|
||||
the provided source file.
|
||||
|
||||
measure-contract
|
||||
Subcommand: measure a contract's compiled size in bytes.
|
||||
|
||||
@ -84,6 +88,10 @@ let%expect_test _ =
|
||||
evaluate-value
|
||||
Subcommand: evaluate a given definition.
|
||||
|
||||
interpret
|
||||
Subcommand: interpret the expression in the context initialized by
|
||||
the provided source file.
|
||||
|
||||
measure-contract
|
||||
Subcommand: measure a contract's compiled size in bytes.
|
||||
|
||||
|
@ -41,22 +41,22 @@ let parsify_expression_pascaligo = fun source ->
|
||||
Simplify.Pascaligo.simpl_expression raw in
|
||||
ok simplified
|
||||
|
||||
let parsify_ligodity = fun source ->
|
||||
let parsify_cameligo = fun source ->
|
||||
let%bind raw =
|
||||
trace (simple_error "parsing") @@
|
||||
Parser.Ligodity.parse_file source in
|
||||
Parser.Cameligo.parse_file source in
|
||||
let%bind simplified =
|
||||
trace (simple_error "simplifying") @@
|
||||
Simplify.Ligodity.simpl_program raw in
|
||||
Simplify.Cameligo.simpl_program raw in
|
||||
ok simplified
|
||||
|
||||
let parsify_expression_ligodity = fun source ->
|
||||
let parsify_expression_cameligo = fun source ->
|
||||
let%bind raw =
|
||||
trace (simple_error "parsing expression") @@
|
||||
Parser.Ligodity.parse_expression source in
|
||||
Parser.Cameligo.parse_expression source in
|
||||
let%bind simplified =
|
||||
trace (simple_error "simplifying expression") @@
|
||||
Simplify.Ligodity.simpl_expression raw in
|
||||
Simplify.Cameligo.simpl_expression raw in
|
||||
ok simplified
|
||||
|
||||
let parsify_reasonligo = fun source ->
|
||||
@ -65,7 +65,7 @@ let parsify_reasonligo = fun source ->
|
||||
Parser.Reasonligo.parse_file source in
|
||||
let%bind simplified =
|
||||
trace (simple_error "simplifying") @@
|
||||
Simplify.Ligodity.simpl_program raw in
|
||||
Simplify.Cameligo.simpl_program raw in
|
||||
ok simplified
|
||||
|
||||
let parsify_string_reasonligo = fun source ->
|
||||
@ -74,7 +74,7 @@ let parsify_string_reasonligo = fun source ->
|
||||
Parser.Reasonligo.parse_string source in
|
||||
let%bind simplified =
|
||||
trace (simple_error "simplifying") @@
|
||||
Simplify.Ligodity.simpl_program raw in
|
||||
Simplify.Cameligo.simpl_program raw in
|
||||
ok simplified
|
||||
|
||||
let parsify_expression_reasonligo = fun source ->
|
||||
@ -83,13 +83,13 @@ let parsify_expression_reasonligo = fun source ->
|
||||
Parser.Reasonligo.parse_expression source in
|
||||
let%bind simplified =
|
||||
trace (simple_error "simplifying expression") @@
|
||||
Simplify.Ligodity.simpl_expression raw in
|
||||
Simplify.Cameligo.simpl_expression raw in
|
||||
ok simplified
|
||||
|
||||
let parsify = fun (syntax : v_syntax) source_filename ->
|
||||
let%bind parsify = match syntax with
|
||||
| Pascaligo -> ok parsify_pascaligo
|
||||
| Cameligo -> ok parsify_ligodity
|
||||
| Cameligo -> ok parsify_cameligo
|
||||
| ReasonLIGO -> ok parsify_reasonligo
|
||||
in
|
||||
let%bind parsified = parsify source_filename in
|
||||
@ -99,7 +99,7 @@ let parsify = fun (syntax : v_syntax) source_filename ->
|
||||
let parsify_expression = fun syntax source ->
|
||||
let%bind parsify = match syntax with
|
||||
| Pascaligo -> ok parsify_expression_pascaligo
|
||||
| Cameligo -> ok parsify_expression_ligodity
|
||||
| Cameligo -> ok parsify_expression_cameligo
|
||||
| ReasonLIGO -> ok parsify_expression_reasonligo
|
||||
in
|
||||
let%bind parsified = parsify source in
|
||||
|
@ -27,32 +27,23 @@ let aggregate_and_compile = fun program form ->
|
||||
| ExpressionForm _ -> compile_expression aggregated'
|
||||
|
||||
let aggregate_and_compile_contract = fun program name ->
|
||||
let%bind (exp, idx) = get_entry program name in
|
||||
aggregate_and_compile program (ContractForm (exp, idx))
|
||||
let%bind (exp, _) = get_entry program name in
|
||||
aggregate_and_compile program (ContractForm exp)
|
||||
|
||||
type compiled_expression_t =
|
||||
| Expression of expression
|
||||
| Entry_name of string
|
||||
|
||||
let aggregate_and_compile_expression = fun program exp args ->
|
||||
match exp with
|
||||
| Expression exp ->
|
||||
aggregate_and_compile program (ExpressionForm ((exp,List.length program), args))
|
||||
| Entry_name name ->
|
||||
let%bind (exp, idx) = get_entry program name in
|
||||
aggregate_and_compile program (ExpressionForm ((exp,idx), args))
|
||||
let aggregate_and_compile_expression = fun program exp ->
|
||||
aggregate_and_compile program (ExpressionForm exp)
|
||||
|
||||
let build_contract : Compiler.compiled_expression -> Michelson.michelson result =
|
||||
fun compiled ->
|
||||
let%bind ((Ex_ty _param_ty),(Ex_ty _storage_ty)) = Self_michelson.fetch_lambda_parameters compiled.expr_ty in
|
||||
let%bind ((Ex_ty _param_ty),(Ex_ty _storage_ty)) = Self_michelson.fetch_contract_inputs compiled.expr_ty in
|
||||
let%bind param_michelson =
|
||||
Trace.trace_tzresult_lwt (simple_error "Could not unparse contract lambda's parameter") @@
|
||||
Trace.trace_tzresult_lwt (simple_error "Invalid contract: Could not unparse parameter") @@
|
||||
Proto_alpha_utils.Memory_proto_alpha.unparse_ty_michelson _param_ty in
|
||||
let%bind storage_michelson =
|
||||
Trace.trace_tzresult_lwt (simple_error "Could not unparse contract lambda's storage") @@
|
||||
Trace.trace_tzresult_lwt (simple_error "Invalid contract: Could not unparse storage") @@
|
||||
Proto_alpha_utils.Memory_proto_alpha.unparse_ty_michelson _storage_ty in
|
||||
let contract = Michelson.contract param_michelson storage_michelson compiled.expr in
|
||||
let%bind () =
|
||||
Trace.trace_tzresult_lwt (simple_error "Invalid contract") @@
|
||||
Trace.trace_tzresult_lwt (simple_error "Invalid contract: Contract did not typecheck") @@
|
||||
Proto_alpha_utils.Memory_proto_alpha.typecheck_contract contract in
|
||||
ok contract
|
||||
|
@ -5,5 +5,16 @@ let compile (program : Ast_simplified.program) : (Ast_typed.program * Typer.Solv
|
||||
let () = Typer.Solver.discard_state state in
|
||||
ok @@ (prog_typed, state)
|
||||
|
||||
let compile_expression ?(env = Ast_typed.Environment.full_empty) ~(state : Typer.Solver.state) (ae : Ast_simplified.expression) : (Ast_typed.value * Typer.Solver.state) result =
|
||||
let compile_expression ?(env = Ast_typed.Environment.full_empty) ~(state : Typer.Solver.state) (ae : Ast_simplified.expression)
|
||||
: (Ast_typed.value * Typer.Solver.state) result =
|
||||
Typer.type_expression env state ae
|
||||
|
||||
let apply (entry_point : string) (param : Ast_simplified.expression) : Ast_simplified.expression result =
|
||||
let name = Var.of_name entry_point in
|
||||
let entry_point_var : Ast_simplified.expression =
|
||||
{ expression = Ast_simplified.E_variable name ;
|
||||
location = Virtual "generated entry-point variable" } in
|
||||
let applied : Ast_simplified.expression =
|
||||
{ expression = Ast_simplified.E_application (entry_point_var, param) ;
|
||||
location = Virtual "generated application" } in
|
||||
ok applied
|
||||
|
@ -17,3 +17,8 @@ let uncompile_typed_program_entry_expression_result program entry ex_ty_value =
|
||||
|
||||
let uncompile_typed_program_entry_function_result program entry ex_ty_value =
|
||||
uncompile_value Function program entry ex_ty_value
|
||||
|
||||
let uncompile_expression type_value ex_ty_value =
|
||||
let%bind mini_c = Compiler.Uncompiler.translate_value ex_ty_value in
|
||||
let%bind typed = Transpiler.untranspile mini_c type_value in
|
||||
Typer.untype_expression typed
|
@ -1,9 +1,9 @@
|
||||
open Trace
|
||||
|
||||
module Parser = Parser_ligodity.Parser
|
||||
module AST = Parser_ligodity.AST
|
||||
module ParserLog = Parser_ligodity.ParserLog
|
||||
module LexToken = Parser_ligodity.LexToken
|
||||
module Parser = Parser_cameligo.Parser
|
||||
module AST = Parser_cameligo.AST
|
||||
module ParserLog = Parser_cameligo.ParserLog
|
||||
module LexToken = Parser_cameligo.LexToken
|
||||
module Lexer = Lexer.Make(LexToken)
|
||||
|
||||
let parse_file (source: string) : AST.t result =
|
@ -1,4 +1,4 @@
|
||||
(* Abstract Syntax Tree (AST) for Ligodity *)
|
||||
(* Abstract Syntax Tree (AST) for Cameligo *)
|
||||
|
||||
[@@@warning "-30"]
|
||||
|
@ -6,9 +6,9 @@
|
||||
(flags -la 1 --explain --external-tokens LexToken))
|
||||
|
||||
(library
|
||||
(name parser_ligodity)
|
||||
(public_name ligo.parser.ligodity)
|
||||
(modules AST ligodity Parser ParserLog LexToken)
|
||||
(name parser_cameligo)
|
||||
(public_name ligo.parser.cameligo)
|
||||
(modules AST cameligo Parser ParserLog LexToken)
|
||||
(libraries
|
||||
parser_shared
|
||||
str
|
||||
@ -22,19 +22,19 @@
|
||||
(executable
|
||||
(name LexerMain)
|
||||
(libraries
|
||||
parser_ligodity)
|
||||
parser_cameligo)
|
||||
(modules
|
||||
LexerMain
|
||||
)
|
||||
(flags (:standard -open Parser_shared -open Parser_ligodity))
|
||||
(flags (:standard -open Parser_shared -open Parser_cameligo))
|
||||
)
|
||||
|
||||
(executable
|
||||
(name ParserMain)
|
||||
(libraries
|
||||
parser_ligodity)
|
||||
parser_cameligo)
|
||||
(modules
|
||||
ParserMain
|
||||
)
|
||||
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_ligodity))
|
||||
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))
|
||||
)
|
src/passes/1-parser/camligo/.gitignore (vendored)
@ -1,2 +0,0 @@
ast_generated.ml
parser_generated.mly
@ -1 +0,0 @@
include Ast_generated
@ -1,66 +0,0 @@
|
||||
(library
|
||||
(name parser_camligo)
|
||||
(public_name ligo.parser.camligo)
|
||||
(libraries
|
||||
simple-utils
|
||||
tezos-utils
|
||||
lex
|
||||
)
|
||||
(modules ast ast_generated parser user)
|
||||
(flags (:standard -w +1..62-4-9-44-40-42-48@39@33 -open Simple_utils -open Tezos_utils ))
|
||||
(preprocess
|
||||
(pps
|
||||
ppx_let
|
||||
ppx_deriving.std
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
;; Generating parser
|
||||
|
||||
(rule
|
||||
(targets parser.ml parser.mli)
|
||||
(deps parser_generated.mly ast.ml)
|
||||
(action (system "menhir --explain --unused-tokens --external-tokens Lex.Token lex/token.mly parser_generated.mly --base parser"))
|
||||
)
|
||||
|
||||
(rule
|
||||
(targets parser_generated.mly)
|
||||
(deps partial_parser.mly pre_parser.mly)
|
||||
(action (system "cat pre_parser.mly partial_parser.mly > parser_generated.mly"))
|
||||
(mode (promote (until-clean) (only *)))
|
||||
)
|
||||
|
||||
(rule
|
||||
(targets partial_parser.mly)
|
||||
(deps generator.exe)
|
||||
(action (system "./generator.exe parser > partial_parser.mly"))
|
||||
)
|
||||
|
||||
;; Generating AST
|
||||
|
||||
(rule
|
||||
(targets ast_generated.ml)
|
||||
(deps generator.exe)
|
||||
(action (system "./generator.exe ast > ast_generated.ml"))
|
||||
(mode (promote (until-clean) (only *)))
|
||||
)
|
||||
|
||||
;; Generating Generator
|
||||
|
||||
(executable
|
||||
(name generator)
|
||||
(libraries
|
||||
ocamlgraph
|
||||
simple-utils
|
||||
tezos-utils
|
||||
lex
|
||||
)
|
||||
(modules generator)
|
||||
(preprocess
|
||||
(pps
|
||||
ppx_let
|
||||
ppx_deriving.std
|
||||
)
|
||||
)
|
||||
)
|
@ -1,738 +0,0 @@
|
||||
open Simple_utils
|
||||
|
||||
type 'a name = {
|
||||
name : string ;
|
||||
content : 'a ;
|
||||
}
|
||||
|
||||
let make_name name content = { name ; content }
|
||||
let destruct {name ; content} = (name, content)
|
||||
let get_name x = x.name
|
||||
let get_content x = x.content
|
||||
|
||||
module Token = Lex.Token
|
||||
type token = Token.token
|
||||
|
||||
module O = struct
|
||||
|
||||
type list_mode =
|
||||
| Trail of token
|
||||
| Trail_option of token
|
||||
| Trail_force of token
|
||||
| Trail_force_ne of token
|
||||
| Lead of token
|
||||
| Lead_ne of token
|
||||
| Separated of token
|
||||
| Separated_ne of token
|
||||
| Separated_nene of token
|
||||
| Naked
|
||||
| Naked_ne
|
||||
|
||||
type 'a list_element = list_mode * 'a
|
||||
|
||||
type rhs_element = [
|
||||
| `Named of string
|
||||
| `Token of token
|
||||
| `List of string list_element
|
||||
| `Option of string
|
||||
]
|
||||
|
||||
type rhs = rhs_element list name
|
||||
type rule = rhs list name
|
||||
|
||||
type manual_rule_content = {
|
||||
menhir_codes : string list ;
|
||||
ast_code : string ;
|
||||
}
|
||||
type manual_rule = manual_rule_content name
|
||||
|
||||
type singleton =
|
||||
| Manual of manual_rule
|
||||
| Generated of rule
|
||||
|
||||
type name_element = [
|
||||
| `Named of string
|
||||
| `Current
|
||||
| `Lower
|
||||
]
|
||||
|
||||
type element = [
|
||||
| `Named of string
|
||||
| `Token of token
|
||||
| `List of name_element list_element
|
||||
| `Current
|
||||
| `Lower
|
||||
]
|
||||
|
||||
type operator = element list
|
||||
type n_operator = operator name
|
||||
|
||||
type n_operators = n_operator list
|
||||
type level = n_operators name
|
||||
type level_list = level list
|
||||
type levels = level List.Ne.t
|
||||
|
||||
type hierarchy = {
|
||||
prefix : string ;
|
||||
levels : levels ;
|
||||
auxiliary_rules : rule list ;
|
||||
}
|
||||
type n_hierarchy = hierarchy name
|
||||
let make_hierarchy prefix levels auxiliary_rules : hierarchy = { levels ; auxiliary_rules ; prefix }
|
||||
|
||||
type language = {
|
||||
entry_point : string ;
|
||||
singletons : singleton list ;
|
||||
hierarchies : n_hierarchy list ;
|
||||
}
|
||||
|
||||
let get_op : n_operator -> operator = get_content
|
||||
|
||||
let manual_singleton name menhir_codes ast_code : singleton = Manual (make_name name {menhir_codes ; ast_code})
|
||||
let rule_singleton rule : singleton = Generated rule
|
||||
let language entry_point singletons hierarchies = {entry_point ; singletons ; hierarchies}
|
||||
|
||||
let name_hierarchy name prefix : n_operators list -> rule list -> n_hierarchy = fun nopss rules ->
|
||||
let nopss' = List.Ne.of_list nopss in
|
||||
let name_i : int -> n_operators -> level = fun i x ->
|
||||
let first = get_name (List.hd x) in
|
||||
let name' = Format.asprintf "%s_%d_%s" name i first in
|
||||
make_name name' x in
|
||||
let levels : levels = List.Ne.mapi name_i nopss' in
|
||||
make_name name @@ make_hierarchy prefix levels rules
|
||||
|
||||
end
|
||||
|
||||
module Check = struct
|
||||
open O
|
||||
|
||||
let well_formed : language -> unit = fun l ->
|
||||
let elements : element list -> unit = fun es ->
|
||||
let rec aux = fun es ->
|
||||
match es with
|
||||
| [] -> ()
|
||||
| [ _ ] -> ()
|
||||
| (`List _ | `Named _ | `Current | `Lower) :: (`List _ | `Named _ | `Current | `Lower) :: _ ->
|
||||
raise (Failure "two non-token separated ops in a row")
|
||||
| _ :: tl -> aux tl
|
||||
in
|
||||
(if (List.length es < 2) then raise (Failure "operator is too short")) ;
|
||||
aux es in
|
||||
let op : n_operator -> unit = fun x -> elements @@ get_content x in
|
||||
let level : level -> unit = fun l -> List.iter op @@ get_content l in
|
||||
let hierarchy : n_hierarchy -> unit = fun h -> List.Ne.iter level @@ h.content.levels in
|
||||
List.iter hierarchy l.hierarchies
|
||||
|
||||
let associativity : language -> unit = fun l ->
|
||||
let level : level -> unit = fun l ->
|
||||
let aux : ([`Left | `Right | `None] as 'a) -> n_operator -> 'a = fun ass nop ->
|
||||
let op = get_content nop in
|
||||
match ass, List.hd op, List.nth op (List.length op - 1) with
|
||||
| _, `Lower, `Lower -> raise (Failure "double assoc")
|
||||
| `None, `Lower, _ -> `Left
|
||||
| `None, _, `Lower -> `Right
|
||||
| `Left, _, `Lower -> raise (Failure "different assocs")
|
||||
| `Right, `Lower, _ -> raise (Failure "different assocs")
|
||||
| m, _, _ -> m
|
||||
in
|
||||
let _assert = List.fold_left aux `None (get_content l) in
|
||||
()
|
||||
in
|
||||
let hierarchy : n_hierarchy -> unit = fun h ->
|
||||
List.Ne.iter level h.content.levels in
|
||||
List.iter hierarchy l.hierarchies
|
||||
|
||||
end
|
||||
|
||||
|
||||
let make_constructor : _ -> (string * string) -> unit = fun ppf (gr, rhs) ->
|
||||
let gr = String.capitalize_ascii gr in
|
||||
match rhs with
|
||||
| "" -> Format.fprintf ppf "%s" gr
|
||||
| s -> Format.fprintf ppf "%s_%s" gr s
|
||||
|
||||
let make_operator : _ -> (string * string) -> unit = fun ppf (prefix, op) ->
|
||||
Format.fprintf ppf "%s_%s" prefix op
|
||||
|
||||
module Print_AST = struct
|
||||
open Format
|
||||
open PP_helpers
|
||||
|
||||
let manual_rule : _ -> O.manual_rule -> _ = fun ppf mr ->
|
||||
fprintf ppf "%s = %s" mr.name mr.content.ast_code
|
||||
|
||||
let generated_rule : _ -> O.rule -> _ = fun ppf gr ->
|
||||
let aux : _ -> O.rhs -> _ = fun ppf rhs ->
|
||||
let type_elements =
|
||||
let aux : O.rhs_element -> string option = fun e ->
|
||||
match e with
|
||||
| `Named s -> Some (s ^ " Location.wrap")
|
||||
| `List ( _, s) -> Some ("(" ^ s ^ " Location.wrap list)")
|
||||
| `Option s -> Some ("(" ^ s ^ " Location.wrap option)")
|
||||
| `Token _ -> None
|
||||
in
|
||||
List.filter_map aux rhs.content in
|
||||
let type_element = fun ppf te -> fprintf ppf "%s" te in
|
||||
fprintf ppf "| %a of (%a)"
|
||||
make_constructor (gr.name, rhs.name)
|
||||
(list_sep type_element (const " * ")) type_elements
|
||||
in
|
||||
fprintf ppf "%s =@. @[<v>%a@]" gr.name
|
||||
(list_sep aux new_line) gr.content
|
||||
|
||||
let singleton : _ -> O.singleton -> _ = fun ppf s ->
|
||||
match s with
|
||||
| Manual s -> manual_rule ppf s
|
||||
| Generated s -> generated_rule ppf s
|
||||
|
||||
let singletons : _ -> O.singleton list -> _ = fun ppf ss ->
|
||||
match ss with
|
||||
| [] -> ()
|
||||
| hd :: tl ->
|
||||
fprintf ppf "%a\n" (prepend "type " (singleton)) hd ;
|
||||
fprintf ppf "%a" (list_sep (prepend "and " (singleton)) (const "\n")) tl
|
||||
|
||||
let n_operator prefix level_name : _ -> O.n_operator -> _ = fun ppf nop ->
|
||||
let type_elements =
|
||||
let aux : O.element -> string option = fun e ->
|
||||
match e with
|
||||
| `Named s -> Some (s ^ " Location.wrap")
|
||||
| `List ( _, s) -> Some ("(" ^ (match s with
|
||||
| `Lower | `Current -> level_name |`Named s -> s
|
||||
) ^ " Location.wrap list)")
|
||||
| `Token _ -> None
|
||||
| `Current | `Lower -> Some (level_name ^ " Location.wrap") in
|
||||
List.filter_map aux (get_content nop) in
|
||||
let type_element = fun ppf te -> fprintf ppf "%s" te in
|
||||
fprintf ppf "| %a of (%a)"
|
||||
make_operator (prefix, nop.name)
|
||||
(list_sep type_element (const " * ")) type_elements
|
||||
|
||||
let n_hierarchy t : _ -> O.n_hierarchy -> _ = fun ppf nh ->
|
||||
let levels = List.Ne.map get_content ((get_content nh).levels) in
|
||||
let nops = List.Ne.concat levels in
|
||||
let name = get_name nh in
|
||||
fprintf ppf "%s %s =@.@[%a@] [@@@@deriving show]" t
|
||||
name
|
||||
(list_sep (n_operator nh.content.prefix name) new_line) nops
|
||||
|
||||
let n_hierarchies (first:bool) : _ -> O.n_hierarchy list -> _ = fun ppf ss ->
|
||||
match ss with
|
||||
| [] -> ()
|
||||
| hd :: tl ->
|
||||
fprintf ppf "%a\n" (n_hierarchy (if first then "type" else "and")) hd ;
|
||||
fprintf ppf "%a" (list_sep (n_hierarchy "and") (const "\n")) tl
|
||||
|
||||
let language : _ -> O.language -> _ = fun ppf l ->
|
||||
fprintf ppf "%a@.@." comment "Language" ;
|
||||
let first = List.length l.singletons = 0 in
|
||||
fprintf ppf " %a@.%a@.@." comment "Singletons" singletons l.singletons ;
|
||||
fprintf ppf " %a@.%a@." comment "Hierarchies" (n_hierarchies first) l.hierarchies ;
|
||||
fprintf ppf " %a@.type entry_point = %s Location.wrap@.@." comment "Entry point" l.entry_point ;
|
||||
()
|
||||
end
|
||||
|
||||
module Print_Grammar = struct
|
||||
open Format
|
||||
open PP_helpers
|
||||
|
||||
let letters = [| "a" ; "b" ; "c" ; "d" ; "e" ; "f" ; "g" ; "h" ; "i" ; "j" |]
|
||||
|
||||
|
||||
let manual_rule : _ -> O.manual_rule -> _ = fun ppf mr ->
|
||||
let {name;content} = mr in
|
||||
fprintf ppf "%s:@. @[<v>%a@]" name (list_sep string new_line) content.menhir_codes
|
||||
|
||||
let generated_rule : _ -> O.rule -> _ = fun ppf gr ->
|
||||
let aux_rule : _ -> O.rhs -> _ = fun ppf rhs ->
|
||||
let i = ref 0 in
|
||||
let aux : _ -> O.rhs_element -> _ = fun ppf e ->
|
||||
(match e with
|
||||
| `Named s -> fprintf ppf "%s = wrap(%s)" letters.(!i) s
|
||||
| `Option s -> fprintf ppf "%s = option(wrap(%s))" letters.(!i) s
|
||||
| `List (mode, s) ->
|
||||
fprintf ppf "%s = %swrap(%s))"
|
||||
letters.(!i)
|
||||
(match mode with
|
||||
| Naked -> "naked_list("
|
||||
| Naked_ne -> "naked_list_ne("
|
||||
| Lead s -> "lead_list(" ^ (Token.to_string s) ^ ","
|
||||
| Lead_ne s -> "lead_list_ne(" ^ (Token.to_string s) ^ ","
|
||||
| Trail s -> "trail_list(" ^ (Token.to_string s) ^ ","
|
||||
| Trail_option s -> "trail_option_list(" ^ (Token.to_string s) ^ ","
|
||||
| Trail_force s -> "trail_force_list(" ^ (Token.to_string s) ^ ","
|
||||
| Trail_force_ne s -> "trail_force_list_ne(" ^ (Token.to_string s) ^ ","
|
||||
| Separated s -> "separated_list(" ^ (Token.to_string s) ^ ","
|
||||
| Separated_ne s -> "separated_list_ne(" ^ (Token.to_string s) ^ ","
|
||||
| Separated_nene s -> "separated_list_nene(" ^ (Token.to_string s) ^ ","
|
||||
)
|
||||
s
|
||||
| `Token t -> i := !i - 1 ; string ppf @@ Token.to_string t) ;
|
||||
i := !i + 1
|
||||
in
|
||||
fprintf ppf "%a" (list_sep aux (const " ")) rhs.content in
|
||||
let aux_code : _ -> O.rhs -> _ = fun ppf rhs ->
|
||||
let i = ref 0 in
|
||||
let aux : O.rhs_element -> _ = fun e ->
|
||||
let s = (match e with
|
||||
| `Named _ | `List _ | `Option _ -> Some (letters.(!i))
|
||||
| `Token _ -> i := !i - 1 ; None) in
|
||||
i := !i + 1 ; s
|
||||
in
|
||||
let content = List.filter_map aux rhs.content in
|
||||
fprintf ppf "%a (%a)" make_constructor (gr.name, rhs.name) (list_sep string (const " , ")) content
|
||||
in
|
||||
let aux : _ -> O.rhs -> _ = fun ppf rhs ->
|
||||
fprintf ppf "| %a { %a }"
|
||||
aux_rule rhs
|
||||
aux_code rhs in
|
||||
fprintf ppf "%s:@.%a" gr.name (list_sep aux (const "\n")) gr.content
|
||||
|
||||
let singleton : _ -> O.singleton -> _ = fun ppf s ->
|
||||
match s with
|
||||
| Manual s -> manual_rule ppf s
|
||||
| Generated s -> generated_rule ppf s
|
||||
|
||||
|
||||
let n_operator_rule prev_lvl_name cur_lvl_name : _ -> O.n_operator -> _ = fun ppf nop ->
|
||||
let i = ref 0 in
|
||||
let element : _ -> O.element -> _ = fun ppf element ->
|
||||
(match element with
|
||||
| `Token t -> i := !i - 1 ; string ppf @@ Token.to_string t
|
||||
| `List (mode, content) ->
|
||||
fprintf ppf "%s = %swrap(%s))"
|
||||
letters.(!i)
|
||||
(match mode with
|
||||
| Naked -> "naked_list("
|
||||
| Naked_ne -> "naked_list_ne("
|
||||
| Lead s -> "lead_list(" ^ (Token.to_string s) ^ ","
|
||||
| Lead_ne s -> "lead_list_ne(" ^ (Token.to_string s) ^ ","
|
||||
| Trail s -> "trail_list(" ^ (Token.to_string s) ^ ","
|
||||
| Trail_option s -> "trail_option_list(" ^ (Token.to_string s) ^ ","
|
||||
| Trail_force s -> "trail_force_list(" ^ (Token.to_string s) ^ ","
|
||||
| Trail_force_ne s -> "trail_force_list_ne(" ^ (Token.to_string s) ^ ","
|
||||
| Separated s -> "separated_list(" ^ (Token.to_string s) ^ ","
|
||||
| Separated_ne s -> "separated_list_ne(" ^ (Token.to_string s) ^ ","
|
||||
| Separated_nene s -> "separated_list_nene(" ^ (Token.to_string s) ^ ","
|
||||
)
|
||||
(match content with | `Lower -> prev_lvl_name | `Named s -> s | `Current -> cur_lvl_name)
|
||||
| `Named n ->
|
||||
fprintf ppf "%s = wrap(%s)" letters.(!i) n
|
||||
| `Current ->
|
||||
fprintf ppf "%s = wrap(%s)" letters.(!i) cur_lvl_name
|
||||
| `Lower ->
|
||||
fprintf ppf "%s = wrap(%s)" letters.(!i) prev_lvl_name
|
||||
) ;
|
||||
i := !i + 1
|
||||
in
|
||||
(list_sep element (const " ")) ppf (get_content nop)
|
||||
|
||||
let n_operator_code prefix : _ -> O.n_operator -> _ = fun ppf nop ->
|
||||
let (name, elements) = destruct nop in
|
||||
let elements' =
|
||||
let i = ref 0 in
|
||||
let aux : O.element -> _ = fun e ->
|
||||
let r =
|
||||
match e with
|
||||
| `Token _ -> i := !i - 1 ; None
|
||||
| `List _ | `Named _ | `Current | `Lower -> Some letters.(!i)
|
||||
in i := !i + 1 ; r
|
||||
in
|
||||
List.filter_map aux elements in
|
||||
fprintf ppf "%a (%a)" make_operator (prefix, name) (list_sep string (const " , ")) elements'
|
||||
|
||||
let n_operator prefix prev_lvl_name cur_lvl_name : _ -> O.n_operator -> _ = fun ppf nop ->
|
||||
let name = get_name nop in
|
||||
fprintf ppf "%a@;| %a { %a }" comment name
|
||||
(n_operator_rule prev_lvl_name cur_lvl_name) nop
|
||||
(n_operator_code prefix) nop
|
||||
|
||||
let level prefix prev_lvl_name : _ -> O.level -> _ = fun ppf l ->
|
||||
let name = get_name l in
|
||||
match prev_lvl_name with
|
||||
| "" -> (
|
||||
fprintf ppf "%s :@. @[<v>%a@]" name
|
||||
(list_sep (n_operator prefix prev_lvl_name name) new_line) (get_content l) ;
|
||||
)
|
||||
| _ -> (
|
||||
fprintf ppf "%s :@. @[<v>%a@;| %s { $1 }@]" name
|
||||
(list_sep (n_operator prefix prev_lvl_name name) new_line) (get_content l)
|
||||
prev_lvl_name
|
||||
)
|
||||
|
||||
let n_hierarchy : _ -> O.n_hierarchy -> _ = fun ppf nh ->
|
||||
let name = get_name nh in
|
||||
let top_level = get_name @@ List.Ne.hd nh.content.levels in
|
||||
fprintf ppf "%a@.%%inline %s : %s { $1 }@.@;" comment ("Top-level for " ^ name) name top_level;
|
||||
let (hd, tl) = List.Ne.rev (get_content nh).levels in
|
||||
fprintf ppf "%a" (level nh.content.prefix "") hd ;
|
||||
let aux prev_name lvl =
|
||||
new_lines 2 ppf () ;
|
||||
fprintf ppf "%a" (level nh.content.prefix prev_name) lvl ;
|
||||
get_name lvl
|
||||
in
|
||||
let _last_name = List.fold_left aux (get_name hd) tl in
|
||||
()
|
||||
|
||||
let language : _ -> O.language -> _ = fun ppf l ->
|
||||
fprintf ppf "%a@.@." comment "Generated Language" ;
|
||||
fprintf ppf "entry_point : wrap(%s) EOF { $1 }@.@." l.entry_point ;
|
||||
fprintf ppf "%a@.@." comment "Singletons" ;
|
||||
fprintf ppf "@[%a@]@.@." (list_sep singleton new_line) l.singletons ;
|
||||
fprintf ppf "%a@.@." comment "Hierarchies" ;
|
||||
fprintf ppf "@[%a@]" (list_sep n_hierarchy new_line) l.hierarchies ;
|
||||
|
||||
end
|
||||
|
||||
|
||||
let infix : string -> [`Left | `Right] -> token -> O.n_operator = fun name assoc t ->
|
||||
match assoc with
|
||||
| `Left -> make_name name [`Current ; `Token t ; `Lower]
|
||||
| `Right -> make_name name [`Lower ; `Token t ; `Current]
|
||||
|
||||
(* Ocaml is bad *)
|
||||
let empty_infix : string -> [`Left | `Right] -> O.n_operator = fun name assoc ->
|
||||
match assoc with
|
||||
| `Left -> make_name name [`Current ; `Lower]
|
||||
| `Right -> make_name name [`Lower ; `Current]
|
||||
|
||||
|
||||
let paren : string -> string -> O.n_operator = fun constructor_name name ->
|
||||
make_name constructor_name [`Token Token.LPAREN ; `Named name ; `Token Token.RPAREN]
|
||||
|
||||
let expression_name = "expression"
|
||||
let type_expression_name = "type_expression"
|
||||
let restricted_type_expression_name = "restricted_type_expression"
|
||||
let program_name = "program"
|
||||
let variable_name = "variable"
|
||||
let pattern_name = "pattern"
|
||||
let constructor_name = "constructor"
|
||||
let int_name = "int_"
|
||||
let tz_name = "tz_"
|
||||
let unit_name = "unit_"
|
||||
let string_name = "string_"
|
||||
|
||||
let variable = O.manual_singleton variable_name ["| NAME { $1 }"] "string"
|
||||
let int = O.manual_singleton int_name ["| INT { $1 }"] "int"
|
||||
let tz = O.manual_singleton tz_name ["| TZ { $1 }"] "int"
|
||||
let unit = O.manual_singleton unit_name ["| UNIT { () }"] "unit"
|
||||
let string = O.manual_singleton string_name ["| STRING { $1 }"] "string"
|
||||
let constructor = O.manual_singleton constructor_name ["| CONSTRUCTOR_NAME { $1 }"] "string"
|
||||
|
||||
module Pattern = struct
|
||||
|
||||
open Token
|
||||
open O
|
||||
|
||||
let application = empty_infix "application" `Left
|
||||
|
||||
let data_structure : O.n_operator = make_name "data_structure" [
|
||||
`Named variable_name ; `Token LSQUARE ; `List (Lead SEMICOLON, `Current) ; `Token RSQUARE ;
|
||||
]
|
||||
|
||||
let record_element : O.rule = make_name "p_record_element" [
|
||||
make_name "" [`Named variable_name ; `Token EQUAL ; `Named pattern_name]
|
||||
]
|
||||
|
||||
let record : O.n_operator = make_name "record" [
|
||||
`Token LBRACKET ;
|
||||
`List (Trail SEMICOLON, `Named record_element.name) ;
|
||||
`Token RBRACKET ;
|
||||
]
|
||||
|
||||
let pair = infix "pair" `Left COMMA
|
||||
let type_annotation = make_name "type_annotation" [
|
||||
`Current ; `Token COLON ; `Named restricted_type_expression_name
|
||||
]
|
||||
|
||||
let variable : O.n_operator = make_name "variable" [ `Named variable_name ]
|
||||
let constructor : O.n_operator = make_name "constructor" [ `Named constructor_name ]
|
||||
|
||||
let module_ident : O.n_operator = make_name "module_ident" [
|
||||
`List (Trail_force_ne DOT, `Named constructor_name) ; `Named variable_name ;
|
||||
]
|
||||
|
||||
let unit : O.n_operator = make_name "unit" [ `Named unit_name ]
|
||||
|
||||
let restricted_pattern_name = "restricted_pattern"
|
||||
|
||||
let restricted_pattern = O.name_hierarchy restricted_pattern_name "Pr" [
|
||||
[variable ; unit] ;
|
||||
[paren "restrict" pattern_name]
|
||||
] []
|
||||
|
||||
let main = O.name_hierarchy pattern_name "P" [
|
||||
[record] ;
|
||||
[type_annotation] ;
|
||||
[pair] ;
|
||||
[data_structure] ;
|
||||
[application] ;
|
||||
[variable ; constructor ; module_ident ; unit] ;
|
||||
[paren "paren" pattern_name]
|
||||
] []
|
||||
|
||||
let singletons = [O.rule_singleton record_element]
|
||||
end
|
||||
|
||||
module Expression = struct
|
||||
|
||||
open Token
|
||||
open O
|
||||
|
||||
let application = empty_infix "application" `Right
|
||||
|
||||
let type_annotation = make_name "type_annotation" [
|
||||
`Current ; `Token COLON ; `Named restricted_type_expression_name
|
||||
]
|
||||
|
||||
let data_structure : O.n_operator = make_name "data_structure" [
|
||||
`Named variable_name ; `Token LSQUARE ; `List (Trail SEMICOLON, `Current) ; `Token RSQUARE ;
|
||||
]
|
||||
|
||||
let fun_ : O.n_operator = make_name "fun" [
|
||||
`Token FUN ; `Named pattern_name ;
|
||||
`Token ARROW ; `Current ;
|
||||
]
|
||||
|
||||
let let_in : O.n_operator = make_name "let_in" [
|
||||
`Token LET ; `Named pattern_name ;
|
||||
`Token EQUAL ; `Current ;
|
||||
`Token IN ; `Current ;
|
||||
]
|
||||
|
||||
let no_seq_name = "expression_no_seq"
|
||||
let no_match_name = "expression_no_match"
|
||||
|
||||
let record_element : O.rule = make_name "e_record_element" [
|
||||
make_name "record_explicit" [`Named variable_name ; `Token EQUAL ; `Named no_seq_name] ;
|
||||
make_name "record_implicit" [`Named variable_name ] ;
|
||||
]
|
||||
|
||||
let record : O.n_operator = make_name "record" [
|
||||
`Token LBRACKET ;
|
||||
`List (Trail SEMICOLON, `Named record_element.name) ;
|
||||
`Token RBRACKET ;
|
||||
]
|
||||
|
||||
let ite : O.n_operator = make_name "ifthenelse" [
|
||||
`Token IF ;
|
||||
`Current ;
|
||||
`Token THEN ;
|
||||
`Lower ;
|
||||
`Token ELSE ;
|
||||
`Current ;
|
||||
]
|
||||
|
||||
let it : O.n_operator = make_name "ifthen" [
|
||||
`Token IF ;
|
||||
`Current ;
|
||||
`Token THEN ;
|
||||
`Lower ;
|
||||
]
|
||||
|
||||
(* let sequence = infix "sequence" `Left SEMICOLON *)
|
||||
let sequence = make_name "sequence" [
|
||||
`List (Separated_nene SEMICOLON , `Lower)
|
||||
]
|
||||
|
||||
let match_clause = make_name "e_match_clause" [
|
||||
make_name "" [`Named pattern_name ; `Token ARROW ; `Named no_match_name]
|
||||
]
|
||||
let match_with = make_name "match" [
|
||||
`Token MATCH ; `Current ; `Token WITH ;
|
||||
`List (Lead_ne VBAR, `Named match_clause.name) ;
|
||||
]
|
||||
let lt = infix "lt" `Left LT
|
||||
let le = infix "le" `Left LE
|
||||
let gt = infix "gt" `Left GT
|
||||
let eq = infix "eq" `Left EQUAL
|
||||
let neq = infix "neq" `Left UNEQUAL
|
||||
|
||||
let cons = infix "cons" `Left DOUBLE_COLON
|
||||
|
||||
let addition = infix "addition" `Left PLUS
|
||||
let substraction = infix "substraction" `Left MINUS
|
||||
|
||||
let multiplication = infix "multiplication" `Left TIMES
|
||||
let division = infix "division" `Left DIV
|
||||
|
||||
let arith_variable : O.n_operator = make_name "variable" [ `Named variable_name ]
|
||||
let int : O.n_operator = make_name "int" [ `Named int_name ]
|
||||
let tz : O.n_operator = make_name "tz" [ `Named tz_name ]
|
||||
let unit : O.n_operator = make_name "unit" [ `Named unit_name ]
|
||||
let string : O.n_operator = make_name "string" [ `Named string_name ]
|
||||
let constructor : O.n_operator = make_name "constructor" [ `Named constructor_name ]
|
||||
|
||||
let module_ident : O.n_operator = make_name "module_ident" [
|
||||
`List (Trail_force_ne DOT, `Named constructor_name) ; `Named variable_name ;
|
||||
]
|
||||
let access : O.n_operator = infix "access" `Right DOT
|
||||
let accessor : O.n_operator = make_name "accessor" [
|
||||
`Named variable_name ; `List (Lead_ne DOT, `Named variable_name) ;
|
||||
]
|
||||
|
||||
let assignment : O.n_operator = infix "assign" `Left LEFT_ARROW
|
||||
|
||||
let tuple = make_name "tuple" [
|
||||
`List (Separated_nene COMMA, `Lower)
|
||||
]
|
||||
|
||||
let name = make_name "name" [`Token TILDE ; `Current]
|
||||
|
||||
let main_hierarchy_name = "expression_main"
|
||||
|
||||
let main_hierarchy = O.name_hierarchy main_hierarchy_name "Eh" [
|
||||
[tuple] ;
|
||||
[type_annotation] ;
|
||||
[lt ; le ; gt ; eq ; neq] ;
|
||||
[assignment] ;
|
||||
[cons] ;
|
||||
[addition ; substraction] ;
|
||||
[multiplication ; division] ;
|
||||
[application] ;
|
||||
[data_structure] ;
|
||||
[name] ;
|
||||
[arith_variable ; constructor ; module_ident ; accessor ; int ; unit ; string ; tz] ;
|
||||
[paren "bottom" expression_name] ;
|
||||
] []
|
||||
|
||||
let no_sequence_expression = O.name_hierarchy no_seq_name "Es" [
|
||||
[let_in ; fun_ ; record ; ite ; it ; match_with] ;
|
||||
[make_name "main" [`Named main_hierarchy_name]] ;
|
||||
] []
|
||||
|
||||
let no_match_expression = O.name_hierarchy no_match_name "Em" [
|
||||
[let_in ; fun_ ; record ; ite ; it ] ;
|
||||
[make_name "main" [`Named main_hierarchy_name]] ;
|
||||
] []
|
||||
|
||||
let expression = O.name_hierarchy expression_name "E" [
|
||||
[sequence] ;
|
||||
[let_in ; fun_ ; record ; ite ; it ; match_with] ;
|
||||
[make_name "main" [`Named main_hierarchy_name]] ;
|
||||
] []
|
||||
|
||||
let singletons = List.map O.rule_singleton [record_element ; match_clause]
|
||||
end
|
||||
|
||||
module Type_expression = struct
|
||||
|
||||
open Token
|
||||
open O
|
||||
|
||||
let record_element : O.rule = make_name "t_record_element" [
|
||||
make_name "" [`Named variable_name ; `Token COLON ; `Named type_expression_name]
|
||||
]
|
||||
|
||||
let record : O.n_operator = make_name "record" [
|
||||
`Token LBRACKET ;
|
||||
`List (Trail SEMICOLON, `Named record_element.name) ;
|
||||
`Token RBRACKET ;
|
||||
]
|
||||
|
||||
let application = empty_infix "application" `Right
|
||||
|
||||
let tuple = make_name "tuple" [
|
||||
`List (Separated_nene COMMA, `Lower)
|
||||
]
|
||||
|
||||
let type_variable : O.n_operator = make_name "variable" [ `Named variable_name ]
|
||||
|
||||
let restricted_type_expression = O.name_hierarchy restricted_type_expression_name "Tr" [
|
||||
[application] ;
|
||||
[type_variable] ;
|
||||
[paren "paren" type_expression_name] ;
|
||||
] []
|
||||
|
||||
let type_expression = O.name_hierarchy type_expression_name "T" [
|
||||
[record] ;
|
||||
[tuple] ;
|
||||
[application] ;
|
||||
[type_variable] ;
|
||||
[paren "paren" type_expression_name]
|
||||
] []
|
||||
|
||||
let singletons = [O.rule_singleton record_element]
|
||||
|
||||
end
|
||||
|
||||
module Program = struct
|
||||
|
||||
open Token
|
||||
open O
|
||||
|
||||
let statement_name = "statement"
|
||||
|
||||
let program : O.rule = make_name program_name [make_name "" [
|
||||
`List (Trail_option DOUBLE_SEMICOLON, statement_name)
|
||||
]]
|
||||
|
||||
let param_name = "param"
|
||||
|
||||
let param : O.rule = make_name param_name [
|
||||
make_name "restricted_pattern" [ `Named Pattern.restricted_pattern_name ] ;
|
||||
make_name "implicit_named_param" [ `Token TILDE ; `Named variable_name ] ;
|
||||
]
|
||||
|
||||
let type_annotation_name = "type_annotation_"
|
||||
let type_annotation : O.rule = make_name type_annotation_name [
|
||||
make_name "" [ `Token COLON ; `Named type_expression_name ] ;
|
||||
]
|
||||
|
||||
let let_content_name = "let_content"
|
||||
let let_content : O.rule = make_name let_content_name [
|
||||
make_name "" [
|
||||
`Named variable_name ;
|
||||
`List (Naked, param_name) ;
|
||||
`Option type_annotation_name ;
|
||||
`Token EQUAL ;
|
||||
`Named expression_name ;
|
||||
] ;
|
||||
]
|
||||
|
||||
let statement : O.rule = make_name statement_name [
|
||||
make_name "variable_declaration" [`Token LET ; `Named let_content_name] ;
|
||||
make_name "init_declaration" [`Token LET_INIT ; `Named let_content_name] ;
|
||||
make_name "entry_declaration" [`Token LET_ENTRY ; `Named let_content_name] ;
|
||||
make_name "type_declaration" [`Token TYPE ; `Named variable_name ; `Token EQUAL ; `Named type_expression_name] ;
|
||||
]
|
||||
|
||||
let singletons = List.map O.rule_singleton [
|
||||
let_content ;
|
||||
type_annotation ;
|
||||
program ;
|
||||
statement ;
|
||||
param ;
|
||||
]
|
||||
|
||||
end
|
||||
|
||||
let language = O.language program_name (
    variable :: constructor :: int :: unit :: string :: tz ::
    Program.singletons @
    Pattern.singletons @
    Expression.singletons @
    Type_expression.singletons
  ) [
    Pattern.main ;
    Pattern.restricted_pattern ;
    Expression.main_hierarchy ;
    Expression.no_sequence_expression ;
    Expression.no_match_expression ;
    Expression.expression ;
    Type_expression.restricted_type_expression ;
    Type_expression.type_expression ;
  ]

let () =
  let argn = Array.length Sys.argv in
  if argn = 1 then exit 1 ;
  let arg = Sys.argv.(1) in
  match arg with
  | "parser" -> (
      Format.printf "%a@.%a\n" PP_helpers.comment "Full Grammar" Print_Grammar.language language
    )
  | "ast" -> (
      Format.printf "%a@.%a\n" PP_helpers.comment "AST" Print_AST.language language
    )
  | _ -> exit 1
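(* Added note (derived from the match above, not in the original source): the
   executable takes a single mode argument on the command line; "parser"
   prints the generated Menhir grammar and "ast" prints the generated AST
   module, both to standard output, and any other invocation exits with code 1. *)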
@ -1,275 +0,0 @@
|
||||
open Simple_utils
|
||||
|
||||
type 'a name = {
|
||||
name : string ;
|
||||
content : 'a ;
|
||||
}
|
||||
|
||||
(*
|
||||
val make_name : string -> 'a -> 'a name
|
||||
val destruct : 'a name -> ( string * 'a )
|
||||
val get_name : 'a name -> string
|
||||
val get_content : 'a name -> 'a
|
||||
*)
|
||||
|
||||
module Token = Lex.Token
|
||||
type token = Token.token
|
||||
|
||||
module O : sig
|
||||
|
||||
type list_mode =
|
||||
| Trail of token
|
||||
| Trail_option of token
|
||||
| Trail_force of token
|
||||
| Trail_force_ne of token
|
||||
| Lead of token
|
||||
| Lead_ne of token
|
||||
| Separated of token
|
||||
| Separated_ne of token
|
||||
| Separated_nene of token
|
||||
| Naked
|
||||
| Naked_ne
|
||||
|
||||
type 'a list_element = list_mode * 'a
|
||||
|
||||
type rhs_element = [
|
||||
| `Named of string
|
||||
| `Token of token
|
||||
| `List of string list_element
|
||||
| `Option of string
|
||||
]
|
||||
|
||||
type rhs = rhs_element list name
|
||||
type rule = rhs list name
|
||||
|
||||
type manual_rule_content = {
|
||||
menhir_codes : string list ;
|
||||
ast_code : string ;
|
||||
}
|
||||
type manual_rule = manual_rule_content name
|
||||
|
||||
type singleton =
|
||||
| Manual of manual_rule
|
||||
| Generated of rule
|
||||
|
||||
type name_element = [
|
||||
| `Named of string
|
||||
| `Current
|
||||
| `Lower
|
||||
]
|
||||
|
||||
type element = [
|
||||
| `Named of string
|
||||
| `Token of token
|
||||
| `List of name_element list_element
|
||||
| `Current
|
||||
| `Lower
|
||||
]
|
||||
|
||||
type operator = element list
|
||||
type n_operator = operator name
|
||||
|
||||
type n_operators = n_operator list
|
||||
type level = n_operators name
|
||||
type level_list = level list
|
||||
type levels = level List.Ne.t
|
||||
|
||||
type hierarchy = {
|
||||
prefix : string ;
|
||||
levels : levels ;
|
||||
auxiliary_rules : rule list ;
|
||||
}
|
||||
type n_hierarchy = hierarchy name
|
||||
val make_hierarchy : string -> levels -> rule list -> hierarchy
|
||||
|
||||
type language = {
|
||||
entry_point : string ;
|
||||
singletons : singleton list ;
|
||||
hierarchies : n_hierarchy list ;
|
||||
}
|
||||
|
||||
val get_op : n_operator -> operator
|
||||
(*
|
||||
val manual_singleton : string -> string list -> string -> singleton
|
||||
val rule_singleton : rule -> singleton
|
||||
val language : string -> singleton list -> n_hierarchy list -> language
|
||||
val name_hierarchy : string -> string -> n_operators list -> rule list -> n_hierarchy
|
||||
*)
|
||||
|
||||
end
|
||||
|
||||
module Check : sig
|
||||
open O
|
||||
|
||||
val well_formed : language -> unit
|
||||
val associativity : language -> unit
|
||||
|
||||
end
|
||||
|
||||
|
||||
(*
|
||||
val make_constructor : Format.formatter -> (string * string) -> unit
|
||||
val make_operator : Format.formatter -> (string * string) -> unit
|
||||
*)
|
||||
|
||||
module Print_AST : sig
|
||||
(*
|
||||
open Format
|
||||
val manual_rule : formatter -> O.manual_rule -> unit
|
||||
val generated_rule : formatter -> O.rule -> unit
|
||||
val singleton : formatter -> O.singleton -> unit
|
||||
val singletons : formatter -> O.singleton list -> unit
|
||||
val n_operator : string -> string -> formatter -> O.n_operator -> unit
|
||||
val n_hierarchy : string -> formatter -> O.n_hierarchy -> unit
|
||||
val n_hierarchies : bool -> formatter -> O.n_hierarchy list -> unit
|
||||
val language : formatter -> O.language -> unit
|
||||
*)
|
||||
end
|
||||
|
||||
module Print_Grammar : sig
|
||||
(*
|
||||
open Format
|
||||
val letters : string array
|
||||
val manual_rule : formatter -> O.manual_rule -> unit
|
||||
val generated_rule : formatter -> O.rule -> unit
|
||||
val singleton : formatter -> O.singleton -> unit
|
||||
val n_operator_rule : string -> string -> formatter -> O.n_operator -> unit
|
||||
val n_operator_code : string -> formatter -> O.n_operator -> unit
|
||||
val n_operator : string -> string -> string -> formatter -> O.n_operator -> unit
|
||||
val level : string -> string -> formatter -> O.level -> unit
|
||||
val n_hierarchy : formatter -> O.n_hierarchy -> unit
|
||||
val language : formatter -> O.language -> unit
|
||||
*)
|
||||
end
|
||||
|
||||
(*
|
||||
val infix : string -> [`Left | `Right] -> token -> O.n_operator
|
||||
(* Ocaml is bad *)
|
||||
val empty_infix : string -> [`Left | `Right] -> O.n_operator
|
||||
val paren : string -> string -> O.n_operator
|
||||
val expression_name : string
|
||||
val type_expression_name : string
|
||||
val restricted_type_expression_name : string
|
||||
val program_name : string
|
||||
val variable_name : string
|
||||
val pattern_name : string
|
||||
val constructor_name : string
|
||||
val int_name : string
|
||||
val tz_name : string
|
||||
val unit_name : string
|
||||
val string_name : string
|
||||
val variable : O.singleton
|
||||
val int : O.singleton
|
||||
val tz : O.singleton
|
||||
val unit : O.singleton
|
||||
val string : O.singleton
|
||||
val constructor : O.singleton
|
||||
*)
|
||||
|
||||
module Pattern : sig
|
||||
(*
|
||||
val application : O.n_operator
|
||||
val data_structure : O.n_operator
|
||||
val record_element : O.rule
|
||||
val record : O.n_operator
|
||||
val pair : O.n_operator
|
||||
val type_annotation : [> `Current | `Named of string | `Token of token ] list name
|
||||
val variable : O.n_operator
|
||||
val constructor : O.n_operator
|
||||
val module_ident : O.n_operator
|
||||
val unit : O.n_operator
|
||||
val restricted_pattern_name : string
|
||||
val restricted_pattern : O.n_hierarchy
|
||||
val main : O.n_hierarchy
|
||||
val singletons : O.singleton list
|
||||
*)
|
||||
end
|
||||
|
||||
module Expression : sig
|
||||
(*
|
||||
val application : O.n_operator
|
||||
val type_annotation : [> `Current | `Named of string | `Token of token ] list name
|
||||
val data_structure : O.n_operator
|
||||
val fun_ : O.n_operator
|
||||
val let_in : O.n_operator
|
||||
val no_seq_name : string
|
||||
val no_match_name : string
|
||||
val record_element : O.rule
|
||||
val record : O.n_operator
|
||||
val ite : O.n_operator
|
||||
val it : O.n_operator
|
||||
|
||||
(* let sequence = infix "sequence" `Left SEMICOLON *)
|
||||
val sequence : [> `List of O.list_mode * [> `Lower ] ] list name
|
||||
val match_clause : [> `Named of string | `Token of token ] list name list name
|
||||
val match_with : [> `Current
|
||||
| `List of O.list_mode * [> `Named of string ]
|
||||
| `Token of token ] list name
|
||||
val lt : O.n_operator
|
||||
val le : O.n_operator
|
||||
val gt : O.n_operator
|
||||
val eq : O.n_operator
|
||||
val neq : O.n_operator
|
||||
val cons : O.n_operator
|
||||
val addition : O.n_operator
|
||||
val substraction : O.n_operator
|
||||
val multiplication : O.n_operator
|
||||
val division : O.n_operator
|
||||
val arith_variable : O.n_operator
|
||||
val int : O.n_operator
|
||||
val tz : O.n_operator
|
||||
val unit : O.n_operator
|
||||
val string : O.n_operator
|
||||
val constructor : O.n_operator
|
||||
val module_ident : O.n_operator
|
||||
*)
|
||||
val access : O.n_operator
|
||||
(*
|
||||
val accessor : O.n_operator
|
||||
val assignment : O.n_operator
|
||||
val tuple : [> `List of O.list_mode * [> `Lower ] ] list name
|
||||
val name : [> `Current | `Token of token ] list name
|
||||
val main_hierarchy_name : string
|
||||
val main_hierarchy : O.n_hierarchy
|
||||
val no_sequence_expression : O.n_hierarchy
|
||||
val no_match_expression : O.n_hierarchy
|
||||
val expression : O.n_hierarchy
|
||||
val singletons : O.singleton list
|
||||
*)
|
||||
end
|
||||
|
||||
module Type_expression : sig
|
||||
|
||||
(*
|
||||
val record_element : O.rule
|
||||
val record : O.n_operator
|
||||
val application : O.n_operator
|
||||
val tuple : [> `List of O.list_mode * [> `Lower ] ] list name
|
||||
val type_variable : O.n_operator
|
||||
val restricted_type_expression : O.n_hierarchy
|
||||
val type_expression : O.n_hierarchy
|
||||
val singletons : O.singleton list
|
||||
*)
|
||||
|
||||
end
|
||||
|
||||
module Program : sig
|
||||
|
||||
(*
|
||||
val statement_name : string
|
||||
val program : O.rule
|
||||
val param_name : string
|
||||
val param : O.rule
|
||||
val type_annotation_name : string
|
||||
val type_annotation : O.rule
|
||||
val let_content_name : string
|
||||
val let_content : O.rule
|
||||
val statement : O.rule
|
||||
val singletons : O.singleton list
|
||||
*)
|
||||
|
||||
end
|
||||
|
||||
(*
|
||||
val language : O.language
|
||||
*)
|
@ -1,54 +0,0 @@
(library
  (name lex)
  (public_name ligo.multifix.lex)
  (libraries
    simple-utils
    tezos-utils
  )
  (modules token token_type lexer)
)

(executable
  (name generator)
  (libraries
    str
    simple-utils
  )
  (modules generator)
  (flags (:standard -w +1..62-4-9-44-40-42-48@39@33 ))
)

(rule
  (targets token.mly)
  (deps generator.exe)
  (action (system "./generator.exe mly > token.mly"))
)

(rule
  (targets token.ml)
  (deps generator.exe)
  (action (system "./generator.exe ml > token.ml"))
)

(rule
  (targets lexer.mll)
  (deps generator.exe)
  (action (system "./generator.exe mll > lexer.mll"))
)

(rule
  (targets token_type.ml token_type.mli)
  (deps token.mly)
  (action (system "menhir --only-tokens token.mly --base token_type"))
)

(alias
  (name lexer.mll)
  (deps token.ml)
)

(rule
  (targets lexer.ml)
  (deps token.ml lexer.mll)
  (action (system "ocamllex lexer.mll"))
)
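; Added note: the rules above describe the generation pipeline of this
; library: generator.exe emits the token list as token.mly (for Menhir),
; token.ml (the runtime Token module) and lexer.mll (for ocamllex), then
; menhir --only-tokens derives token_type.ml/.mli and ocamllex produces lexer.ml.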
@ -1,185 +0,0 @@
type pre_token = {
  name : string ;
  pattern : string ;
}

let make name pattern = { name ; pattern }

let keyword = fun k ->
  let regexp = Str.regexp "[^0-9a-zA-Z]" in
  let constructor_name =
    Str.global_replace regexp "_"
    @@ String.uppercase_ascii k
  in
  make constructor_name k
let symbol = fun sym name -> make name sym

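(* Added worked example (comment only, not in the original source):
   [keyword "let%init"] upper-cases the lexeme and replaces every
   non-alphanumeric character with an underscore, yielding
   { name = "LET_INIT" ; pattern = "let%init" }, while [symbol "()" "UNIT"]
   yields { name = "UNIT" ; pattern = "()" }. *)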
module Print_mly = struct
|
||||
open Format
|
||||
|
||||
let token = fun ppf pre_token ->
|
||||
fprintf ppf "%%token %s" pre_token.name
|
||||
|
||||
let tokens = fun ppf tokens ->
|
||||
let open Simple_utils.PP_helpers in
|
||||
fprintf ppf "%%token EOF\n" ;
|
||||
fprintf ppf "%%token <int> INT\n" ;
|
||||
fprintf ppf "%%token <int> NAT\n" ;
|
||||
fprintf ppf "%%token <int> TZ\n" ;
|
||||
fprintf ppf "%%token <string> STRING\n" ;
|
||||
fprintf ppf "%%token <string> NAME\n" ;
|
||||
fprintf ppf "%%token <string> CONSTRUCTOR_NAME\n" ;
|
||||
fprintf ppf "\n%a\n\n" (list_sep token (const "\n")) tokens ;
|
||||
fprintf ppf "%%%%\n"
|
||||
end
|
||||
|
||||
module Print_mll = struct
|
||||
open Format
|
||||
|
||||
let token = fun ppf {name;pattern} ->
|
||||
fprintf ppf "| \"%s\" { %s }" pattern name
|
||||
|
||||
let pre =
|
||||
{pre|{
|
||||
open Token
|
||||
|
||||
exception Error of string
|
||||
exception Unexpected_character of string
|
||||
}
|
||||
|
||||
(* This rule analyzes a single line and turns it into a stream of
|
||||
tokens. *)
|
||||
|
||||
rule token = parse
|
||||
(*
|
||||
| "//" ([^ '\n']* ) (['\n' '\r']+)
|
||||
{ Lexing.new_line lexbuf ; token lexbuf }
|
||||
*)
|
||||
| ('\r'? '\n' '\r'?)
|
||||
{ Lexing.new_line lexbuf; token lexbuf }
|
||||
| '"' { string "" lexbuf }
|
||||
| [' ' '\t']
|
||||
{ token lexbuf }
|
||||
| (['0'-'9']+ as i) 'p'
|
||||
{ NAT (int_of_string i) }
|
||||
| (['0'-'9']+ as n) '.' (['0'-'9']['0'-'9'] as d) "tz" { TZ ((int_of_string n) * 100 + (int_of_string d)) }
|
||||
| (['0'-'9']+ as i)
|
||||
{ INT (int_of_string i) }
|
||||
|pre}
|
||||
let post =
|
||||
{post|
|
||||
| (['a'-'z''_']['a'-'z''A'-'Z''0'-'9''_']*) as v
|
||||
{ NAME v }
|
||||
| (['A'-'Z']['a'-'z''A'-'Z''0'-'9''_']*) as v
|
||||
{ CONSTRUCTOR_NAME v }
|
||||
| eof { EOF }
|
||||
| "(*" { comment 1 lexbuf }
|
||||
| _
|
||||
{ raise (Unexpected_character (Printf.sprintf "At offset %d: unexpected character.\n" (Lexing.lexeme_start lexbuf))) }
|
||||
|
||||
and string s = parse
|
||||
| "\\\"" { string (s ^ "\"") lexbuf }
|
||||
| "\\\\" { string (s ^ "\\") lexbuf }
|
||||
| '"' { STRING s }
|
||||
| eof { raise (Unexpected_character "missing string terminator") }
|
||||
| _ as c { string (s ^ (String.make 1 c)) lexbuf }
|
||||
|
||||
|
||||
and comment n = parse
|
||||
| "*)" { if n = 1 then token lexbuf else comment (n - 1) lexbuf }
|
||||
| "(*" { comment (n + 1) lexbuf }
|
||||
| '"' ( [^ '"' '\\'] | ( '\\' [^ '"'] ) ) '"' { comment n lexbuf }
|
||||
| eof { raise (Unexpected_character "missing comment terminator") }
|
||||
| ('\r'? '\n' '\r'?) { Lexing.new_line lexbuf; comment n lexbuf }
|
||||
| _ { comment n lexbuf }
|
||||
|
||||
|post}
|
||||
let tokens = fun ppf tokens ->
|
||||
let open Simple_utils.PP_helpers in
|
||||
fprintf ppf "%s%a\n%s" pre (list_sep token (const "\n")) tokens post
|
||||
end
|
||||
|
||||
module Print_ml = struct
|
||||
open Format
|
||||
|
||||
let token = fun ppf {name} ->
|
||||
fprintf ppf " | %s -> \"%s\"" name name
|
||||
|
||||
let pre =
|
||||
{pre|include Token_type
|
||||
|
||||
let to_string : token -> string = function
|
||||
| STRING _ -> "STRING"
|
||||
| NAME _ -> "NAME s"
|
||||
| CONSTRUCTOR_NAME _ -> "CONSTRUCTOR_NAME s"
|
||||
| INT _ -> "INT n"
|
||||
| NAT _ -> "NAT n"
|
||||
| TZ _ -> "TZ n"
|
||||
| EOF -> "EOF"
|
||||
|pre}
|
||||
|
||||
let tokens = fun ppf tokens ->
|
||||
let open Simple_utils.PP_helpers in
|
||||
fprintf ppf "%s%a" pre (list_sep token (const "\n")) tokens
|
||||
end
|
||||
|
||||
let tokens = [
|
||||
keyword "let%init" ;
|
||||
keyword "let%entry" ;
|
||||
keyword "let" ;
|
||||
keyword "type" ;
|
||||
keyword "in" ;
|
||||
keyword "if" ;
|
||||
keyword "then" ;
|
||||
keyword "else" ;
|
||||
(* keyword "block" ;
|
||||
* keyword "for" ;
|
||||
* keyword "const" ; *)
|
||||
keyword "fun" ;
|
||||
keyword "match" ;
|
||||
keyword "with" ;
|
||||
symbol "()" "UNIT" ;
|
||||
symbol "+" "PLUS" ;
|
||||
symbol "~" "TILDE" ;
|
||||
symbol "->" "ARROW" ;
|
||||
symbol "<-" "LEFT_ARROW" ;
|
||||
symbol "<=" "LE" ;
|
||||
symbol "<>" "UNEQUAL" ;
|
||||
symbol "<" "LT" ;
|
||||
symbol ">" "GT" ;
|
||||
symbol "-" "MINUS" ;
|
||||
symbol "*" "TIMES" ;
|
||||
symbol "/" "DIV" ;
|
||||
symbol "=" "EQUAL" ;
|
||||
symbol "|" "VBAR" ;
|
||||
symbol "[" "LSQUARE" ;
|
||||
symbol "]" "RSQUARE" ;
|
||||
symbol "(" "LPAREN" ;
|
||||
symbol ")" "RPAREN" ;
|
||||
symbol "{" "LBRACKET" ;
|
||||
symbol "}" "RBRACKET" ;
|
||||
symbol ";;" "DOUBLE_SEMICOLON" ;
|
||||
symbol ";" "SEMICOLON" ;
|
||||
symbol "::" "DOUBLE_COLON" ;
|
||||
symbol ":" "COLON" ;
|
||||
symbol "," "COMMA" ;
|
||||
symbol "." "DOT" ;
|
||||
]
|
||||
|
||||
let () =
|
||||
let argn = Array.length Sys.argv in
|
||||
if argn = 1 then exit 1 ;
|
||||
let arg = Sys.argv.(1) in
|
||||
let open Simple_utils.PP_helpers in
|
||||
match arg with
|
||||
| "mll" -> (
|
||||
Format.printf "%a@.%a\n" comment "Generated .mll" Print_mll.tokens tokens
|
||||
)
|
||||
| "mly" -> (
|
||||
Format.printf "%a@.%a\n" comment "Generated .mly" Print_mly.tokens tokens
|
||||
)
|
||||
| "ml" -> (
|
||||
Format.printf "%a@.%a\n" comment "Generated .ml" Print_ml.tokens tokens
|
||||
)
|
||||
| _ -> exit 1
|
||||
|
@ -1,43 +0,0 @@
|
||||
(*
|
||||
type pre_token = {
|
||||
name : string ;
|
||||
pattern : string ;
|
||||
}
|
||||
|
||||
val make : string -> string -> pre_token
|
||||
|
||||
val keyword : string -> pre_token
|
||||
val symbol : string -> string -> pre_token
|
||||
|
||||
module Print_mly : sig
|
||||
(*
|
||||
open Format
|
||||
val token : formatter -> pre_token -> unit
|
||||
val tokens : formatter -> pre_token list -> unit
|
||||
*)
|
||||
end
|
||||
|
||||
module Print_mll : sig
|
||||
(*
|
||||
open Format
|
||||
|
||||
val pre : string
|
||||
val post : string
|
||||
|
||||
val token : formatter -> pre_token -> unit
|
||||
val tokens : formatter -> pre_token list -> unit
|
||||
*)
|
||||
end
|
||||
|
||||
module Print_ml : sig
|
||||
(*
|
||||
open Format
|
||||
|
||||
val pre : string
|
||||
val token : formatter -> pre_token -> unit
|
||||
val tokens : formatter -> pre_token list -> unit
|
||||
*)
|
||||
end
|
||||
|
||||
val tokens : pre_token list
|
||||
*)
|
@ -1,25 +0,0 @@
type file_location = {
  filename : string ;
  start_line : int ;
  start_column : int ;
  end_line : int ;
  end_column : int ;
}

type virtual_location = string

type t =
  | File of file_location
  | Virtual of virtual_location

let make (start_pos:Lexing.position) (end_pos:Lexing.position) : t =
  let filename = start_pos.pos_fname in
  let start_line = start_pos.pos_lnum in
  let end_line = end_pos.pos_lnum in
  let start_column = start_pos.pos_cnum - start_pos.pos_bol in
  let end_column = end_pos.pos_cnum - end_pos.pos_bol in
  File { filename ; start_line ; start_column ; end_line ; end_column }

let virtual_location s = Virtual s
let dummy = virtual_location "dummy"
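(* Illustrative usage sketch (added, not in the original file): wrapping the
   current lexeme of a [Lexing.lexbuf] in a location, using only [make]
   defined above and the standard [Lexing] accessors. *)
let location_of_lexeme (lexbuf : Lexing.lexbuf) : t =
  make (Lexing.lexeme_start_p lexbuf) (Lexing.lexeme_end_p lexbuf)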
@ -1,20 +0,0 @@
(*
type file_location = {
  filename : string ;
  start_line : int ;
  start_column : int ;
  end_line : int ;
  end_column : int ;
}

type virtual_location = string

type t =
  | File of file_location
  | Virtual of virtual_location

val make : Lexing.position -> Lexing.position -> t

val virtual_location : string -> t
val dummy : t
*)
@ -1,3 +0,0 @@
module Ast = Ast
module Parser = Parser
module User = User
@ -1,72 +0,0 @@
%{
  open Ast
%}

%start <Ast.entry_point> entry_point

%%

naked_list(X):
  | { [] }
  | x = X xs = naked_list(X) { x :: xs }

naked_list_ne(X):
  | x = X { [ x ] }
  | x = X xs = naked_list_ne(X) { x :: xs }

trail_list(separator, X):
  | { [] }
  | trail_list_content(separator, X) { $1 }

trail_list_content(separator, X):
  | x = trail_list_last(separator, X) { x }
  | x = X separator xs = trail_list_content(separator, X) { x :: xs }

trail_list_last(separator, X):
  | x = X option(separator) { [ x ] }

trail_force_list(separator, X):
  | { [] }
  | x = X separator xs = trail_force_list(separator, X) { x :: xs }

trail_force_list_ne(separator, X):
  | x = X separator { [ x ] }
  | x = X separator xs = trail_force_list_ne(separator, X) { x :: xs }

trail_option_list(separator, X):
  | { [] }
  | trail_option_list_content(separator, X) { $1 }

trail_option_list_content(separator, X):
  | x = trail_option_list_last(separator, X) { x }
  | x = X option(separator) xs = trail_option_list_content(separator, X) { x :: xs }

trail_option_list_last(separator, X):
  | x = X option(separator) { [ x ] }

lead_list_ne(separator, X):
  | separator x = X { [x] }
  | separator x = X xs = lead_list_ne(separator, X) { x :: xs }

lead_list(separator, X):
  | { [] }
  | lead_list_content(separator, X) { $1 }

lead_list_content(separator, X):
  | x = lead_list_first(separator, X) { x }
  | xs = lead_list_content(separator, X) separator x = X { xs @ [ x ] }

lead_list_first (separator, X):
  | option(separator) x = X { [ x ] }

separated_list_ne(separator, X):
  | x = X { [x] }
  | x = X separator xs = separated_list_ne(separator, X) { x :: xs }

separated_list_nene(separator, X):
  | x = X separator y = X { [x ; y] }
  | x = X separator xs = separated_list_nene(separator, X) { x :: xs }

%inline wrap(X):
  | x = X { let loc = Location.make $startpos $endpos in Location.wrap ~loc x }
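(* Added note (illustration, not in the original grammar): the parameterized
   rules above differ only in where the separator may appear.  With SEMICOLON
   as the separator, trail_list accepts "a; b; c" as well as "a; b; c;",
   trail_force_list insists on the trailing separator, and lead_list instead
   tolerates a leading one, e.g. "; a; b". *)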
@ -1,40 +0,0 @@
open! Trace

let parse_file (source: string) : Ast.entry_point result =
  (* let pp_input =
   *   let prefix = Filename.(source |> basename |> remove_extension)
   *   and suffix = ".pp.ligo"
   *   in prefix ^ suffix in
   *
   * let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
   *     source pp_input in
   * let%bind () = sys_command cpp_cmd in
   *
   * let%bind channel =
   *   generic_try (simple_error "error opening file") @@
   *   (fun () -> open_in pp_input) in *)
  let%bind channel =
    generic_try (simple_error "error opening file") @@
    (fun () -> open_in source) in
  let lexbuf = Lexing.from_channel channel in
  let module Lexer = Lex.Lexer in
  (specific_try (fun e ->
    let error s () =
      let start = Lexing.lexeme_start_p lexbuf in
      let end_ = Lexing.lexeme_end_p lexbuf in
      let str () = Format.sprintf
        "at \"%s\" from (%d, %d) to (%d, %d)\n"
        (Lexing.lexeme lexbuf)
        start.pos_lnum (start.pos_cnum - start.pos_bol)
        end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
      error s str () in
    match e with
    | Parser.Error -> (fun () -> error (thunk "Parse") ())
    | Lexer.Error s -> (fun () -> error (fun () -> "Lexer " ^ s) ())
    | Lexer.Unexpected_character s -> error (fun () -> "Unexpected char " ^ s) (* TODO: this allows injection of ANSI escape codes in error messages, fix this. *)
    | _ -> simple_error "unrecognized parse error"
  )) @@ (fun () ->
    let raw = Parser.entry_point Lexer.token lexbuf in
    raw
  ) >>? fun raw ->
  ok raw
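(* Added note (comment only): the [error] helper above reports the offending
   lexeme with 1-based line numbers and 0-based columns, producing messages of
   the shape: at "lexeme" from (line, col) to (line, col). *)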
@ -1,3 +0,0 @@
open! Trace

val parse_file : string -> Ast.entry_point result
@ -6,8 +6,7 @@
|
||||
tezos-utils
|
||||
parser_shared
|
||||
parser_pascaligo
|
||||
parser_camligo
|
||||
parser_ligodity
|
||||
parser_cameligo
|
||||
parser_reasonligo
|
||||
)
|
||||
(preprocess
|
||||
|
@ -1,296 +0,0 @@
|
||||
type region
|
||||
type 'a reg
|
||||
type lexeme = string reg
|
||||
|
||||
(* Tokens *)
|
||||
|
||||
type integer = [`Integer of lexeme reg]
|
||||
type natural = [`Natural of lexeme reg]
|
||||
type ident = [`Ident of lexeme reg]
|
||||
type uident = [`Uident of lexeme reg]
|
||||
type chr = [`Chr of lexeme reg]
|
||||
type str = [`Str of lexeme reg]
|
||||
|
||||
type bool_or = [`bool_or of lexeme reg]
|
||||
type bool_and = [`bool_and of lexeme reg]
|
||||
type lt = [`lt of lexeme reg]
|
||||
type le = [`le of lexeme reg]
|
||||
type gt = [`gt of lexeme reg]
|
||||
type ge = [`ge of lexeme reg]
|
||||
type eq = [`eq of lexeme reg]
|
||||
type ne = [`ne of lexeme reg]
|
||||
type cat = [`cat of lexeme reg]
|
||||
type cons = [`cons of lexeme reg]
|
||||
type plus = [`plus of lexeme reg]
|
||||
type minus = [`minus of lexeme reg]
|
||||
type times = [`times of lexeme reg]
|
||||
type slash = [`slash of lexeme reg]
|
||||
type div = [`div of lexeme reg]
|
||||
type kwd_mod = [`kwd_mod of lexeme reg]
|
||||
type uminus = [`uminus of lexeme reg]
|
||||
type kwd_not = [`kwd_not of lexeme reg]
|
||||
|
||||
type lpar = [`lpar of lexeme reg]
|
||||
type rpar = [`rpar of lexeme reg]
|
||||
type lbracket = [`lbracket of lexeme reg]
|
||||
type rbracket = [`rbracket of lexeme reg]
|
||||
type lbrace = [`lbrace of lexeme reg]
|
||||
type rbrace = [`rbrace of lexeme reg]
|
||||
type semi = [`semi of lexeme reg]
|
||||
type comma = [`comma of lexeme reg]
|
||||
type colon = [`colon of lexeme reg]
|
||||
type vbar = [`vbar of lexeme reg]
|
||||
type arrow = [`arrow of lexeme reg]
|
||||
type wild = [`wild of lexeme reg]
|
||||
|
||||
type kwd_and = [`kwd_and of lexeme reg]
|
||||
type kwd_begin = [`kwd_begin of lexeme reg]
|
||||
type kwd_else = [`kwd_else of lexeme reg]
|
||||
type kwd_end = [`kwd_end of lexeme reg]
|
||||
type kwd_false = [`kwd_false of lexeme reg]
|
||||
type kwd_fun = [`kwd_fun of lexeme reg]
|
||||
type kwd_if = [`kwd_if of lexeme reg]
|
||||
type kwd_in = [`kwd_in of lexeme reg]
|
||||
type kwd_let = [`kwd_let of lexeme reg]
|
||||
type kwd_list = [`kwd_list of lexeme reg]
|
||||
type kwd_map = [`kwd_map of lexeme reg]
|
||||
type kwd_match = [`kwd_match of lexeme reg]
|
||||
type kwd_of = [`kwd_of of lexeme reg]
|
||||
type kwd_set = [`kwd_set of lexeme reg]
|
||||
type kwd_then = [`kwd_then of lexeme reg]
|
||||
type kwd_true = [`kwd_true of lexeme reg]
|
||||
type kwd_type = [`kwd_type of lexeme reg]
|
||||
type kwd_with = [`kwd_with of lexeme reg]
|
||||
|
||||
type token =
|
||||
Integer of integer
|
||||
| Natural of natural
|
||||
| Ident of ident
|
||||
| Uident of uident
|
||||
| Chr of chr
|
||||
| Str of str
|
||||
| Bool_or of bool_or
|
||||
| Bool_and of bool_and
|
||||
| Lt of lt
|
||||
| Le of le
|
||||
| Gt of gt
|
||||
| Ge of ge
|
||||
| Eq of eq
|
||||
| Ne of ne
|
||||
| Cat of cat
|
||||
| Cons of cons
|
||||
| Plus of plus
|
||||
| Minus of minus
|
||||
| Times of times
|
||||
| Slash of slash
|
||||
| Div of div
|
||||
| Kwd_mod of kwd_mod
|
||||
| Uminus of uminus
|
||||
| Kwd_not of kwd_not
|
||||
| Lpar of lpar
|
||||
| Rpar of rpar
|
||||
| Lbracket of lbracket
|
||||
| Rbracket of rbracket
|
||||
| Lbrace of lbrace
|
||||
| Rbrace of rbrace
|
||||
| Semi of semi
|
||||
| Comma of comma
|
||||
| Colon of colon
|
||||
| Vbar of vbar
|
||||
| Arrow of arrow
|
||||
| Wild of wild
|
||||
| Kwd_and of kwd_and
|
||||
| Kwd_begin of kwd_begin
|
||||
| Kwd_else of kwd_else
|
||||
| Kwd_end of kwd_end
|
||||
| Kwd_false of kwd_false
|
||||
| Kwd_fun of kwd_fun
|
||||
| Kwd_if of kwd_if
|
||||
| Kwd_in of kwd_in
|
||||
| Kwd_let of kwd_let
|
||||
| Kwd_list of kwd_list
|
||||
| Kwd_map of kwd_map
|
||||
| Kwd_match of kwd_match
|
||||
| Kwd_of of kwd_of
|
||||
| Kwd_set of kwd_set
|
||||
| Kwd_then of kwd_then
|
||||
| Kwd_true of kwd_true
|
||||
| Kwd_type of kwd_type
|
||||
| Kwd_with of kwd_with
|
||||
|
||||
(* The following are meant to be part of a library *)
|
||||
|
||||
type 'item seq = 'item list
|
||||
type 'item nseq = 'item * 'item seq
|
||||
type ('item,'sep) nsepseq = 'item * ('sep * 'item) list
|
||||
type ('item,'sep) sepseq = ('item,'sep) nsepseq option
|
||||
type ('item,'sep) sep_or_term_list =
|
||||
('item,'sep) nsepseq * 'sep option
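(* Added worked example (not in the original source): with these definitions,
   the phrase "a; b; c" as a semicolon-separated sequence is a value of type
   [(ident, semi) nsepseq] with the shape (a, [(semi, b); (semi, c)]);
   [sepseq] additionally allows the empty sequence, and [sep_or_term_list]
   records an optional trailing separator. *)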
|
||||
|
||||
(* The following are specific to the present grammar *)
|
||||
|
||||
type 'item list_of__rec_0 = {
|
||||
lbracket__1 : lbracket;
|
||||
list_of__rec_0__2 : ('item, semi) nsepseq;
|
||||
rbracket__3 : rbracket
|
||||
}
|
||||
|
||||
type 'item list_of = [`List of 'item list_of__rec_0]
|
||||
|
||||
type 'item tuple__rec_0 = {
|
||||
item__1 : 'item;
|
||||
comma__2 : comma;
|
||||
tuple__rec_0__3 : ('item, comma) nsepseq
|
||||
}
|
||||
|
||||
type 'item tuple = [`Tuple of 'item tuple__rec_0]
|
||||
|
||||
type 'item par__rec_0 = {
|
||||
lpar__1 : lpar;
|
||||
item__2 : 'item;
|
||||
rpar__3 : rpar
|
||||
}
|
||||
|
||||
type 'item par = [`Par of 'item par__rec_0]
|
||||
|
||||
(* Non-recursive value declarations *)
|
||||
|
||||
type sub_irrefutable = [
|
||||
`P_Var of string
|
||||
| `P_Wild
|
||||
| `P_Unit
|
||||
| closed_irrefutable par
|
||||
]
|
||||
|
||||
and closed_irrefutable = [
|
||||
sub_irrefutable tuple
|
||||
| `P_SubI of sub_irrefutable (* `P_SubI necessary *)
|
||||
]
|
||||
|
||||
type irrefutable = [
|
||||
sub_irrefutable tuple
|
||||
| sub_irrefutable
|
||||
]
|
||||
|
||||
type let_binding__rec_1 = {
|
||||
variable__1 : variable;
|
||||
sub_irrefutable__nseq__2 : sub_irrefutable nseq;
|
||||
eq__3 : eq;
|
||||
expr__4 : expr
|
||||
}
|
||||
|
||||
type let_binding__rec_2 = {
|
||||
irrefutable__1 : irrefutable;
|
||||
eq__2 : eq;
|
||||
expr__3 : expr
|
||||
}
|
||||
|
||||
type let_binding = [
|
||||
`LetFun of let_binding__rec_1
|
||||
| `LetNonFun of let_binding__rec_2 (* `LetNonFun necessary *)
|
||||
]
|
||||
|
||||
type let_bindings = (let_binding, kwd_and) nsepseq
|
||||
|
||||
type let_declarations = {
|
||||
kwd_let : kwd_let;
|
||||
let_bindings : let_bindings
|
||||
}
|
||||
|
||||
(*
|
||||
type pattern = [
|
||||
`P_Cons of {sub_pattern: sub_pattern; cons: cons; tail: tail}
|
||||
| `P_Tuple
|
||||
*)
|
||||
|
||||
(* Type declarations *)
|
||||
|
||||
type type_name = ident
|
||||
type field_name = ident
|
||||
type constr = uident
|
||||
|
||||
type type_constr = [
|
||||
`T_Constr of ident
|
||||
| kwd_set
|
||||
| kwd_map
|
||||
| kwd_list
|
||||
]
|
||||
|
||||
type record_type = {
|
||||
lbrace : lbrace;
|
||||
record_type__2 : (field_decl, semi) sep_or_term_list;
|
||||
rbrace : rbrace
|
||||
}
|
||||
|
||||
and field_decl = {
|
||||
field_name : field_name;
|
||||
colon : colon;
|
||||
type_expr : type_expr
|
||||
}
|
||||
|
||||
and variant = {
|
||||
constr : constr;
|
||||
kwd_of : kwd_of;
|
||||
cartesian : cartesian
|
||||
}
|
||||
|
||||
and sum_type = {
|
||||
vbar_opt : vbar option;
|
||||
sum_type__2 : (variant, vbar) nsepseq
|
||||
}
|
||||
|
||||
and type_param__rec_1 = {
|
||||
core_type : core_type;
|
||||
type_constr : type_constr
|
||||
}
|
||||
|
||||
and type_param = [
|
||||
(type_expr, comma) nsepseq par
|
||||
| `T_App of type_param__rec_1
|
||||
]
|
||||
|
||||
and core_type__rec_1 = {
|
||||
type_param : type_param;
|
||||
type_constr : type_constr
|
||||
}
|
||||
|
||||
and core_type = [
|
||||
`T_Alias of type_name
|
||||
| `T_App of core_type__rec_1
|
||||
| cartesian par
|
||||
]
|
||||
|
||||
and fun_type__rec_0 = {
|
||||
core_type : core_type;
|
||||
arrow : arrow;
|
||||
fun_type : fun_type
|
||||
}
|
||||
|
||||
and fun_type = [
|
||||
`T_Fun of fun_type__rec_0
|
||||
| `T_Core of core_type (* `T_Core necessary *)
|
||||
]
|
||||
|
||||
and cartesian = (fun_type, times) nsepseq
|
||||
|
||||
and type_expr = [
|
||||
`T_Prod of cartesian
|
||||
| `T_Sum of sum_type
|
||||
| `T_Record of record_type
|
||||
]
|
||||
|
||||
type type_declaration = {
|
||||
kwd_type__1 : kwd_type;
|
||||
type_name__2 : type_name;
|
||||
eq__3 : eq;
|
||||
type_expr__4 : type_expr
|
||||
}
|
||||
|
||||
(* Entry *)
|
||||
|
||||
type statement = [
|
||||
`Let of let_declarations
|
||||
| `TypeDecl of type_declaration
|
||||
]
|
||||
|
||||
type program = statement list
|
@ -1,270 +0,0 @@
|
||||
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
|
||||
|
||||
(* LEXIS *)
|
||||
|
||||
let nl = ['\n' '\r']
|
||||
let blank = [' ' '\t']
|
||||
|
||||
let digit = ['0'-'9']
|
||||
let natural = digit | digit (digit | '_')* digit
|
||||
let integer = '-'? natural
|
||||
|
||||
let small = ['a'-'z']
|
||||
let capital = ['A'-'Z']
|
||||
let letter = small | capital
|
||||
|
||||
let ichar = letter | digit | ['_' '\'']
|
||||
let ident = small ichar* | '_' ichar+
|
||||
let uident = capital ichar*
|
||||
|
||||
let hexa = digit | ['A'-'F']
|
||||
let byte = hexa hexa
|
||||
|
||||
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
|
||||
let string
|
||||
let char_set = [^'\'' '\\'] # nl
|
||||
| "\\'" | esc | "\\x" byte | "\\0" digit digit
|
||||
let char = "'" char_set "'"
|
||||
|
||||
|
||||
(* SYNTAX *)
|
||||
|
||||
(* Helpers *)
|
||||
|
||||
(* The following are meant to be part of a library *)
|
||||
|
||||
sep_or_term_list<item,sep> ::=
|
||||
item sep ...
|
||||
| (item sep)+
|
||||
|
||||
seq<item> ::= nseq<item>?
|
||||
|
||||
nseq<item> ::= item seq<item>
|
||||
|
||||
nsepseq<item,sep> ::=
|
||||
item
|
||||
| item sep nsepseq<item,sep>
|
||||
|
||||
sepseq<item,sep> ::= nsepseq<item,sep>?
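(* Added example (not in the original grammar): with ";" as the separator,
   sep_or_term_list<field_decl,";"> matches both "f1 : t1; f2 : t2" and the
   terminated form "f1 : t1; f2 : t2;", which is exactly how record_type
   uses it below. *)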
|
||||
|
||||
(* The following are specific to the present grammar *)
|
||||
|
||||
list_of<item> ::= "[" item ";" ... "]"
|
||||
|
||||
csv<item> ::= item "," item "," ...
|
||||
|
||||
(* Entry *)
|
||||
|
||||
program ::= statement* EOF
|
||||
|
||||
statement ::=
|
||||
let_declarations
|
||||
| type_declaration
|
||||
|
||||
(* Type declarations *)
|
||||
|
||||
type_declaration ::= "type" type_name "=" type_expr
|
||||
|
||||
type_name == ident
|
||||
|
||||
type_expr ::=
|
||||
cartesian
|
||||
| sum_type
|
||||
| record_type
|
||||
|
||||
cartesian ::= fun_type "*" ...
|
||||
|
||||
fun_type ::=
|
||||
core_type "->" fun_type
|
||||
| core_type
|
||||
|
||||
core_type ::=
|
||||
type_name
|
||||
| type_param type_constr
|
||||
| "(" cartesian ")"
|
||||
|
||||
type_param ==
|
||||
core_type type_constr
|
||||
| type_tuple type_constr
|
||||
|
||||
type_constr == type_name
|
||||
|
||||
type_tuple ::= "(" type_expr "," ... ")"
|
||||
|
||||
sum_type ::= variant "|" ...
|
||||
|
||||
variant ::= constr "of" cartesian
|
||||
|
||||
constr == uident
|
||||
|
||||
record_type ::=
|
||||
"{" sep_or_term_list<field_decl, ";"> "}"
|
||||
|
||||
field_decl ::= field_name ":" type_expr
|
||||
|
||||
field_name == ident
|
||||
|
||||
(* Non-recursive value declarations *)
|
||||
|
||||
let_declarations ::= "let" let_bindings
|
||||
|
||||
let_bindings := let_binding "and" ...
|
||||
|
||||
let_binding ::=
|
||||
value_name pattern+ "=" expr
|
||||
| let_lhs "=" expr
|
||||
|
||||
value_name == ident
|
||||
|
||||
(* Patterns *)
|
||||
|
||||
let_lhs ::=
|
||||
pattern "::" cons_pat
|
||||
| pattern "," pattern "," ...
|
||||
| core_pattern
|
||||
|
||||
core_pattern ::=
|
||||
variable
|
||||
| "_"
|
||||
| "(" ")"
|
||||
| number
|
||||
| "true"
|
||||
| "false"
|
||||
| string
|
||||
| list_of<cons_pat>
|
||||
| "(" ptuple ")"
|
||||
| constr core_pattern
|
||||
|
||||
variable == ident
|
||||
number == int
|
||||
|
||||
ptuple ::= csv<cons_pat>
|
||||
|
||||
unit ::= "(" ")"
|
||||
|
||||
cons_pat ::=
|
||||
pattern "::" cons_pat
|
||||
| pattern
|
||||
|
||||
pattern ::=
|
||||
"(" cons_pat ")"
|
||||
| core_pattern
|
||||
|
||||
(* Expressions *)
|
||||
|
||||
expr ::=
|
||||
base_cond__open<expr>
|
||||
| match_expr<base_cond>
|
||||
|
||||
base_cond__open<x> ::=
|
||||
base_expr<x>
|
||||
| conditional<x>
|
||||
|
||||
base_cond ::= base_cond__open<base_cond>
|
||||
|
||||
base_expr<right_expr> ::=
|
||||
let_expr<right_expr>
|
||||
| fun_expr<right_expr>
|
||||
| csv<op_expr>
|
||||
| op_expr
|
||||
|
||||
conditional<right_expr> ::=
|
||||
if_then_else<right_expr>
|
||||
| if_then<right_expr>
|
||||
|
||||
if_then<right_expr> ::= "if" expr "then" right_expr
|
||||
|
||||
if_then_else<right_expr> ::=
|
||||
"if" expr "then" closed_if "else" right_expr
|
||||
|
||||
base_if_then_else__open<x> ::=
|
||||
base_expr<x>
|
||||
| if_then_else<x>
|
||||
|
||||
base_if_then_else ::=
|
||||
base_if_then_else__open<base_if_then_else>
|
||||
|
||||
closed_if ::=
|
||||
base_if_then_else__open<closed_if>
|
||||
| match_expr<base_if_then_else>
|
||||
|
||||
match_expr<right_expr> ::=
|
||||
"match" expr "with" cases<right_expr>
|
||||
|
||||
cases<right_expr> ::=
|
||||
case<right_expr>
|
||||
| cases<base_cond> "|" case<right_expr>
|
||||
|
||||
case<right_expr> ::= let_lhs "->" right_expr
|
||||
|
||||
let_in<right_expr> ::= "let" par_let "in" right_expr
|
||||
|
||||
fun_expr<right_expr> ::= "fun" pattern+ "->" right_expr
|
||||
|
||||
op_expr ::=
|
||||
op_expr "||" conj_expr
|
||||
| conj_expr
|
||||
|
||||
conj_expr ::=
|
||||
conj_expr "&&" comp_expr
|
||||
| comp_expr
|
||||
|
||||
comp_expr ::=
|
||||
comp_expr "<" cat_expr
|
||||
| comp_expr "<=" cat_expr
|
||||
| comp_expr ">" cat_expr
|
||||
| comp_expr ">=" cat_expr
|
||||
| comp_expr "=" cat_expr
|
||||
| comp_expr "<>" cat_expr
|
||||
| cat_expr
|
||||
|
||||
cat_expr ::=
|
||||
cons_expr "^" cat_expr
|
||||
| cons_expr
|
||||
|
||||
cons_expr ::=
|
||||
add_expr "::" cons_expr
|
||||
| add_expr
|
||||
|
||||
add_expr ::=
|
||||
add_expr "+" mult_expr
|
||||
| add_expr "-" mult_expr
|
||||
| mult_expr
|
||||
|
||||
mult_expr ::=
|
||||
mult_expr "*" unary_expr
|
||||
| mult_expr "div" unary_expr
|
||||
| mult_expr "mod" unary_expr
|
||||
| unary_expr
|
||||
|
||||
unary_expr ::=
|
||||
"-" core_expr
|
||||
| "not" core_expr
|
||||
| call_expr
|
||||
|
||||
call_expr ::=
|
||||
call_expr core_expr
|
||||
| core_expr
|
||||
|
||||
core_expr ::=
|
||||
number
|
||||
| module_name "." variable
|
||||
| string
|
||||
| char
|
||||
| "()"
|
||||
| "false"
|
||||
| "true"
|
||||
| list_of<expr>
|
||||
| "(" expr ")"
|
||||
| constr
|
||||
| sequence
|
||||
| record_expr
|
||||
|
||||
module_name == uident
|
||||
|
||||
record_expr ::=
|
||||
"{" sep_or_term_list(field_assignment,";") "}"
|
||||
|
||||
field_assignment ::= field_name "=" expr
|
||||
|
||||
sequence ::= "begin" (expr ";" ...)? "end"
|
@ -1,270 +0,0 @@
|
||||
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
|
||||
|
||||
(* LEXIS *)
|
||||
|
||||
let nl = ['\n' '\r']
|
||||
let blank = [' ' '\t']
|
||||
|
||||
let digit = ['0'-'9']
|
||||
let natural = digit | digit (digit | '_')* digit
|
||||
token int = '-'? natural
|
||||
|
||||
let small = ['a'-'z']
|
||||
let capital = ['A'-'Z']
|
||||
let letter = small | capital
|
||||
|
||||
let ichar = letter | digit | ['_' '\'']
|
||||
token ident = small ichar* | '_' ichar+
|
||||
token uident = capital ichar*
|
||||
|
||||
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
|
||||
token string
|
||||
|
||||
let hexa = digit | ['A'-'F']
|
||||
let byte = hexa hexa
|
||||
let char_set = [^'\'' '\\'] # nl
|
||||
| "\\'" | esc | "\\x" byte | "\\0" digit digit
|
||||
token char = "'" char_set "'"
|
||||
|
||||
|
||||
(* SYNTAX *)
|
||||
|
||||
(* Helpers *)
|
||||
|
||||
(* The following are meant to be part of a library *)
|
||||
|
||||
sep_or_term_list<item,sep> ::=
|
||||
item sep ...
|
||||
| (item sep)+
|
||||
|
||||
seq<item> ::= nseq<item>?
|
||||
|
||||
nseq<item> ::= item seq<item>
|
||||
|
||||
nsepseq<item,sep> ::=
|
||||
item
|
||||
| item sep nsepseq<item,sep>
|
||||
|
||||
sepseq<item,sep> ::= nsepseq<item,sep>?
|
||||
|
||||
(* The following are specific to the present grammar *)
|
||||
|
||||
list_of<item> ::= "[" item ";" ... "]"
|
||||
|
||||
csv<item> ::= item "," item "," ...
|
||||
|
||||
(* Entry *)
|
||||
|
||||
program ::= statement* EOF
|
||||
|
||||
statement ::=
|
||||
let_declarations
|
||||
| type_declaration
|
||||
|
||||
(* Type declarations *)
|
||||
|
||||
type_declaration ::= "type" type_name "=" type_expr
|
||||
|
||||
type_name == ident
|
||||
|
||||
type_expr ::=
|
||||
cartesian
|
||||
| sum_type
|
||||
| record_type
|
||||
|
||||
cartesian ::= fun_type "*" ...
|
||||
|
||||
fun_type ::=
|
||||
core_type "->" fun_type
|
||||
| core_type
|
||||
|
||||
core_type ::=
|
||||
type_name
|
||||
| type_param type_constr
|
||||
| "(" cartesian ")"
|
||||
|
||||
type_param ==
|
||||
core_type type_constr
|
||||
| type_tuple type_constr
|
||||
|
||||
type_constr == type_name
|
||||
|
||||
type_tuple ::= "(" type_expr "," ... ")"
|
||||
|
||||
sum_type ::= variant "|" ...
|
||||
|
||||
variant ::= constr "of" cartesian
|
||||
|
||||
constr == uident
|
||||
|
||||
record_type ::=
|
||||
"{" sep_or_term_list<field_decl,";"> "}"
|
||||
|
||||
field_decl ::= field_name ":" type_expr
|
||||
|
||||
field_name == ident
|
||||
|
||||
(* Non-recursive value declarations *)
|
||||
|
||||
let_declarations ::= "let" let_bindings
|
||||
|
||||
let_bindings := let_binding "and" ...
|
||||
|
||||
let_binding ::=
|
||||
value_name pattern+ "=" expr
|
||||
| let_lhs "=" expr
|
||||
|
||||
value_name == ident
|
||||
|
||||
(* Patterns *)
|
||||
|
||||
let_lhs ::=
|
||||
pattern "::" cons_pat
|
||||
| pattern "," pattern "," ...
|
||||
| core_pattern
|
||||
|
||||
core_pattern ::=
|
||||
variable
|
||||
| "_"
|
||||
| "(" ")"
|
||||
| number
|
||||
| "true"
|
||||
| "false"
|
||||
| string
|
||||
| list_of<cons_pat>
|
||||
| "(" ptuple ")"
|
||||
| constr core_pattern
|
||||
|
||||
variable == ident
|
||||
number == int
|
||||
|
||||
ptuple ::= csv<cons_pat>
|
||||
|
||||
unit ::= "(" ")"
|
||||
|
||||
cons_pat ::=
|
||||
pattern "::" cons_pat
|
||||
| pattern
|
||||
|
||||
pattern ::=
|
||||
"(" cons_pat ")"
|
||||
| core_pattern
|
||||
|
||||
(* Expressions *)
|
||||
|
||||
expr ::=
|
||||
base_cond__open<expr>
|
||||
| match_expr<base_cond>
|
||||
|
||||
base_cond__open<x> ::=
|
||||
base_expr<x>
|
||||
| conditional<x>
|
||||
|
||||
base_cond ::= base_cond__open<base_cond>
|
||||
|
||||
base_expr<right_expr> ::=
|
||||
let_expr<right_expr>
|
||||
| fun_expr<right_expr>
|
||||
| csv<op_expr>
|
||||
| op_expr
|
||||
|
||||
conditional<right_expr> ::=
|
||||
if_then_else<right_expr>
|
||||
| if_then<right_expr>
|
||||
|
||||
if_then<right_expr> ::= "if" expr "then" right_expr
|
||||
|
||||
if_then_else<right_expr> ::=
|
||||
"if" expr "then" closed_if "else" right_expr
|
||||
|
||||
base_if_then_else__open<x> ::=
|
||||
base_expr<x>
|
||||
| if_then_else<x>
|
||||
|
||||
base_if_then_else ::=
|
||||
base_if_then_else__open<base_if_then_else>
|
||||
|
||||
closed_if ::=
|
||||
base_if_then_else__open<closed_if>
|
||||
| match_expr<base_if_then_else>
|
||||
|
||||
match_expr<right_expr> ::=
|
||||
"match" expr "with" cases<right_expr>
|
||||
|
||||
cases<right_expr> ::=
|
||||
case<right_expr>
|
||||
| cases<base_cond> "|" case<right_expr>
|
||||
|
||||
case<right_expr> ::= let_lhs "->" right_expr
|
||||
|
||||
let_in<right_expr> ::= "let" par_let "in" right_expr
|
||||
|
||||
fun_expr<right_expr> ::= "fun" pattern+ "->" right_expr
|
||||
|
||||
op_expr ::=
|
||||
op_expr "||" conj_expr
|
||||
| conj_expr
|
||||
|
||||
conj_expr ::=
|
||||
conj_expr "&&" comp_expr
|
||||
| comp_expr
|
||||
|
||||
comp_expr ::=
|
||||
comp_expr "<" cat_expr
|
||||
| comp_expr "<=" cat_expr
|
||||
| comp_expr ">" cat_expr
|
||||
| comp_expr ">=" cat_expr
|
||||
| comp_expr "=" cat_expr
|
||||
| comp_expr "<>" cat_expr
|
||||
| cat_expr
|
||||
|
||||
cat_expr ::=
|
||||
cons_expr "^" cat_expr
|
||||
| cons_expr
|
||||
|
||||
cons_expr ::=
|
||||
add_expr "::" cons_expr
|
||||
| add_expr
|
||||
|
||||
add_expr ::=
|
||||
add_expr "+" mult_expr
|
||||
| add_expr "-" mult_expr
|
||||
| mult_expr
|
||||
|
||||
mult_expr ::=
|
||||
mult_expr "*" unary_expr
|
||||
| mult_expr "div" unary_expr
|
||||
| mult_expr "mod" unary_expr
|
||||
| unary_expr
|
||||
|
||||
unary_expr ::=
|
||||
"-" core_expr
|
||||
| "not" core_expr
|
||||
| call_expr
|
||||
|
||||
call_expr ::=
|
||||
call_expr core_expr
|
||||
| core_expr
|
||||
|
||||
core_expr ::=
|
||||
number
|
||||
| module_name "." variable
|
||||
| string
|
||||
| char
|
||||
| "()"
|
||||
| "false"
|
||||
| "true"
|
||||
| list_of<expr>
|
||||
| "(" expr ")"
|
||||
| constr
|
||||
| sequence
|
||||
| record_expr
|
||||
|
||||
module_name == uident
|
||||
|
||||
record_expr ::=
|
||||
"{" sep_or_term_list<field_assignment,";"> "}"
|
||||
|
||||
field_assignment ::= field_name "=" expr
|
||||
|
||||
sequence ::= "begin" (expr ";" ...)? "end"
|
@ -1,249 +0,0 @@
|
||||
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
|
||||
|
||||
(* LEXIS *)
|
||||
|
||||
let nl = ['\n' '\r']
|
||||
let blank = [' ' '\t']
|
||||
|
||||
let digit = ['0'-'9']
|
||||
let natural = digit | digit (digit | '_')* digit
|
||||
token int = '-'? natural
|
||||
|
||||
let small = ['a'-'z']
|
||||
let capital = ['A'-'Z']
|
||||
let letter = small | capital
|
||||
|
||||
let ichar = letter | digit | ['_' '\'']
|
||||
token ident = small ichar* | '_' ichar+
|
||||
token uident = capital ichar*
|
||||
|
||||
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
|
||||
let hexa = digit | ['A'-'F']
|
||||
let byte = hexa hexa
|
||||
let char_set = [^'\'' '\\'] # nl
|
||||
| "\\'" | esc | "\\x" byte | "\\0" digit digit
|
||||
token char = "'" char_set "'"
|
||||
|
||||
token string
|
||||
|
||||
|
||||
(* SYNTAX *)
|
||||
|
||||
(* Helpers *)
|
||||
|
||||
(* The following are meant to be part of a library *)
|
||||
|
||||
sep_or_term_list<item,sep> ::=
|
||||
item sep etc.
|
||||
| (item sep)+
|
||||
|
||||
seq<item> ::= nseq<item>?
|
||||
|
||||
nseq<item> ::= item seq<item>
|
||||
|
||||
nsepseq<item,sep> ::=
|
||||
item
|
||||
| item sep nsepseq<item,sep>
|
||||
|
||||
sepseq<item,sep> ::= nsepseq<item,sep>?
|
||||
|
||||
(* The following are specific to the present grammar *)
|
||||
|
||||
list_of<item> ::= "[" item ";" etc. "]"
|
||||
|
||||
csv<item> ::= item "," item "," etc.
|
||||
|
||||
(* Entry *)
|
||||
|
||||
program ::= statement*
|
||||
|
||||
statement ::=
|
||||
let_declarations
|
||||
| type_declaration
|
||||
|
||||
(* Type declarations *)
|
||||
|
||||
type_declaration ::= "type" type_name "=" type_expr
|
||||
|
||||
type_name == ident
|
||||
|
||||
type_expr ::=
|
||||
cartesian
|
||||
| sum_type
|
||||
| record_type
|
||||
|
||||
cartesian ::= fun_type "*" etc.
|
||||
|
||||
fun_type ::=
|
||||
core_type "->" fun_type
|
||||
| core_type
|
||||
|
||||
core_type ::=
|
||||
type_name
|
||||
| type_param type_constr
|
||||
| "(" cartesian ")"
|
||||
|
||||
type_param ==
|
||||
core_type type_constr
|
| type_tuple type_constr

type_constr == type_name

type_tuple ::= "(" type_expr "," etc. ")"

sum_type ::= "|"? variant "|" etc.

variant ::= constr "of" cartesian

constr == uident

record_type ::=
  "{" sep_or_term_list<field_decl,";"> "}"

field_decl ::= field_name ":" type_expr

field_name == ident

(* Non-recursive value declarations *)

let_declarations ::= "let" let_bindings

let_bindings := let_binding "and" etc.

let_binding ::=
  value_name pattern+ "=" expr
| let_lhs "=" expr

value_name == ident

(* Patterns *)

let_lhs ::=
  pattern "::" cons_pat
| pattern "," pattern "," etc.
| core_pattern

core_pattern ::=
  variable
| "_"
| "(" ")"
| number
| "true"
| "false"
| string
| list_of<cons_pat>
| "(" ptuple ")"
| constr core_pattern

variable == ident
number == int

ptuple ::= csv<cons_pat>

unit ::= "(" ")"

cons_pat ::=
  pattern "::" cons_pat
| pattern

pattern ::=
  "(" cons_pat ")"
| core_pattern

(* Expressions *)

expr ::=
  base_cond__<expr>
| match_expr<base_cond>

base_cond__<x> ::=
  base_expr<x>
| conditional<x>

base_cond ::= base_cond__<base_cond>

base_expr<right_expr> ::=
  let_expr<right_expr>
| fun_expr<right_expr>
| csv<op_expr>
| op_expr

conditional<right_expr> ::=
  if_then_else<right_expr>
| if_then<right_expr>

if_then<right_expr> ::= "if" expr "then" right_expr

if_then_else<right_expr> ::=
  "if" expr "then" closed_if "else" right_expr

base_if_then_else__<x> ::=
  base_expr<x>
| if_then_else<x>

base_if_then_else ::=
  base_if_then_else__<base_if_then_else>

closed_if ::=
  base_if_then_else__<closed_if>
| match_expr<base_if_then_else>

match_expr<right_expr> ::=
  "match" expr "with" cases<right_expr>

cases<right_expr> ::=
  case<right_expr>
| cases<base_cond> "|" case<right_expr>

case<right_expr> ::= let_lhs "->" right_expr

let_in<right_expr> ::= "let" par_let "in" right_expr

fun_expr<right_expr> ::= "fun" pattern+ "->" right_expr

op_expr ::=
  op_expr "||" %left %prec1 op_expr
| op_expr "&&" %left %prec2 op_expr
| op_expr "<" %left %prec3 op_expr
| op_expr "<=" %left %prec3 op_expr
| op_expr ">" %left %prec3 op_expr
| op_expr ">=" %left %prec3 op_expr
| op_expr "=" %left %prec3 op_expr
| op_expr "<>" %left %prec3 op_expr
| op_expr "^" %right %prec4 op_expr
| op_expr "::" %right %prec5 op_expr
| op_expr "+" %left %prec6 op_expr
| op_expr "-" %left %prec6 op_expr
| op_expr "*" %left %prec7 op_expr
| op_expr "div" %left %prec7 op_expr
| op_expr "mod" %left %prec7 op_expr
| "-" %prec8 op_expr
| "not" %prec8 op_expr
| call_expr

call_expr ::=
  call_expr core_expr
| core_expr

core_expr ::=
  number
| module_name "." variable
| string
| char
| "()"
| "false"
| "true"
| list_of<expr>
| "(" expr ")"
| constr
| sequence
| record_expr

module_name == uident

record_expr ::=
  "{" sep_or_term_list<field_assignment,";"> "}"

field_assignment ::= field_name "=" expr

sequence ::= "begin" sep_or_term_list<expr,";">? "end"
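For orientation, here is a small program in the OCaml-like concrete syntax these rules describe. It is purely illustrative: the names `action`, `step` and `run` are made up and do not come from the repository.

```ocaml
(* Hypothetical example of source covered by the grammar above:
   a sum type, a pattern-matching function, and a fun/if expression. *)
type action =
  Increment of int
| Decrement of int

let step a n =
  match a with
    Increment k -> n + k
  | Decrement k -> n - k

let run = fun n -> if n < 0 then 0 else step (Increment 1) n
```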
@ -1,336 +0,0 @@
|
||||
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
|
||||
|
||||
(* LEXIS *)
|
||||
|
||||
let digit = ['0'-'9']
|
||||
let natural = digit | digit (digit | '_')* digit
|
||||
%token integer = '-'? natural
|
||||
%token natural = natural 'n'
|
||||
|
||||
let small = ['a'-'z']
|
||||
let capital = ['A'-'Z']
|
||||
let letter = small | capital
|
||||
|
||||
let ichar = letter | digit | ['_' '\'']
|
||||
%token ident = small ichar* | '_' ichar+
|
||||
%token uident = capital ichar*
|
||||
|
||||
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
|
||||
let hexa = digit | ['A'-'F']
|
||||
let byte = hexa hexa
|
||||
let char_set = [^'\'' '\\'] # nl
|
||||
| "\\'" | esc | "\\x" byte | "\\0" digit digit
|
||||
%token chr = "'" char_set "'"
|
||||
|
||||
%token str
|
||||
|
||||
%token bool_or = "||" %left %prec1
|
||||
%token bool_and = "&&" %left %prec2
|
||||
%token lt = "<" %left %prec3
|
||||
%token le = "<=" %left %prec3
|
||||
%token gt = ">" %left %prec3
|
||||
%token ge = ">=" %left %prec3
|
||||
%token eq = "=" %left %prec3
|
||||
%token ne = "<>" %left %prec3
|
||||
%token cat = "^" %right %prec4
|
||||
%token cons = "::" %right %prec5
|
||||
%token plus = "+" %left %prec6
|
||||
%token minus = "-" %left %prec6
|
||||
%token times = "*" %left %prec7
|
||||
%token slash = "/" %left %prec7
|
||||
%token kwd_div = "div" %left %prec7
|
||||
%token kwd_mod = "mod" %left %prec7
|
||||
%token uminus = "-" %prec8
|
||||
%token kwd_not = "not" %prec8
|
||||
|
||||
%token lpar = "("
|
||||
%token rpar = ")"
|
||||
%token lbracket = "["
|
||||
%token rbracket = "]"
|
||||
%token lbrace = "{"
|
||||
%token rbrace = "}"
|
||||
%token semi = ";"
|
||||
%token comma = ","
|
||||
%token colon = ":"
|
||||
%token vbar = "|"
|
||||
%token arrow = "->"
|
||||
%token wild = "_"
|
||||
|
||||
(* SYNTAX *)
|
||||
|
||||
(* Helpers *)
|
||||
|
||||
(* The following are meant to be part of a library *)
|
||||
|
||||
%ocaml "Utils"
|
||||
type 'item seq = 'item list
|
||||
type 'item nseq = 'item * 'item seq
|
||||
type ('item,'sep) nsepseq = 'item * ('sep * 'item) list
|
||||
type ('item,'sep) sepseq = ('item,'sep) nsepseq option
|
||||
type ('item,'sep) sep_or_term_list =
|
||||
('item,'sep) nsepseq * 'sep option
|
||||
%end
|
||||
|
||||
%menhir_decl "Parser"
|
||||
%start program interactive_expr
|
||||
%type <AST.t> program
|
||||
%type <AST.expr> interactive_expr
|
||||
%type <('item,'sep) sep_or_term_list> sep_or_term_list
|
||||
%end
|
||||
|
||||
%menhir_rule "Parser"
|
||||
seq(item):
|
||||
(**) { [] }
|
||||
| X seq(item) { $1::$2 }
|
||||
|
||||
nseq(item):
|
||||
item seq(item) { $1,$2 }
|
||||
|
||||
nsepseq(item,sep):
|
||||
item { $1, [] }
|
||||
| item sep nsepseq(item,sep) { let h,t = $3 in $1, ($2,h)::t }
|
||||
|
||||
sepseq(item,sep):
|
||||
(**) { None }
|
||||
| nsepseq(item,sep) { Some $1 }
|
||||
|
||||
sep_or_term_list(item,sep):
|
||||
nsepseq(item,sep) {
|
||||
$1, None
|
||||
}
|
||||
| nseq(item sep {$1,$2}) {
|
||||
let (first,sep), tail = $1 in
|
||||
let rec trans (seq, prev_sep as acc) = function
|
||||
[] -> acc
|
||||
| (item,next_sep)::others ->
|
||||
trans ((prev_sep,item)::seq, next_sep) others in
|
||||
let list, term = trans ([],sep) tail
|
||||
in (first, List.rev list), Some term }
|
||||
%end
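As a rough sketch of what the helper rules above produce, the `Utils` sequence types declared in this grammar file can be written out by hand; the concrete items and separators below are invented for illustration only.

```ocaml
(* Types copied from the Utils block of this grammar file. *)
type 'item seq = 'item list
type 'item nseq = 'item * 'item seq
type ('item,'sep) nsepseq = 'item * ('sep * 'item) list
type ('item,'sep) sepseq = ('item,'sep) nsepseq option
type ('item,'sep) sep_or_term_list = ('item,'sep) nsepseq * 'sep option

(* "a; b; c"  — separated list, no trailing ";" recorded. *)
let separated : (string, char) sep_or_term_list =
  ("a", [(';', "b"); (';', "c")]), None

(* "a; b; c;" — same items, trailing ";" recorded in the option. *)
let terminated : (string, char) sep_or_term_list =
  ("a", [(';', "b"); (';', "c")]), Some ';'
```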
|
||||
|
||||
(* The following are specific to the present grammar *)
|
||||
|
||||
list<item> ::= "[" item ";" etc. "]"
|
||||
|
||||
tuple<item> ::= item "," item "," etc.
|
||||
|
||||
par<item> ::= "(" item ")"
|
||||
|
||||
(* Entry *)
|
||||
|
||||
program == statement*
|
||||
|
||||
statement ::=
|
||||
let_declarations { `Let }
|
||||
| type_declaration { `TypeDecl }
|
||||
|
||||
(* Type declarations *)
|
||||
|
||||
type_declaration == "type" type_name "=" type_expr
|
||||
|
||||
type_name == ident
|
||||
|
||||
type_expr ::=
|
||||
cartesian { `T_Prod }
|
||||
| sum_type { `T_Sum }
|
||||
| record_type { `T_Record }
|
||||
|
||||
cartesian == fun_type "*" etc.
|
||||
|
||||
fun_type ::=
|
||||
core_type "->" fun_type { `T_Fun }
|
||||
| core_type { `T_Core }
|
||||
|
||||
core_type ::=
|
||||
type_name { `T_Alias }
|
||||
| type_param type_constr { `T_App }
|
||||
| par<cartesian>
|
||||
|
||||
type_param ::=
|
||||
par<type_expr "," etc.>
|
||||
| core_type type_constr { `T_App }
|
||||
|
||||
type_constr ::=
|
||||
ident { `T_Constr }
|
||||
| "set"
|
||||
| "map"
|
||||
| "list"
|
||||
|
||||
sum_type == "|"? variant "|" etc.
|
||||
|
||||
variant == constr "of" cartesian
|
||||
|
||||
constr == uident
|
||||
|
||||
record_type ==
|
||||
"{" sep_or_term_list<field_decl,";"> "}"
|
||||
|
||||
field_decl == field_name ":" type_expr
|
||||
|
||||
field_name == ident
|
||||
|
||||
(* Non-recursive value declarations *)
|
||||
|
||||
let_declarations == "let" let_bindings
|
||||
|
||||
let_bindings == let_binding "and" etc.
|
||||
|
||||
let_binding ::=
|
||||
variable sub_irrefutable+ "=" expr { `LetFun }
|
||||
| irrefutable "=" expr { `LetNonFun }
|
||||
|
||||
(* Patterns *)
|
||||
|
||||
irrefutable ::=
|
||||
tuple<sub_irrefutable> { `P_Tuple }
|
||||
| sub_irrefutable
|
||||
|
||||
sub_irrefutable ::=
|
||||
variable { `P_Var }
|
||||
| "_" { `P_Wild }
|
||||
| unit { `P_Unit }
|
||||
| par<closed_irrefutable>
|
||||
|
||||
closed_irrefutable ::=
|
||||
tuple<sub_irrefutable>
|
||||
| sub_irrefutable { `P_SubI }
|
||||
|
||||
pattern ::=
|
||||
sub_pattern "::" tail { `P_Cons }
|
||||
| tuple<sub_pattern> { `P_Tuple }
|
||||
| core_pattern { `P_Core }
|
||||
|
||||
sub_pattern ::=
|
||||
par<tail>
|
||||
| core_pattern { `P_Core }
|
||||
|
||||
core_pattern ::=
|
||||
variable { `P_Var }
|
||||
| "_" { `P_Wild }
|
||||
| unit { `P_Unit }
|
||||
| integer { `P_Int }
|
||||
| natural { `P_Nat }
|
||||
| "true" { `P_True }
|
||||
| "false" { `P_False }
|
||||
| str { `P_Str }
|
||||
| chr { `P_Chr }
|
||||
| list<tail> { `P_List }
|
||||
| constr sub_pattern { `P_Constr }
|
||||
| record_pattern { `P_Record }
|
||||
| par<tuple<tail>>
|
||||
|
||||
variable == ident
|
||||
|
||||
record_pattern ::=
|
||||
"{" sep_or_term_list<field_pattern,";"> "}"
|
||||
|
||||
field_pattern ::= field_name "=" sub_pattern
|
||||
|
||||
unit ::= "(" ")"
|
||||
|
||||
tail ::=
|
||||
sub_pattern "::" tail
|
||||
| sub_pattern
|
||||
|
||||
(* Expressions *)
|
||||
|
||||
expr ::=
|
||||
base_cond__<expr>
|
||||
| match_expr<base_cond>
|
||||
|
||||
base_cond__<x> ::=
|
||||
base_expr<x>
|
||||
| conditional<x>
|
||||
|
||||
base_cond == base_cond__<base_cond>
|
||||
|
||||
base_expr<right_expr> ::=
|
||||
let_expr<right_expr>
|
||||
| fun_expr<right_expr>
|
||||
| csv<op_expr>
|
||||
|
||||
conditional<right_expr> ::=
|
||||
if_then_else<right_expr>
|
||||
| if_then<right_expr>
|
||||
|
||||
if_then<right_expr> ::=
|
||||
"if" expr "then" right_expr { `IfThen }
|
||||
|
||||
if_then_else<right_expr> ::=
|
||||
"if" expr "then" closed_if "else" right_expr { `IfThenElse }
|
||||
|
||||
base_if_then_else__<x> ::=
|
||||
base_expr<x>
|
||||
| if_then_else<x>
|
||||
|
||||
base_if_then_else ::=
|
||||
base_if_then_else__<base_if_then_else>
|
||||
|
||||
closed_if ::=
|
||||
base_if_then_else__<closed_if>
|
||||
| match_expr<base_if_then_else>
|
||||
|
||||
match_expr<right_expr> ::=
|
||||
"match" expr "with" cases<right_expr>
|
||||
|
||||
cases<right_expr> ::=
|
||||
case<right_expr>
|
||||
| cases<base_cond> "|" case<right_expr>
|
||||
|
||||
case<right_expr> ::= pattern "->" right_expr
|
||||
|
||||
let_in<right_expr> ::= "let" par_let "in" right_expr
|
||||
|
||||
fun_expr<right_expr> ::= "fun" sub_pattern+ "->" right_expr
|
||||
|
||||
op_expr ::=
|
||||
op_expr "||" op_expr
|
||||
| op_expr "&&" op_expr
|
||||
| op_expr "<" op_expr
|
||||
| op_expr "<=" op_expr
|
||||
| op_expr ">" op_expr
|
||||
| op_expr ">=" op_expr
|
||||
| op_expr "=" op_expr
|
||||
| op_expr "<>" op_expr
|
||||
| op_expr "^" op_expr
|
||||
| op_expr "::" op_expr
|
||||
| op_expr "+" op_expr
|
||||
| op_expr "-" op_expr
|
||||
| op_expr "*" op_expr
|
||||
| op_expr "/" op_expr
|
||||
| op_expr "div" op_expr
|
||||
| op_expr "mod" op_expr
|
||||
| "-" op_expr
|
||||
| "not" op_expr
|
||||
| call_expr
|
||||
|
||||
call_expr ::=
|
||||
call_expr core_expr
|
||||
| core_expr
|
||||
|
||||
core_expr ::=
|
||||
variable
|
||||
| module_name "." path
|
||||
| unit
|
||||
| integer
|
||||
| natural
|
||||
| "false"
|
||||
| "true"
|
||||
| str
|
||||
| chr
|
||||
| constr
|
||||
| sequence
|
||||
| record_expr
|
||||
| list<expr>
|
||||
| par<expr>
|
||||
|
||||
module_name == uident
|
||||
|
||||
path == ident "." etc.
|
||||
|
||||
record_expr ::=
|
||||
"{" sep_or_term_list<field_assignment,";"> "}"
|
||||
|
||||
field_assignment ::= field_name "=" expr
|
||||
|
||||
sequence ::= "begin" sep_or_term_list<expr,";">? "end"
|
@ -1,6 +1,5 @@
module Pascaligo = Pascaligo
module Camligo = Parser_camligo
module Ligodity = Ligodity
module Cameligo = Cameligo
module Reasonligo = Reasonligo
@ -1,8 +1,8 @@
open Trace

module Parser = Parser_reasonligo.Parser
module AST = Parser_ligodity.AST
module ParserLog = Parser_ligodity.ParserLog
module AST = Parser_cameligo.AST
module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken)
@ -4,7 +4,7 @@
[@@@warning "-42"]

open Region
module AST = Parser_ligodity.AST
module AST = Parser_cameligo.AST
open AST
@ -105,9 +105,9 @@ let () =
if Utils.String.Set.mem "ast" options.verbose
then let buffer = Buffer.create 131 in
begin
Parser_ligodity.ParserLog.offsets := options.offsets;
Parser_ligodity.ParserLog.mode := options.mode;
Parser_ligodity.ParserLog.print_tokens buffer ast;
Parser_cameligo.ParserLog.offsets := options.offsets;
Parser_cameligo.ParserLog.mode := options.mode;
Parser_cameligo.ParserLog.print_tokens buffer ast;
Buffer.output_buffer stdout buffer
end
with
@ -11,13 +11,13 @@
(modules reasonligo LexToken Parser)
(libraries
parser_shared
parser_ligodity
parser_cameligo
str
simple-utils
tezos-utils
getopt
)
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_ligodity ))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo ))
)

(executable
@ -1,5 +1,5 @@
module Parser = Parser
module AST = Parser_ligodity.AST
module AST = Parser_cameligo.AST
module Lexer = Lexer
module LexToken = LexToken
module ParserLog = Parser_ligodity.ParserLog
module ParserLog = Parser_cameligo.ParserLog
@ -9,7 +9,7 @@ INTERNAL DOCUMENTATION OF THE SHARED PARSER FUNCTIONALITY
The module EvalOpt parses the command-line for options to the
parser. That action is performed as a side-effect when the module
is initialised at run-time: this is ugly and easy to fix. See
ligo/src/parser/ligodity/EvalOpt.ml{i} for the right way to do
ligo/src/parser/cameligo/EvalOpt.ml{i} for the right way to do
it. Ignore them: the file actually calling directly the parser is
ligo/src/parser/parser.ml. Note that, as a consequence, no option
is currently passed to the parser when building Pascaligo with
@ -3,7 +3,7 @@
open Trace
open Ast_simplified

module Raw = Parser.Ligodity.AST
module Raw = Parser.Cameligo.AST
module SMap = Map.String
module Option = Simple_utils.Option
(* TODO: move 1-parser/shared/Utils.ml{i} to Simple_utils/ *)
@ -132,7 +132,7 @@ end
open Errors

open Operators.Simplify.Ligodity
open Operators.Simplify.Cameligo

let r_split = Location.r_split
@ -381,7 +381,7 @@ let rec simpl_expression :
let default_action () =
let%bind cases = simpl_cases lst in
return @@ e_matching ~loc e cases in
(* Hack to take care of patterns introduced by `parser/ligodity/Parser.mly` in "norm_fun_expr". TODO: Still needed? *)
(* Hack to take care of patterns introduced by `parser/cameligo/Parser.mly` in "norm_fun_expr". TODO: Still needed? *)
match lst with
| [ (pattern , rhs) ] -> (
match pattern with
@ -4,7 +4,7 @@ open Trace
open Ast_simplified

module Raw = Parser.Ligodity.AST
module Raw = Parser.Cameligo.AST
module SMap = Map.String
module Option = Simple_utils.Option
@ -1,12 +1,12 @@
open Trace
open Function
module I = Parser.Camligo.Ast
module I = Parser.Cameligo.Ast
module O = Ast_simplified
open O.Combinators

let unwrap : type a . a Location.wrap -> a = Location.unwrap

open Operators.Simplify.Camligo
open Operators.Simplify.Cameligo

let type_variable : string -> O.type_expression result = fun str ->
match List.assoc_opt str type_constants with
@ -8,7 +8,7 @@
ast_simplified
self_ast_simplified
operators)
(modules ligodity pascaligo simplify)
(modules cameligo pascaligo simplify)
(preprocess
(pps
ppx_let
@ -1,2 +1,2 @@
module Pascaligo = Pascaligo
module Ligodity = Ligodity
module Cameligo = Cameligo
@ -672,7 +672,7 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_value -> I.e
* } -> (
* let%bind input_type =
* let%bind input_type =
* (\* Hack to take care of let_in introduced by `simplify/ligodity.ml` in ECase's hack *\)
* (\* Hack to take care of let_in introduced by `simplify/cameligo.ml` in ECase's hack *\)
* let default_action e () = fail @@ (needs_annotation e "the returned value") in
* match input_type with
* | Some ty -> ok ty
@ -557,7 +557,7 @@ and type_expression : environment -> ?tv_opt:O.type_expression -> I.expression -
} -> (
let%bind input_type =
let%bind input_type =
(* Hack to take care of let_in introduced by `simplify/ligodity.ml` in ECase's hack *)
(* Hack to take care of let_in introduced by `simplify/cameligo.ml` in ECase's hack *)
let default_action e () = fail @@ (needs_annotation e "the returned value") in
match input_type with
| Some ty -> ok ty
|
@ -601,7 +601,7 @@ and type_expression' : environment -> ?tv_opt:O.type_value -> I.expression -> O.
|
||||
} -> (
|
||||
let%bind input_type =
|
||||
let%bind input_type =
|
||||
(* Hack to take care of let_in introduced by `simplify/ligodity.ml` in ECase's hack *)
|
||||
(* Hack to take care of let_in introduced by `simplify/cameligo.ml` in ECase's hack *)
|
||||
let default_action e () = fail @@ (needs_annotation e "the returned value") in
|
||||
match input_type with
|
||||
| Some ty -> ok ty
|
||||
|
@ -19,9 +19,8 @@ let rec map_expression : mapper -> michelson -> michelson result = fun f e ->
| x -> ok x

open Memory_proto_alpha.Protocol.Script_ir_translator
(* fetches lambda first and second parameter (parameter,storage) *)
let fetch_lambda_parameters : ex_ty -> (ex_ty * ex_ty) result =
let error () = simple_fail "failed to fetch lambda parameters" in
let fetch_contract_inputs : ex_ty -> (ex_ty * ex_ty) result =
let error () = simple_fail "Invalid contract: Failed to fetch parameter and storage" in
function
| Ex_ty (Lambda_t (in_ty, _, _)) -> (
match in_ty with
@ -137,22 +137,8 @@ module Simplify = struct
let type_operators = type_operators
end

module Camligo = struct
let constants = function
| "Bytes.pack" -> ok C_BYTES_PACK
| "Crypto.hash" -> ok C_HASH (* TODO : Check if right *)
| "Operation.transaction" -> ok C_CALL
| "Operation.get_contract" -> ok C_CONTRACT
| "sender" -> ok C_SENDER
| "unit" -> ok C_UNIT
| "source" -> ok C_SOURCE
| _ -> simple_fail "Not a CamLIGO constant"

let type_constants = type_constants
let type_operators = type_operators
end

module Ligodity = struct
module Cameligo = struct
let constants = function
| "assert" -> ok C_ASSERTION
| "Current.balance" -> ok C_BALANCE
@ -9,13 +9,8 @@ module Simplify : sig
val type_operators : string -> type_expression type_operator result
end

module Camligo : sig
val constants : string -> constant result
val type_constants : string -> type_constant result
val type_operators : string -> type_expression type_operator result
end

module Ligodity : sig
module Cameligo : sig
val constants : string -> constant result
val type_constants : string -> type_constant result
val type_operators : string -> type_expression type_operator result
@ -5,7 +5,10 @@ include Stage_common.PP
let list_sep_d x ppf lst = match lst with
| [] -> ()
| _ -> fprintf ppf "@; @[<v>%a@]@;" (list_sep x (tag "@;")) lst
| _ -> fprintf ppf " @[<v>%a@] " (list_sep x (tag " ; ")) lst
let tuple_sep_d x ppf lst = match lst with
| [] -> ()
| _ -> fprintf ppf " @[<v>%a@] " (list_sep x (tag " , ")) lst

let rec te' ppf (te : type_expression type_expression') : unit =
type_expression' type_expression ppf te
@ -19,13 +22,13 @@ let rec expression ppf (e:expression) = match e.expression with
| E_application (f, arg) -> fprintf ppf "(%a)@(%a)" expression f expression arg
| E_constructor (c, ae) -> fprintf ppf "%a(%a)" constructor c expression ae
| E_constant (b, lst) -> fprintf ppf "%a(%a)" constant b (list_sep_d expression) lst
| E_tuple lst -> fprintf ppf "tuple[%a]" (list_sep_d expression) lst
| E_tuple lst -> fprintf ppf "(%a)" (tuple_sep_d expression) lst
| E_accessor (ae, p) -> fprintf ppf "%a.%a" expression ae access_path p
| E_record m -> fprintf ppf "record[%a]" (lmap_sep expression (const " , ")) m
| E_map m -> fprintf ppf "map[%a]" (list_sep_d assoc_expression) m
| E_record m -> fprintf ppf "{%a}" (lrecord_sep expression (const " , ")) m
| E_map m -> fprintf ppf "[%a]" (list_sep_d assoc_expression) m
| E_big_map m -> fprintf ppf "big_map[%a]" (list_sep_d assoc_expression) m
| E_list lst -> fprintf ppf "list[%a]" (list_sep_d expression) lst
| E_set lst -> fprintf ppf "set[%a]" (list_sep_d expression) lst
| E_list lst -> fprintf ppf "[%a]" (list_sep_d expression) lst
| E_set lst -> fprintf ppf "{%a}" (list_sep_d expression) lst
| E_look_up (ds, ind) -> fprintf ppf "(%a)[%a]" expression ds expression ind
| E_lambda {binder;input_type;output_type;result} ->
fprintf ppf "lambda (%a:%a) : %a return %a"
@ -128,6 +128,11 @@ let lmap_sep value sep ppf m =
let new_pp ppf (k, v) = fprintf ppf "%a -> %a" label k value v in
fprintf ppf "%a" (list_sep new_pp sep) lst

let lrecord_sep value sep ppf m =
let lst = Types.LMap.to_kv_list m in
let new_pp ppf (k, v) = fprintf ppf "%a = %a" label k value v in
fprintf ppf "%a" (list_sep new_pp sep) lst

let list_sep_d x = list_sep x (const " , ")
let cmap_sep_d x = cmap_sep x (const " , ")
let lmap_sep_d x = lmap_sep x (const " , ")
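A minimal, self-contained sketch of the formatting difference introduced here: `lmap_sep` renders each pair as `k -> v`, while the new `lrecord_sep` renders it as `k = v`. Plain string printers and an association list stand in for the real `label`/expression printers and `LMap.t`, which are not reproduced in this sketch.

```ocaml
open Format

(* lmap_sep-style pair printer: "k -> v" *)
let pair_arrow ppf (k, v) = fprintf ppf "%s -> %s" k v
(* lrecord_sep-style pair printer: "k = v" *)
let pair_equal ppf (k, v) = fprintf ppf "%s = %s" k v

let () =
  let fields = [("age", "18"); ("name", "\"John Doe\"")] in
  let sep ppf () = fprintf ppf " , " in
  printf "map:    %a@." (pp_print_list ~pp_sep:sep pair_arrow) fields;
  printf "record: {%a}@." (pp_print_list ~pp_sep:sep pair_equal) fields
```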
@ -8,6 +8,7 @@ val label : formatter -> label -> unit
val constant : formatter -> constant -> unit
val cmap_sep : (formatter -> 'a -> unit) -> (formatter -> unit -> unit) -> formatter -> 'a CMap.t -> unit
val lmap_sep : (formatter -> 'a -> unit) -> (formatter -> unit -> unit) -> formatter -> 'a LMap.t -> unit
val lrecord_sep : (formatter -> 'a -> unit) -> (formatter -> unit -> unit) -> formatter -> 'a LMap.t -> unit
val type_expression' : (formatter -> 'a -> unit) -> formatter -> 'a type_expression' -> unit
val type_operator : (formatter -> 'a -> unit) -> formatter -> 'a type_operator -> unit
val type_constant : formatter -> type_constant -> unit
@ -140,60 +140,29 @@ let get_entry (lst : program) (name : string) : (expression * int) result =
in
ok (entry_expression , entry_index)

(*
Assume the following program:
```
const x = 42
const y = 120
const f = () -> x + y
```
aggregate_entry program "f" (Some [unit]) would return:
```
let x = 42 in
let y = 120 in
const f = () -> x + y
f(unit)
```

if arg_lst is None, it means that the entry point is not an arbitrary expression
*)
type form_t =
| ContractForm of (expression * int)
| ExpressionForm of ((expression * int) * expression list)
| ContractForm of expression
| ExpressionForm of expression

let aggregate_entry (lst : program) (form : form_t) : expression result =
let (entry_expression , entry_index, arg_lst) = match form with
| ContractForm (exp,i) -> (exp,i,[])
| ExpressionForm ((exp,i),argl) -> (exp,i,argl) in
let pre_declarations = List.until entry_index lst in
let wrapper =
let aux prec cur =
let (((name , expr) , _)) = cur in
e_let_in name expr.type_value expr prec
in
fun expr -> List.fold_right' aux expr pre_declarations
fun expr -> List.fold_right' aux expr lst
in
match (entry_expression.content , arg_lst) with
| (E_closure _ , (hd::tl)) -> (
let%bind type_value' = match entry_expression.type_value with
| T_function (_,t) -> ok t
| _ -> simple_fail "Trying to aggregate closure which does not have function type" in
let entry_expression' = List.fold_left
(fun acc el ->
let type_value' = match acc.type_value with
| T_function (_,t) -> t
| e -> e in
{
content = E_application (acc,el) ;
type_value = type_value' ;
}
)
{
content = E_application (entry_expression, hd) ;
type_value = type_value' ;
} tl in
ok @@ wrapper entry_expression'
)
| (_ , _) -> (
ok @@ wrapper entry_expression
)
match form with
| ContractForm entry_expression -> (
match (entry_expression.content) with
| (E_closure l) -> (
let l' = { l with body = wrapper l.body } in
let e' = {
content = E_closure l' ;
type_value = entry_expression.type_value ;
} in
ok e'
)
| _ -> simple_fail "a contract must be a closure" )
| ExpressionForm entry_expression ->
ok @@ wrapper entry_expression
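An illustrative analog of the `wrapper` above: fold the preceding declarations into nested `let ... in` around a body. Plain strings stand in for the mini_c expression type and combinators, so this sketch only mirrors the shape of the transformation, not the repository API.

```ocaml
(* Fold declarations [(name, value); ...] into "let name = value in ..."
   around a body, mirroring how wrapper nests e_let_in around the entry. *)
let wrap_declarations (decls : (string * string) list) (body : string) : string =
  List.fold_right
    (fun (name, value) acc -> Printf.sprintf "let %s = %s in %s" name value acc)
    decls body

let () =
  print_endline (wrap_declarations [("x", "42"); ("y", "120")] "f (unit)")
  (* prints: let x = 42 in let y = 120 in f (unit) *)
```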
@ -7,5 +7,5 @@ It's there to detect a regression of: https://gitlab.com/ligolang/ligo/issues/68
type storage is tez

function main (const p : unit; const s: int) : list(operation) * storage is
function main (const p : unit; const s: tez) : list(operation) * storage is
((nil : list(operation)), balance)
@ -1,143 +0,0 @@
|
||||
open Trace
|
||||
open Test_helpers
|
||||
|
||||
let type_file f =
|
||||
let%bind simplified = Ligo.Compile.Of_source.compile f (Syntax_name "pascaligo") in
|
||||
let%bind typed,state = Ligo.Compile.Of_simplified.compile simplified in
|
||||
ok @@ (typed,state)
|
||||
|
||||
let get_program =
|
||||
let s = ref None in
|
||||
fun () -> match !s with
|
||||
| Some s -> ok s
|
||||
| None -> (
|
||||
let%bind (program , state) = type_file "./contracts/heap-instance.ligo" in
|
||||
let () = Typer.Solver.discard_state state in
|
||||
s := Some program ;
|
||||
ok program
|
||||
)
|
||||
|
||||
let a_heap_ez ?value_type (content:(int * Ast_typed.ae) list) =
|
||||
let open Ast_typed.Combinators in
|
||||
let content =
|
||||
let aux = fun (x, y) -> e_a_empty_nat x, y in
|
||||
List.map aux content in
|
||||
let value_type = match value_type, content with
|
||||
| None, hd :: _ -> (snd hd).type_annotation
|
||||
| Some s, _ -> s
|
||||
| _ -> raise (Failure "no value type and heap empty when building heap") in
|
||||
e_a_empty_map content (t_nat ()) value_type
|
||||
|
||||
let ez lst =
|
||||
let open Ast_typed.Combinators in
|
||||
let value_type = t_pair
|
||||
(t_int ())
|
||||
(t_string ())
|
||||
()
|
||||
in
|
||||
let lst' =
|
||||
let aux (i, (j, s)) =
|
||||
(i, e_a_empty_pair (e_a_empty_int j) (e_a_empty_string s)) in
|
||||
List.map aux lst in
|
||||
a_heap_ez ~value_type lst'
|
||||
|
||||
let dummy n =
|
||||
ez List.(
|
||||
map (fun n -> (n, (n, string_of_int n)))
|
||||
@@ tl
|
||||
@@ range (n + 1)
|
||||
)
|
||||
|
||||
let run_typed (entry_point:string) (program:Ast_typed.program) (input:Ast_typed.annotated_expression) =
|
||||
let%bind input_mini_c = Compile.Of_typed.compile_expression input in
|
||||
let%bind mini_c = Compile.Of_typed.compile program in
|
||||
let%bind program_mich = Compile.Of_mini_c.aggregate_and_compile_expression
|
||||
mini_c (Entry_name entry_point) [input_mini_c] in
|
||||
let%bind res = Run.Of_michelson.run program_mich.expr program_mich.expr_ty in
|
||||
let%bind output_type =
|
||||
let%bind entry_expression = Ast_typed.get_entry program entry_point in
|
||||
let%bind (_ , output_type) = Ast_typed.get_t_function entry_expression.type_annotation in
|
||||
ok output_type
|
||||
in
|
||||
let%bind mini_c = Compiler.Uncompiler.translate_value res in
|
||||
Transpiler.untranspile mini_c output_type
|
||||
|
||||
let is_empty () : unit result =
|
||||
let%bind program = get_program () in
|
||||
let aux n =
|
||||
let open Ast_typed.Combinators in
|
||||
let input = dummy n in
|
||||
let%bind result = run_typed "is_empty" program input in
|
||||
let expected = e_a_empty_bool (n = 0) in
|
||||
Ast_typed.assert_value_eq (expected, result)
|
||||
in
|
||||
let%bind _ = bind_list
|
||||
@@ List.map aux
|
||||
@@ [0 ; 2 ; 7 ; 12] in
|
||||
ok ()
|
||||
|
||||
let get_top () : unit result =
|
||||
let%bind program = get_program () in
|
||||
let aux n =
|
||||
let open Ast_typed.Combinators in
|
||||
let input = dummy n in
|
||||
match n, run_typed "get_top" program input with
|
||||
| 0, Trace.Ok _ -> simple_fail "unexpected success"
|
||||
| 0, _ -> ok ()
|
||||
| _, result ->
|
||||
let%bind result' = result in
|
||||
let expected = e_a_empty_pair (e_a_empty_int 1) (e_a_empty_string "1") in
|
||||
Ast_typed.assert_value_eq (expected, result')
|
||||
in
|
||||
let%bind _ = bind_list
|
||||
@@ List.map aux
|
||||
@@ [0 ; 2 ; 7 ; 12] in
|
||||
ok ()
|
||||
|
||||
let pop_switch () : unit result =
|
||||
let%bind program = get_program () in
|
||||
let aux n =
|
||||
let input = dummy n in
|
||||
match n, run_typed "pop_switch" program input with
|
||||
| 0, Trace.Ok _ -> simple_fail "unexpected success"
|
||||
| 0, _ -> ok ()
|
||||
| _, result ->
|
||||
let%bind result' = result in
|
||||
let expected = ez List.(
|
||||
map (fun i -> if i = 1 then (1, (n, string_of_int n)) else (i, (i, string_of_int i)))
|
||||
@@ tl
|
||||
@@ range (n + 1)
|
||||
) in
|
||||
Ast_typed.assert_value_eq (expected, result')
|
||||
in
|
||||
let%bind _ = bind_list
|
||||
@@ List.map aux
|
||||
@@ [0 ; 2 ; 7 ; 12] in
|
||||
ok ()
|
||||
|
||||
let pop () : unit result =
|
||||
let%bind program = get_program () in
|
||||
let aux n =
|
||||
let input = dummy n in
|
||||
(match run_typed "pop" program input with
|
||||
| Trace.Ok (output , _) -> (
|
||||
Format.printf "\nPop output on %d : %a\n" n Ast_typed.PP.annotated_expression output ;
|
||||
)
|
||||
| Trace.Error err -> (
|
||||
Format.printf "\nPop output on %d : error\n" n) ;
|
||||
Format.printf "Errors : {\n%a}\n%!" error_pp (err ()) ;
|
||||
) ;
|
||||
ok ()
|
||||
in
|
||||
let%bind _ = bind_list
|
||||
@@ List.map aux
|
||||
@@ [2 ; 7 ; 12] in
|
||||
simple_fail "display"
|
||||
(* ok () *)
|
||||
|
||||
let main = test_suite "Heap (End to End)" [
|
||||
test "is_empty" is_empty ;
|
||||
test "get_top" get_top ;
|
||||
test "pop_switch" pop_switch ;
|
||||
(* test "pop" pop ; *)
|
||||
]
|
@ -1,6 +1,6 @@
open Trace
open Test_helpers
open Parser.Camligo
open Parser.Cameligo

let basic () : unit result =
let%bind _ = User.parse_file "./contracts/new-syntax.mligo" in
@ -8,7 +8,7 @@ let basic () : unit result =
let simplify () : unit result =
let%bind raw = User.parse_file "./contracts/new-syntax.mligo" in
let%bind _simpl = Simplify.Camligo.main raw in
let%bind _simpl = Simplify.Cameligo.main raw in
ok ()

let main = "Multifix", [
@ -8,7 +8,6 @@ let () =
Integration_tests.main ;
Transpiler_tests.main ;
Typer_tests.main ;
Heap_tests.main ;
Coase_tests.main ;
Vote_tests.main ;
Multisig_tests.main ;
@ -83,10 +83,11 @@ let typed_program_with_simplified_input_to_michelson
(input: Ast_simplified.expression) : Compiler.compiled_expression result =
let env = Ast_typed.program_environment program in
let state = Typer.Solver.initial_state in
let%bind (typed_in,_) = Compile.Of_simplified.compile_expression ~env ~state input in
let%bind mini_c_in = Compile.Of_typed.compile_expression typed_in in
let%bind mini_c_prg = Compile.Of_typed.compile program in
Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg (Entry_name entry_point) [mini_c_in]
let%bind app = Compile.Of_simplified.apply entry_point input in
let%bind (typed_app,_) = Compile.Of_simplified.compile_expression ~env ~state app in
let%bind compiled_applied = Compile.Of_typed.compile_expression typed_app in
let%bind mini_c_prg = Compile.Of_typed.compile program in
Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg compiled_applied

let run_typed_program_with_simplified_input ?options
(program: Ast_typed.program) (entry_point: string)
@ -143,10 +144,11 @@ let expect_evaluate program entry_point expecter =
let content () = Format.asprintf "Entry_point: %s" entry_point in
error title content in
trace error @@
let%bind mini_c = Ligo.Compile.Of_typed.compile program in
let%bind michelson_value = Ligo.Compile.Of_mini_c.aggregate_and_compile_expression mini_c (Entry_name entry_point) [] in
let%bind res_michelson = Ligo.Run.Of_michelson.run michelson_value.expr michelson_value.expr_ty in
let%bind res_simpl = Uncompile.uncompile_typed_program_entry_expression_result program entry_point res_michelson in
let%bind mini_c = Ligo.Compile.Of_typed.compile program in
let%bind (exp,_) = Mini_c.get_entry mini_c entry_point in
let%bind michelson_value = Ligo.Compile.Of_mini_c.aggregate_and_compile_expression mini_c exp in
let%bind res_michelson = Ligo.Run.Of_michelson.run michelson_value.expr michelson_value.expr_ty in
let%bind res_simpl = Uncompile.uncompile_typed_program_entry_expression_result program entry_point res_michelson in
expecter res_simpl

let expect_eq_evaluate program entry_point expected =