Remove original Cameligo and rename Ligodity to Cameligo.

This commit is contained in:
Sander Spies 2019-12-12 14:35:07 +01:00
parent e558d23658
commit 6f9134834f
71 changed files with 101 additions and 3070 deletions

View File

@ -15,7 +15,7 @@ Here's how to define a boolean:
const a: bool = True; const a: bool = True;
const b: bool = False; const b: bool = False;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let a: bool = true let a: bool = true
let b: bool = false let b: bool = false
@ -43,7 +43,7 @@ const b: string = "Alice";
// True // True
const c: bool = (a = b); const c: bool = (a = b);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let a: string = "Alice" let a: string = "Alice"
let b: string = "Alice" let b: string = "Alice"
@ -74,7 +74,7 @@ const f: bool = (a <= b);
const g: bool = (a >= b); const g: bool = (a >= b);
const h: bool = (a =/= b); const h: bool = (a =/= b);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let a: int = 5 let a: int = 5
let b: int = 4 let b: int = 4
@ -111,7 +111,7 @@ const a: tez = 5mutez;
const b: tez = 10mutez; const b: tez = 10mutez;
const c: bool = (a = b); const c: bool = (a = b);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let a: tez = 5mutez let a: tez = 5mutez
let b: tez = 10mutez let b: tez = 10mutez
@ -161,7 +161,7 @@ function is_adult(const age: nat): bool is
> ligo run-function -s pascaligo src/if-else.ligo is_adult 21n > ligo run-function -s pascaligo src/if-else.ligo is_adult 21n
> ``` > ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let min_age: nat = 16n let min_age: nat = 16n

View File

@ -55,7 +55,7 @@ Instead, you can inline the necessary logic directly, like this:
function add(const a: int; const b: int): int is a + b function add(const a: int; const b: int): int is a + b
``` ```
<!--Cameligo--> <!--CameLIGO-->
Functions in CameLIGO are defined using the `let` keyword, like value bindings. Functions in CameLIGO are defined using the `let` keyword, like value bindings.
The difference is that after the value name a list of function parameters is provided, The difference is that after the value name a list of function parameters is provided,
@ -110,7 +110,7 @@ const increment : (int -> int) = (function (const i : int) : int is i + 1);
const a: int = increment(1); const a: int = increment(1);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let increment : (int -> int) = fun (i: int) -> i + 1 let increment : (int -> int) = fun (i: int) -> i + 1
``` ```

View File

@ -17,7 +17,7 @@ Here's how a custom map type is defined:
type ledger is map(address, tez); type ledger is map(address, tez);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type ledger = (address, tez) map type ledger = (address, tez) map
``` ```
@ -44,7 +44,7 @@ end
> >
> `("<string value>": address)` means that we type-cast a string into an address. > `("<string value>": address)` means that we type-cast a string into an address.
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let ledger: ledger = Map.literal let ledger: ledger = Map.literal
@ -82,7 +82,7 @@ If we want to access a balance from our ledger above, we can use the `[]` operat
const balance: option(tez) = ledger[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)]; const balance: option(tez) = ledger[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let balance: tez option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger let balance: tez option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger
@ -106,7 +106,7 @@ Accessing a value in a map yields an option, however you can also get the value
const balance: tez = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), ledger); const balance: tez = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), ledger);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let balance: tez = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger let balance: tez = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger
@ -139,7 +139,7 @@ function iter_op (const m : ledger) : unit is
} with map_iter(aggregate, m) ; } with map_iter(aggregate, m) ;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let iter_op (m : ledger) : unit = let iter_op (m : ledger) : unit =
let assert_eq = fun (i: address) (j: tez) -> assert (j > 100) let assert_eq = fun (i: address) (j: tez) -> assert (j > 100)
@ -166,7 +166,7 @@ function map_op (const m : ledger) : ledger is
} with map_map(increment, m) ; } with map_map(increment, m) ;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let map_op (m : ledger) : ledger = let map_op (m : ledger) : ledger =
let increment = fun (_: address) (j: tez) -> j+1 let increment = fun (_: address) (j: tez) -> j+1
@ -200,7 +200,7 @@ function fold_op (const m : ledger) : tez is
} with map_fold(aggregate, m , 10) } with map_fold(aggregate, m , 10)
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let fold_op (m : ledger) : ledger = let fold_op (m : ledger) : ledger =
let aggregate = fun (ignore: address) (j: tez * tez) -> j.0 + j.1 let aggregate = fun (ignore: address) (j: tez * tez) -> j.0 + j.1
@ -234,7 +234,7 @@ type user is record
end end
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type user = { type user = {
id: nat; id: nat;
@ -266,7 +266,7 @@ const user: user = record
end end
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let user: user = { let user: user = {
id = 1n; id = 1n;
@ -296,7 +296,7 @@ If we want to obtain a value from a record for a given key, we can do the follow
const is_admin: bool = user.is_admin; const is_admin: bool = user.is_admin;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let is_admin: bool = user.is_admin let is_admin: bool = user.is_admin
``` ```

View File

@ -36,7 +36,7 @@ const g: int = 1_000_000;
>const g: int = 1_000_000; >const g: int = 1_000_000;
>``` >```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
// int + int produces int // int + int produces int
@ -103,7 +103,7 @@ const b: int = 5n - 2n;
const d: tez = 5mutez - 1mt; const d: tez = 5mutez - 1mt;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let a: int = 5 - 10 let a: int = 5 - 10
// substraction of two nats, yields an int // substraction of two nats, yields an int
@ -140,7 +140,7 @@ const b: nat = 5n * 5n;
const c: tez = 5n * 5mutez; const c: tez = 5n * 5mutez;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let a: int = 5 * 5 let a: int = 5 * 5
let b: nat = 5n * 5n let b: nat = 5n * 5n
@ -173,7 +173,7 @@ const b: nat = 10n / 3n;
const c: nat = 10mutez / 3mutez; const c: nat = 10mutez / 3mutez;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let a: int = 10 / 3 let a: int = 10 / 3
let b: nat = 10n / 3n let b: nat = 10n / 3n

View File

@ -24,7 +24,7 @@ const my_set: int_set = set
end end
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type int_set = int set type int_set = int set
let my_set: int_set = let my_set: int_set =
@ -48,7 +48,7 @@ let my_set: int_set =
const my_set: int_set = set end; const my_set: int_set = set end;
const my_set_2: int_set = set_empty; const my_set_2: int_set = set_empty;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let my_set: int_set = (Set.empty: int set) let my_set: int_set = (Set.empty: int set)
``` ```
@ -68,7 +68,7 @@ const contains_three: bool = my_set contains 3;
const contains_three_fn: bool = set_mem(3, my_set); const contains_three_fn: bool = set_mem(3, my_set);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let contains_three: bool = Set.mem 3 my_set let contains_three: bool = Set.mem 3 my_set
``` ```
@ -87,7 +87,7 @@ let contains_three: bool = Set.mem(3, my_set);
const set_size: nat = size(my_set); const set_size: nat = size(my_set);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let set_size: nat = Set.size my_set let set_size: nat = Set.size my_set
``` ```
@ -108,7 +108,7 @@ const larger_set: int_set = set_add(4, my_set);
const smaller_set: int_set = set_remove(3, my_set); const smaller_set: int_set = set_remove(3, my_set);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let larger_set: int_set = Set.add 4 my_set let larger_set: int_set = Set.add 4 my_set
@ -134,7 +134,7 @@ function sum(const result: int; const i: int): int is result + i;
const sum_of_a_set: int = set_fold(sum, my_set, 0); const sum_of_a_set: int = set_fold(sum, my_set, 0);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let sum (result: int) (i: int) : int = result + i let sum (result: int) (i: int) : int = result + i
let sum_of_a_set: int = Set.fold sum my_set 0 let sum_of_a_set: int = Set.fold sum my_set 0
@ -166,7 +166,7 @@ const my_list: int_list = list
end end
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type int_list = int list type int_list = int list
let my_list: int_list = [1; 2; 3] let my_list: int_list = [1; 2; 3]
@ -190,7 +190,7 @@ const larger_list: int_list = cons(4, my_list);
const even_larger_list: int_list = 5 # larger_list; const even_larger_list: int_list = 5 # larger_list;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let larger_list: int_list = 4 :: my_list let larger_list: int_list = 4 :: my_list
(* CameLIGO doesn't have a List.cons *) (* CameLIGO doesn't have a List.cons *)
@ -217,7 +217,7 @@ function increment(const i: int): int is block { skip } with i + 1;
const incremented_list: int_list = list_map(increment, even_larger_list); const incremented_list: int_list = list_map(increment, even_larger_list);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let increment (i: int) : int = i + 1 let increment (i: int) : int = i + 1
@ -246,7 +246,7 @@ function sum(const result: int; const i: int): int is block { skip } with result
const sum_of_a_list: int = list_fold(sum, my_list, 0); const sum_of_a_list: int = list_fold(sum, my_list, 0);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let sum (result: int) (i: int) : int = result + i let sum (result: int) (i: int) : int = result + i
@ -292,7 +292,7 @@ type full_name is string * string;
const full_name: full_name = ("Alice", "Johnson"); const full_name: full_name = ("Alice", "Johnson");
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type full_name = string * string type full_name = string * string
(* The parenthesis here are optional *) (* The parenthesis here are optional *)
@ -324,7 +324,7 @@ Tuple elements are one-indexed and accessed like so:
const first_name: string = full_name.1; const first_name: string = full_name.1;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let first_name: string = full_name.1 let first_name: string = full_name.1
``` ```

View File

@ -12,7 +12,7 @@ Strings are defined using the built-in `string` type like this:
``` ```
const a: string = "Hello Alice"; const a: string = "Hello Alice";
``` ```
<!--Cameligo--> <!--CameLIGO-->
``` ```
let a: string = "Hello Alice" let a: string = "Hello Alice"
``` ```
@ -37,7 +37,7 @@ const full_greeting: string = greeting ^ " " ^ name;
// Hello Alice! (alternatively) // Hello Alice! (alternatively)
const full_greeting_exclamation: string = string_concat(full_greeting, "!"); const full_greeting_exclamation: string = string_concat(full_greeting, "!");
``` ```
<!--Cameligo--> <!--CameLIGO-->
Strings can be concatenated using the `^` operator. Strings can be concatenated using the `^` operator.
```cameligo ```cameligo
@ -67,7 +67,7 @@ const name: string = "Alice";
// slice = "A" // slice = "A"
const slice: string = string_slice(0n, 1n, name); const slice: string = string_slice(0n, 1n, name);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let name: string = "Alice" let name: string = "Alice"
let slice: string = String.slice 0n 1n name let slice: string = String.slice 0n 1n name
@ -92,7 +92,7 @@ const name: string = "Alice";
// length = 5 // length = 5
const length: nat = size(name); const length: nat = size(name);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let name: string = "Alice" let name: string = "Alice"
let length: nat = String.size name let length: nat = String.size name

View File

@ -20,7 +20,7 @@ type animalBreed is string;
const dogBreed : animalBreed = "Saluki"; const dogBreed : animalBreed = "Saluki";
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type animal_breed = string type animal_breed = string
@ -48,7 +48,7 @@ const ledger: accountBalances = map
end end
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
// account_balances is a simple type, a map of address <-> tez // account_balances is a simple type, a map of address <-> tez
type account_balances = (address, tez) map type account_balances = (address, tez) map
@ -100,7 +100,7 @@ const ledger: accountBalances = map
end end
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
(* alias two types *) (* alias two types *)
type account = address type account = address

View File

@ -19,7 +19,7 @@ Here's how they're defined:
const n: unit = Unit; const n: unit = Unit;
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let n: unit = () let n: unit = ()
``` ```
@ -50,7 +50,7 @@ const u: user = Admin(1000n);
const g: user = Guest(Unit); const g: user = Guest(Unit);
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type id = nat type id = nat
type user = type user =
@ -93,7 +93,7 @@ const p1: dinner = None;
const p2: dinner = Some("Hamburgers") const p2: dinner = Some("Hamburgers")
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type dinner = string option type dinner = string option
@ -129,7 +129,7 @@ function is_hungry(const dinner: dinner): bool is block { skip }
) )
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
type dinner = string option type dinner = string option
let is_hungry (d: dinner) : bool = let is_hungry (d: dinner) : bool =

View File

@ -21,7 +21,7 @@ You can evaluate the constant definition above using the following CLI command:
ligo evaluate-value -s pascaligo gitlab-pages/docs/language-basics/src/variables-and-constants/const.ligo age ligo evaluate-value -s pascaligo gitlab-pages/docs/language-basics/src/variables-and-constants/const.ligo age
# Outputs: 25 # Outputs: 25
``` ```
<!--Cameligo--> <!--CameLIGO-->
```cameligo ```cameligo
let age: int = 25 let age: int = 25
``` ```
@ -78,7 +78,7 @@ ligo run-function -s pascaligo gitlab-pages/docs/language-basics/src/variables-a
# Outputs: 2 # Outputs: 2
``` ```
<!--Cameligo--> <!--CameLIGO-->
As expected from a functional language, CameLIGO uses value-binding As expected from a functional language, CameLIGO uses value-binding
for variables rather than assignment. Variables are changed by replacement, for variables rather than assignment. Variables are changed by replacement,

View File

@ -41,22 +41,22 @@ let parsify_expression_pascaligo = fun source ->
Simplify.Pascaligo.simpl_expression raw in Simplify.Pascaligo.simpl_expression raw in
ok simplified ok simplified
let parsify_ligodity = fun source -> let parsify_cameligo = fun source ->
let%bind raw = let%bind raw =
trace (simple_error "parsing") @@ trace (simple_error "parsing") @@
Parser.Ligodity.parse_file source in Parser.Cameligo.parse_file source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Ligodity.simpl_program raw in Simplify.Cameligo.simpl_program raw in
ok simplified ok simplified
let parsify_expression_ligodity = fun source -> let parsify_expression_cameligo = fun source ->
let%bind raw = let%bind raw =
trace (simple_error "parsing expression") @@ trace (simple_error "parsing expression") @@
Parser.Ligodity.parse_expression source in Parser.Cameligo.parse_expression source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying expression") @@ trace (simple_error "simplifying expression") @@
Simplify.Ligodity.simpl_expression raw in Simplify.Cameligo.simpl_expression raw in
ok simplified ok simplified
let parsify_reasonligo = fun source -> let parsify_reasonligo = fun source ->
@ -65,7 +65,7 @@ let parsify_reasonligo = fun source ->
Parser.Reasonligo.parse_file source in Parser.Reasonligo.parse_file source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Ligodity.simpl_program raw in Simplify.Cameligo.simpl_program raw in
ok simplified ok simplified
let parsify_string_reasonligo = fun source -> let parsify_string_reasonligo = fun source ->
@ -74,7 +74,7 @@ let parsify_string_reasonligo = fun source ->
Parser.Reasonligo.parse_string source in Parser.Reasonligo.parse_string source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Ligodity.simpl_program raw in Simplify.Cameligo.simpl_program raw in
ok simplified ok simplified
let parsify_expression_reasonligo = fun source -> let parsify_expression_reasonligo = fun source ->
@ -83,13 +83,13 @@ let parsify_expression_reasonligo = fun source ->
Parser.Reasonligo.parse_expression source in Parser.Reasonligo.parse_expression source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying expression") @@ trace (simple_error "simplifying expression") @@
Simplify.Ligodity.simpl_expression raw in Simplify.Cameligo.simpl_expression raw in
ok simplified ok simplified
let parsify = fun (syntax : v_syntax) source_filename -> let parsify = fun (syntax : v_syntax) source_filename ->
let%bind parsify = match syntax with let%bind parsify = match syntax with
| Pascaligo -> ok parsify_pascaligo | Pascaligo -> ok parsify_pascaligo
| Cameligo -> ok parsify_ligodity | Cameligo -> ok parsify_cameligo
| ReasonLIGO -> ok parsify_reasonligo | ReasonLIGO -> ok parsify_reasonligo
in in
let%bind parsified = parsify source_filename in let%bind parsified = parsify source_filename in
@ -99,7 +99,7 @@ let parsify = fun (syntax : v_syntax) source_filename ->
let parsify_expression = fun syntax source -> let parsify_expression = fun syntax source ->
let%bind parsify = match syntax with let%bind parsify = match syntax with
| Pascaligo -> ok parsify_expression_pascaligo | Pascaligo -> ok parsify_expression_pascaligo
| Cameligo -> ok parsify_expression_ligodity | Cameligo -> ok parsify_expression_cameligo
| ReasonLIGO -> ok parsify_expression_reasonligo | ReasonLIGO -> ok parsify_expression_reasonligo
in in
let%bind parsified = parsify source in let%bind parsified = parsify source in

View File

@ -1,9 +1,9 @@
open Trace open Trace
module Parser = Parser_ligodity.Parser module Parser = Parser_cameligo.Parser
module AST = Parser_ligodity.AST module AST = Parser_cameligo.AST
module ParserLog = Parser_ligodity.ParserLog module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_ligodity.LexToken module LexToken = Parser_cameligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer.Make(LexToken)
let parse_file (source: string) : AST.t result = let parse_file (source: string) : AST.t result =

View File

@ -1,4 +1,4 @@
(* Abstract Syntax Tree (AST) for Ligodity *) (* Abstract Syntax Tree (AST) for Cameligo *)
[@@@warning "-30"] [@@@warning "-30"]

View File

@ -1,4 +1,4 @@
(* Driver for the lexer of Ligodity *) (* Driver for the lexer of Cameligo *)
(* Error printing and exception tracing *) (* Error printing and exception tracing *)

View File

@ -1,4 +1,4 @@
(* Driver for the parser of Ligodity *) (* Driver for the parser of Cameligo *)
(* Error printing and exception tracing *) (* Error printing and exception tracing *)

View File

@ -6,9 +6,9 @@
(flags -la 1 --explain --external-tokens LexToken)) (flags -la 1 --explain --external-tokens LexToken))
(library (library
(name parser_ligodity) (name parser_cameligo)
(public_name ligo.parser.ligodity) (public_name ligo.parser.cameligo)
(modules AST ligodity Parser ParserLog LexToken) (modules AST cameligo Parser ParserLog LexToken)
(libraries (libraries
parser_shared parser_shared
str str
@ -22,19 +22,19 @@
(executable (executable
(name LexerMain) (name LexerMain)
(libraries (libraries
parser_ligodity) parser_cameligo)
(modules (modules
LexerMain LexerMain
) )
(flags (:standard -open Parser_shared -open Parser_ligodity)) (flags (:standard -open Parser_shared -open Parser_cameligo))
) )
(executable (executable
(name ParserMain) (name ParserMain)
(libraries (libraries
parser_ligodity) parser_cameligo)
(modules (modules
ParserMain ParserMain
) )
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_ligodity)) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))
) )

View File

@ -1,2 +0,0 @@
ast_generated.ml
parser_generated.mly

View File

@ -1 +0,0 @@
include Ast_generated

View File

@ -1,66 +0,0 @@
(library
(name parser_camligo)
(public_name ligo.parser.camligo)
(libraries
simple-utils
tezos-utils
lex
)
(modules ast ast_generated parser user)
(flags (:standard -w +1..62-4-9-44-40-42-48@39@33 -open Simple_utils -open Tezos_utils ))
(preprocess
(pps
ppx_let
ppx_deriving.std
)
)
)
;; Generating parser
(rule
(targets parser.ml parser.mli)
(deps parser_generated.mly ast.ml)
(action (system "menhir --explain --unused-tokens --external-tokens Lex.Token lex/token.mly parser_generated.mly --base parser"))
)
(rule
(targets parser_generated.mly)
(deps partial_parser.mly pre_parser.mly)
(action (system "cat pre_parser.mly partial_parser.mly > parser_generated.mly"))
(mode (promote (until-clean) (only *)))
)
(rule
(targets partial_parser.mly)
(deps generator.exe)
(action (system "./generator.exe parser > partial_parser.mly"))
)
;; Generating AST
(rule
(targets ast_generated.ml)
(deps generator.exe)
(action (system "./generator.exe ast > ast_generated.ml"))
(mode (promote (until-clean) (only *)))
)
;; Generating Generator
(executable
(name generator)
(libraries
ocamlgraph
simple-utils
tezos-utils
lex
)
(modules generator)
(preprocess
(pps
ppx_let
ppx_deriving.std
)
)
)

View File

@ -1,738 +0,0 @@
open Simple_utils
type 'a name = {
name : string ;
content : 'a ;
}
let make_name name content = { name ; content }
let destruct {name ; content} = (name, content)
let get_name x = x.name
let get_content x = x.content
module Token = Lex.Token
type token = Token.token
module O = struct
type list_mode =
| Trail of token
| Trail_option of token
| Trail_force of token
| Trail_force_ne of token
| Lead of token
| Lead_ne of token
| Separated of token
| Separated_ne of token
| Separated_nene of token
| Naked
| Naked_ne
type 'a list_element = list_mode * 'a
type rhs_element = [
| `Named of string
| `Token of token
| `List of string list_element
| `Option of string
]
type rhs = rhs_element list name
type rule = rhs list name
type manual_rule_content = {
menhir_codes : string list ;
ast_code : string ;
}
type manual_rule = manual_rule_content name
type singleton =
| Manual of manual_rule
| Generated of rule
type name_element = [
| `Named of string
| `Current
| `Lower
]
type element = [
| `Named of string
| `Token of token
| `List of name_element list_element
| `Current
| `Lower
]
type operator = element list
type n_operator = operator name
type n_operators = n_operator list
type level = n_operators name
type level_list = level list
type levels = level List.Ne.t
type hierarchy = {
prefix : string ;
levels : levels ;
auxiliary_rules : rule list ;
}
type n_hierarchy = hierarchy name
let make_hierarchy prefix levels auxiliary_rules : hierarchy = { levels ; auxiliary_rules ; prefix }
type language = {
entry_point : string ;
singletons : singleton list ;
hierarchies : n_hierarchy list ;
}
let get_op : n_operator -> operator = get_content
let manual_singleton name menhir_codes ast_code : singleton = Manual (make_name name {menhir_codes ; ast_code})
let rule_singleton rule : singleton = Generated rule
let language entry_point singletons hierarchies = {entry_point ; singletons ; hierarchies}
let name_hierarchy name prefix : n_operators list -> rule list -> n_hierarchy = fun nopss rules ->
let nopss' = List.Ne.of_list nopss in
let name_i : int -> n_operators -> level = fun i x ->
let first = get_name (List.hd x) in
let name' = Format.asprintf "%s_%d_%s" name i first in
make_name name' x in
let levels : levels = List.Ne.mapi name_i nopss' in
make_name name @@ make_hierarchy prefix levels rules
end
module Check = struct
open O
let well_formed : language -> unit = fun l ->
let elements : element list -> unit = fun es ->
let rec aux = fun es ->
match es with
| [] -> ()
| [ _ ] -> ()
| (`List _ | `Named _ | `Current | `Lower) :: (`List _ | `Named _ | `Current | `Lower) :: _ ->
raise (Failure "two non-token separated ops in a row")
| _ :: tl -> aux tl
in
(if (List.length es < 2) then raise (Failure "operator is too short")) ;
aux es in
let op : n_operator -> unit = fun x -> elements @@ get_content x in
let level : level -> unit = fun l -> List.iter op @@ get_content l in
let hierarchy : n_hierarchy -> unit = fun h -> List.Ne.iter level @@ h.content.levels in
List.iter hierarchy l.hierarchies
let associativity : language -> unit = fun l ->
let level : level -> unit = fun l ->
let aux : ([`Left | `Right | `None] as 'a) -> n_operator -> 'a = fun ass nop ->
let op = get_content nop in
match ass, List.hd op, List.nth op (List.length op - 1) with
| _, `Lower, `Lower -> raise (Failure "double assoc")
| `None, `Lower, _ -> `Left
| `None, _, `Lower -> `Right
| `Left, _, `Lower -> raise (Failure "different assocs")
| `Right, `Lower, _ -> raise (Failure "different assocs")
| m, _, _ -> m
in
let _assert = List.fold_left aux `None (get_content l) in
()
in
let hierarchy : n_hierarchy -> unit = fun h ->
List.Ne.iter level h.content.levels in
List.iter hierarchy l.hierarchies
end
let make_constructor : _ -> (string * string) -> unit = fun ppf (gr, rhs) ->
let gr = String.capitalize_ascii gr in
match rhs with
| "" -> Format.fprintf ppf "%s" gr
| s -> Format.fprintf ppf "%s_%s" gr s
let make_operator : _ -> (string * string) -> unit = fun ppf (prefix, op) ->
Format.fprintf ppf "%s_%s" prefix op
module Print_AST = struct
open Format
open PP_helpers
let manual_rule : _ -> O.manual_rule -> _ = fun ppf mr ->
fprintf ppf "%s = %s" mr.name mr.content.ast_code
let generated_rule : _ -> O.rule -> _ = fun ppf gr ->
let aux : _ -> O.rhs -> _ = fun ppf rhs ->
let type_elements =
let aux : O.rhs_element -> string option = fun e ->
match e with
| `Named s -> Some (s ^ " Location.wrap")
| `List ( _, s) -> Some ("(" ^ s ^ " Location.wrap list)")
| `Option s -> Some ("(" ^ s ^ " Location.wrap option)")
| `Token _ -> None
in
List.filter_map aux rhs.content in
let type_element = fun ppf te -> fprintf ppf "%s" te in
fprintf ppf "| %a of (%a)"
make_constructor (gr.name, rhs.name)
(list_sep type_element (const " * ")) type_elements
in
fprintf ppf "%s =@. @[<v>%a@]" gr.name
(list_sep aux new_line) gr.content
let singleton : _ -> O.singleton -> _ = fun ppf s ->
match s with
| Manual s -> manual_rule ppf s
| Generated s -> generated_rule ppf s
let singletons : _ -> O.singleton list -> _ = fun ppf ss ->
match ss with
| [] -> ()
| hd :: tl ->
fprintf ppf "%a\n" (prepend "type " (singleton)) hd ;
fprintf ppf "%a" (list_sep (prepend "and " (singleton)) (const "\n")) tl
let n_operator prefix level_name : _ -> O.n_operator -> _ = fun ppf nop ->
let type_elements =
let aux : O.element -> string option = fun e ->
match e with
| `Named s -> Some (s ^ " Location.wrap")
| `List ( _, s) -> Some ("(" ^ (match s with
| `Lower | `Current -> level_name |`Named s -> s
) ^ " Location.wrap list)")
| `Token _ -> None
| `Current | `Lower -> Some (level_name ^ " Location.wrap") in
List.filter_map aux (get_content nop) in
let type_element = fun ppf te -> fprintf ppf "%s" te in
fprintf ppf "| %a of (%a)"
make_operator (prefix, nop.name)
(list_sep type_element (const " * ")) type_elements
let n_hierarchy t : _ -> O.n_hierarchy -> _ = fun ppf nh ->
let levels = List.Ne.map get_content ((get_content nh).levels) in
let nops = List.Ne.concat levels in
let name = get_name nh in
fprintf ppf "%s %s =@.@[%a@] [@@@@deriving show]" t
name
(list_sep (n_operator nh.content.prefix name) new_line) nops
let n_hierarchies (first:bool) : _ -> O.n_hierarchy list -> _ = fun ppf ss ->
match ss with
| [] -> ()
| hd :: tl ->
fprintf ppf "%a\n" (n_hierarchy (if first then "type" else "and")) hd ;
fprintf ppf "%a" (list_sep (n_hierarchy "and") (const "\n")) tl
let language : _ -> O.language -> _ = fun ppf l ->
fprintf ppf "%a@.@." comment "Language" ;
let first = List.length l.singletons = 0 in
fprintf ppf " %a@.%a@.@." comment "Singletons" singletons l.singletons ;
fprintf ppf " %a@.%a@." comment "Hierarchies" (n_hierarchies first) l.hierarchies ;
fprintf ppf " %a@.type entry_point = %s Location.wrap@.@." comment "Entry point" l.entry_point ;
()
end
module Print_Grammar = struct
open Format
open PP_helpers
let letters = [| "a" ; "b" ; "c" ; "d" ; "e" ; "f" ; "g" ; "h" ; "i" ; "j" |]
let manual_rule : _ -> O.manual_rule -> _ = fun ppf mr ->
let {name;content} = mr in
fprintf ppf "%s:@. @[<v>%a@]" name (list_sep string new_line) content.menhir_codes
let generated_rule : _ -> O.rule -> _ = fun ppf gr ->
let aux_rule : _ -> O.rhs -> _ = fun ppf rhs ->
let i = ref 0 in
let aux : _ -> O.rhs_element -> _ = fun ppf e ->
(match e with
| `Named s -> fprintf ppf "%s = wrap(%s)" letters.(!i) s
| `Option s -> fprintf ppf "%s = option(wrap(%s))" letters.(!i) s
| `List (mode, s) ->
fprintf ppf "%s = %swrap(%s))"
letters.(!i)
(match mode with
| Naked -> "naked_list("
| Naked_ne -> "naked_list_ne("
| Lead s -> "lead_list(" ^ (Token.to_string s) ^ ","
| Lead_ne s -> "lead_list_ne(" ^ (Token.to_string s) ^ ","
| Trail s -> "trail_list(" ^ (Token.to_string s) ^ ","
| Trail_option s -> "trail_option_list(" ^ (Token.to_string s) ^ ","
| Trail_force s -> "trail_force_list(" ^ (Token.to_string s) ^ ","
| Trail_force_ne s -> "trail_force_list_ne(" ^ (Token.to_string s) ^ ","
| Separated s -> "separated_list(" ^ (Token.to_string s) ^ ","
| Separated_ne s -> "separated_list_ne(" ^ (Token.to_string s) ^ ","
| Separated_nene s -> "separated_list_nene(" ^ (Token.to_string s) ^ ","
)
s
| `Token t -> i := !i - 1 ; string ppf @@ Token.to_string t) ;
i := !i + 1
in
fprintf ppf "%a" (list_sep aux (const " ")) rhs.content in
let aux_code : _ -> O.rhs -> _ = fun ppf rhs ->
let i = ref 0 in
let aux : O.rhs_element -> _ = fun e ->
let s = (match e with
| `Named _ | `List _ | `Option _ -> Some (letters.(!i))
| `Token _ -> i := !i - 1 ; None) in
i := !i + 1 ; s
in
let content = List.filter_map aux rhs.content in
fprintf ppf "%a (%a)" make_constructor (gr.name, rhs.name) (list_sep string (const " , ")) content
in
let aux : _ -> O.rhs -> _ = fun ppf rhs ->
fprintf ppf "| %a { %a }"
aux_rule rhs
aux_code rhs in
fprintf ppf "%s:@.%a" gr.name (list_sep aux (const "\n")) gr.content
let singleton : _ -> O.singleton -> _ = fun ppf s ->
match s with
| Manual s -> manual_rule ppf s
| Generated s -> generated_rule ppf s
(* Prints the grammar fragment of one operator inside a precedence level.
   [prev_lvl_name] is the nonterminal of the next-lower level and
   [cur_lvl_name] the level being defined; `Lower / `Current operands are
   resolved to those names.  Operands are bound to successive letters for
   the semantic action; tokens bind nothing, hence the counter decrement
   cancelling the shared increment below. *)
let n_operator_rule prev_lvl_name cur_lvl_name : _ -> O.n_operator -> _ = fun ppf nop ->
let i = ref 0 in
let element : _ -> O.element -> _ = fun ppf element ->
(match element with
| `Token t -> i := !i - 1 ; string ppf @@ Token.to_string t
| `List (mode, content) ->
(* Combinator opening paren is part of the mode string; the closing paren
   lives in the format string. *)
fprintf ppf "%s = %swrap(%s))"
letters.(!i)
(match mode with
| Naked -> "naked_list("
| Naked_ne -> "naked_list_ne("
| Lead s -> "lead_list(" ^ (Token.to_string s) ^ ","
| Lead_ne s -> "lead_list_ne(" ^ (Token.to_string s) ^ ","
| Trail s -> "trail_list(" ^ (Token.to_string s) ^ ","
| Trail_option s -> "trail_option_list(" ^ (Token.to_string s) ^ ","
| Trail_force s -> "trail_force_list(" ^ (Token.to_string s) ^ ","
| Trail_force_ne s -> "trail_force_list_ne(" ^ (Token.to_string s) ^ ","
| Separated s -> "separated_list(" ^ (Token.to_string s) ^ ","
| Separated_ne s -> "separated_list_ne(" ^ (Token.to_string s) ^ ","
| Separated_nene s -> "separated_list_nene(" ^ (Token.to_string s) ^ ","
)
(match content with | `Lower -> prev_lvl_name | `Named s -> s | `Current -> cur_lvl_name)
| `Named n ->
fprintf ppf "%s = wrap(%s)" letters.(!i) n
| `Current ->
fprintf ppf "%s = wrap(%s)" letters.(!i) cur_lvl_name
| `Lower ->
fprintf ppf "%s = wrap(%s)" letters.(!i) prev_lvl_name
) ;
i := !i + 1
in
(list_sep element (const " ")) ppf (get_content nop)
(* Prints the semantic action of one operator: the AST operator constructor
   for (prefix, name) applied to the letters bound by [n_operator_rule].
   Tokens are skipped with the same counter discipline so the two printers
   produce matching bindings. *)
let n_operator_code prefix : _ -> O.n_operator -> _ = fun ppf nop ->
let (name, elements) = destruct nop in
let elements' =
let i = ref 0 in
let aux : O.element -> _ = fun e ->
let r =
match e with
| `Token _ -> i := !i - 1 ; None
| `List _ | `Named _ | `Current | `Lower -> Some letters.(!i)
in i := !i + 1 ; r
in
List.filter_map aux elements in
fprintf ppf "%a (%a)" make_operator (prefix, name) (list_sep string (const " , ")) elements'
(* Prints one operator alternative: a comment line carrying its name,
   followed by the "| <rule> { <action> }" production. *)
let n_operator prefix prev_lvl_name cur_lvl_name : _ -> O.n_operator -> _ = fun ppf nop ->
let rule_pp = n_operator_rule prev_lvl_name cur_lvl_name in
let code_pp = n_operator_code prefix in
fprintf ppf "%a@;| %a { %a }" comment (get_name nop) rule_pp nop code_pp nop
(* Prints one precedence level of a hierarchy.  Every level except the
   lowest (signalled by an empty [prev_lvl_name]) gets an extra
   "| <prev> { $1 }" production to fall through to the level below. *)
let level prefix prev_lvl_name : _ -> O.level -> _ = fun ppf l ->
let name = get_name l in
let ops_pp = list_sep (n_operator prefix prev_lvl_name name) new_line in
if prev_lvl_name = "" then
fprintf ppf "%s :@. @[<v>%a@]" name ops_pp (get_content l)
else
fprintf ppf "%s :@. @[<v>%a@;| %s { $1 }@]" name ops_pp (get_content l) prev_lvl_name
(* Prints a whole precedence hierarchy: an %inline alias from the hierarchy
   name to its top level, then the levels from lowest to highest precedence
   (the levels list is reversed first), each level chaining to the one
   printed before it. *)
let n_hierarchy : _ -> O.n_hierarchy -> _ = fun ppf nh ->
let name = get_name nh in
let top_level = get_name @@ List.Ne.hd nh.content.levels in
fprintf ppf "%a@.%%inline %s : %s { $1 }@.@;" comment ("Top-level for " ^ name) name top_level;
(* [hd] is the last declared level, i.e. the one with no fall-through. *)
let (hd, tl) = List.Ne.rev (get_content nh).levels in
fprintf ppf "%a" (level nh.content.prefix "") hd ;
(* Fold threads each level's name as the previous-level name of the next. *)
let aux prev_name lvl =
new_lines 2 ppf () ;
fprintf ppf "%a" (level nh.content.prefix prev_name) lvl ;
get_name lvl
in
let _last_name = List.fold_left aux (get_name hd) tl in
()
(* Prints the whole Menhir grammar: the entry point wrapping [l.entry_point]
   up to EOF, then all singleton rules, then all operator hierarchies. *)
let language : _ -> O.language -> _ = fun ppf l ->
fprintf ppf "%a@.@." comment "Generated Language" ;
fprintf ppf "entry_point : wrap(%s) EOF { $1 }@.@." l.entry_point ;
fprintf ppf "%a@.@." comment "Singletons" ;
fprintf ppf "@[%a@]@.@." (list_sep singleton new_line) l.singletons ;
fprintf ppf "%a@.@." comment "Hierarchies" ;
fprintf ppf "@[%a@]" (list_sep n_hierarchy new_line) l.hierarchies ;
end
(* Builds a binary infix operator named [name] around token [t];
   associativity decides which operand stays at the current level and which
   drops to the level below. *)
let infix : string -> [`Left | `Right] -> token -> O.n_operator = fun name assoc t ->
let elements =
match assoc with
| `Left -> [`Current ; `Token t ; `Lower]
| `Right -> [`Lower ; `Token t ; `Current]
in
make_name name elements
(* Ocaml is bad *)
(* Infix operator with no token between the operands (juxtaposition),
   used e.g. for application. *)
let empty_infix : string -> [`Left | `Right] -> O.n_operator = fun name assoc ->
let elements =
match assoc with
| `Left -> [`Current ; `Lower]
| `Right -> [`Lower ; `Current]
in
make_name name elements
(* Operator that wraps nonterminal [name] in parentheses, tagged with
   constructor [constructor_name] in the AST. *)
let paren : string -> string -> O.n_operator = fun constructor_name name ->
make_name constructor_name [`Token Token.LPAREN ; `Named name ; `Token Token.RPAREN]
(* Names of the grammar's shared nonterminals; kept as values so rule
   definitions below stay consistent with each other. *)
let expression_name = "expression"
let type_expression_name = "type_expression"
let restricted_type_expression_name = "restricted_type_expression"
let program_name = "program"
let variable_name = "variable"
let pattern_name = "pattern"
let constructor_name = "constructor"
let int_name = "int_"
let tz_name = "tz_"
let unit_name = "unit_"
let string_name = "string_"
(* Lexical singletons: hand-written Menhir alternatives over raw tokens,
   with the OCaml type of their semantic value. *)
let variable = O.manual_singleton variable_name ["| NAME { $1 }"] "string"
let int = O.manual_singleton int_name ["| INT { $1 }"] "int"
let tz = O.manual_singleton tz_name ["| TZ { $1 }"] "int"
let unit = O.manual_singleton unit_name ["| UNIT { () }"] "unit"
let string = O.manual_singleton string_name ["| STRING { $1 }"] "string"
let constructor = O.manual_singleton constructor_name ["| CONSTRUCTOR_NAME { $1 }"] "string"
(* Grammar of patterns (left-hand sides of bindings and match clauses). *)
module Pattern = struct
open Token
open O
let application = empty_infix "application" `Left
(* name [ p ; p ; ... ] — list-like pattern with leading separators. *)
let data_structure : O.n_operator = make_name "data_structure" [
`Named variable_name ; `Token LSQUARE ; `List (Lead SEMICOLON, `Current) ; `Token RSQUARE ;
]
(* field = pattern, used inside record patterns. *)
let record_element : O.rule = make_name "p_record_element" [
make_name "" [`Named variable_name ; `Token EQUAL ; `Named pattern_name]
]
let record : O.n_operator = make_name "record" [
`Token LBRACKET ;
`List (Trail SEMICOLON, `Named record_element.name) ;
`Token RBRACKET ;
]
let pair = infix "pair" `Left COMMA
let type_annotation = make_name "type_annotation" [
`Current ; `Token COLON ; `Named restricted_type_expression_name
]
let variable : O.n_operator = make_name "variable" [ `Named variable_name ]
let constructor : O.n_operator = make_name "constructor" [ `Named constructor_name ]
(* Module.Path.x — dot-separated constructor names ending in a variable. *)
let module_ident : O.n_operator = make_name "module_ident" [
`List (Trail_force_ne DOT, `Named constructor_name) ; `Named variable_name ;
]
let unit : O.n_operator = make_name "unit" [ `Named unit_name ]
(* Restricted patterns: the subset allowed where a full pattern would be
   ambiguous (e.g. function parameters); parens re-admit full patterns. *)
let restricted_pattern_name = "restricted_pattern"
let restricted_pattern = O.name_hierarchy restricted_pattern_name "Pr" [
[variable ; unit] ;
[paren "restrict" pattern_name]
] []
(* Full pattern hierarchy, from loosest to tightest binding. *)
let main = O.name_hierarchy pattern_name "P" [
[record] ;
[type_annotation] ;
[pair] ;
[data_structure] ;
[application] ;
[variable ; constructor ; module_ident ; unit] ;
[paren "paren" pattern_name]
] []
let singletons = [O.rule_singleton record_element]
end
(* Grammar of expressions.  Three entry hierarchies share a common core
   ([main_hierarchy]): the full one, one without top-level sequences (for
   record fields) and one without match (for match-clause bodies). *)
module Expression = struct
open Token
open O
let application = empty_infix "application" `Right
let type_annotation = make_name "type_annotation" [
`Current ; `Token COLON ; `Named restricted_type_expression_name
]
(* name [ e ; e ; ... ] with optional trailing separator. *)
let data_structure : O.n_operator = make_name "data_structure" [
`Named variable_name ; `Token LSQUARE ; `List (Trail SEMICOLON, `Current) ; `Token RSQUARE ;
]
let fun_ : O.n_operator = make_name "fun" [
`Token FUN ; `Named pattern_name ;
`Token ARROW ; `Current ;
]
let let_in : O.n_operator = make_name "let_in" [
`Token LET ; `Named pattern_name ;
`Token EQUAL ; `Current ;
`Token IN ; `Current ;
]
let no_seq_name = "expression_no_seq"
let no_match_name = "expression_no_match"
(* field = expr, or just field (punning); the value is sequence-free to
   avoid ambiguity with the record's own semicolons. *)
let record_element : O.rule = make_name "e_record_element" [
make_name "record_explicit" [`Named variable_name ; `Token EQUAL ; `Named no_seq_name] ;
make_name "record_implicit" [`Named variable_name ] ;
]
let record : O.n_operator = make_name "record" [
`Token LBRACKET ;
`List (Trail SEMICOLON, `Named record_element.name) ;
`Token RBRACKET ;
]
(* if/then/else; the THEN branch is `Lower so a dangling ELSE binds to the
   innermost IF. *)
let ite : O.n_operator = make_name "ifthenelse" [
`Token IF ;
`Current ;
`Token THEN ;
`Lower ;
`Token ELSE ;
`Current ;
]
let it : O.n_operator = make_name "ifthen" [
`Token IF ;
`Current ;
`Token THEN ;
`Lower ;
]
(* let sequence = infix "sequence" `Left SEMICOLON *)
(* e ; e ; ... — requires at least two elements (nene). *)
let sequence = make_name "sequence" [
`List (Separated_nene SEMICOLON , `Lower)
]
(* pattern -> body; body excludes match to keep clause ends unambiguous. *)
let match_clause = make_name "e_match_clause" [
make_name "" [`Named pattern_name ; `Token ARROW ; `Named no_match_name]
]
let match_with = make_name "match" [
`Token MATCH ; `Current ; `Token WITH ;
`List (Lead_ne VBAR, `Named match_clause.name) ;
]
(* Comparison and arithmetic infix operators. *)
let lt = infix "lt" `Left LT
let le = infix "le" `Left LE
let gt = infix "gt" `Left GT
let eq = infix "eq" `Left EQUAL
let neq = infix "neq" `Left UNEQUAL
let cons = infix "cons" `Left DOUBLE_COLON
let addition = infix "addition" `Left PLUS
let substraction = infix "substraction" `Left MINUS
let multiplication = infix "multiplication" `Left TIMES
let division = infix "division" `Left DIV
(* Atoms. *)
let arith_variable : O.n_operator = make_name "variable" [ `Named variable_name ]
let int : O.n_operator = make_name "int" [ `Named int_name ]
let tz : O.n_operator = make_name "tz" [ `Named tz_name ]
let unit : O.n_operator = make_name "unit" [ `Named unit_name ]
let string : O.n_operator = make_name "string" [ `Named string_name ]
let constructor : O.n_operator = make_name "constructor" [ `Named constructor_name ]
let module_ident : O.n_operator = make_name "module_ident" [
`List (Trail_force_ne DOT, `Named constructor_name) ; `Named variable_name ;
]
let access : O.n_operator = infix "access" `Right DOT
let accessor : O.n_operator = make_name "accessor" [
`Named variable_name ; `List (Lead_ne DOT, `Named variable_name) ;
]
let assignment : O.n_operator = infix "assign" `Left LEFT_ARROW
let tuple = make_name "tuple" [
`List (Separated_nene COMMA, `Lower)
]
let name = make_name "name" [`Token TILDE ; `Current]
(* Shared core hierarchy, from loosest to tightest binding. *)
let main_hierarchy_name = "expression_main"
let main_hierarchy = O.name_hierarchy main_hierarchy_name "Eh" [
[tuple] ;
[type_annotation] ;
[lt ; le ; gt ; eq ; neq] ;
[assignment] ;
[cons] ;
[addition ; substraction] ;
[multiplication ; division] ;
[application] ;
[data_structure] ;
[name] ;
[arith_variable ; constructor ; module_ident ; accessor ; int ; unit ; string ; tz] ;
[paren "bottom" expression_name] ;
] []
let no_sequence_expression = O.name_hierarchy no_seq_name "Es" [
[let_in ; fun_ ; record ; ite ; it ; match_with] ;
[make_name "main" [`Named main_hierarchy_name]] ;
] []
let no_match_expression = O.name_hierarchy no_match_name "Em" [
[let_in ; fun_ ; record ; ite ; it ] ;
[make_name "main" [`Named main_hierarchy_name]] ;
] []
let expression = O.name_hierarchy expression_name "E" [
[sequence] ;
[let_in ; fun_ ; record ; ite ; it ; match_with] ;
[make_name "main" [`Named main_hierarchy_name]] ;
] []
let singletons = List.map O.rule_singleton [record_element ; match_clause]
end
(* Grammar of type expressions, plus the restricted subset usable in
   annotations (where tuples/records would be ambiguous). *)
module Type_expression = struct
open Token
open O
(* field : type, used inside record types. *)
let record_element : O.rule = make_name "t_record_element" [
make_name "" [`Named variable_name ; `Token COLON ; `Named type_expression_name]
]
let record : O.n_operator = make_name "record" [
`Token LBRACKET ;
`List (Trail SEMICOLON, `Named record_element.name) ;
`Token RBRACKET ;
]
let application = empty_infix "application" `Right
let tuple = make_name "tuple" [
`List (Separated_nene COMMA, `Lower)
]
let type_variable : O.n_operator = make_name "variable" [ `Named variable_name ]
(* Restricted form: parens re-admit full type expressions. *)
let restricted_type_expression = O.name_hierarchy restricted_type_expression_name "Tr" [
[application] ;
[type_variable] ;
[paren "paren" type_expression_name] ;
] []
let type_expression = O.name_hierarchy type_expression_name "T" [
[record] ;
[tuple] ;
[application] ;
[type_variable] ;
[paren "paren" type_expression_name]
] []
let singletons = [O.rule_singleton record_element]
end
(* Top-level program grammar: a list of declarations optionally separated
   by ";;". *)
module Program = struct
open Token
open O
let statement_name = "statement"
let program : O.rule = make_name program_name [make_name "" [
`List (Trail_option DOUBLE_SEMICOLON, statement_name)
]]
(* Function parameter: a restricted pattern or a ~named implicit. *)
let param_name = "param"
let param : O.rule = make_name param_name [
make_name "restricted_pattern" [ `Named Pattern.restricted_pattern_name ] ;
make_name "implicit_named_param" [ `Token TILDE ; `Named variable_name ] ;
]
(* Trailing underscore avoids clashing with the expression/pattern
   "type_annotation" operators above. *)
let type_annotation_name = "type_annotation_"
let type_annotation : O.rule = make_name type_annotation_name [
make_name "" [ `Token COLON ; `Named type_expression_name ] ;
]
(* Body shared by let / let%init / let%entry:
   name params [: type] = expression. *)
let let_content_name = "let_content"
let let_content : O.rule = make_name let_content_name [
make_name "" [
`Named variable_name ;
`List (Naked, param_name) ;
`Option type_annotation_name ;
`Token EQUAL ;
`Named expression_name ;
] ;
]
let statement : O.rule = make_name statement_name [
make_name "variable_declaration" [`Token LET ; `Named let_content_name] ;
make_name "init_declaration" [`Token LET_INIT ; `Named let_content_name] ;
make_name "entry_declaration" [`Token LET_ENTRY ; `Named let_content_name] ;
make_name "type_declaration" [`Token TYPE ; `Named variable_name ; `Token EQUAL ; `Named type_expression_name] ;
]
let singletons = List.map O.rule_singleton [
let_content ;
type_annotation ;
program ;
statement ;
param ;
]
end
(* The complete language description: entry point, all singleton rules,
   and all operator hierarchies, fed to the printers below. *)
let language = O.language program_name (
variable :: constructor :: int :: unit :: string :: tz ::
Program.singletons @
Pattern.singletons @
Expression.singletons @
Type_expression.singletons
) [
Pattern.main ;
Pattern.restricted_pattern ;
Expression.main_hierarchy ;
Expression.no_sequence_expression ;
Expression.no_match_expression ;
Expression.expression ;
Type_expression.restricted_type_expression ;
Type_expression.type_expression ;
]
(* Entry point: print the generated Menhir grammar ("parser") or the
   generated AST module ("ast") on stdout; any other invocation exits 1. *)
let () =
if Array.length Sys.argv = 1 then exit 1 ;
match Sys.argv.(1) with
| "parser" ->
Format.printf "%a@.%a\n" PP_helpers.comment "Full Grammar" Print_Grammar.language language
| "ast" ->
Format.printf "%a@.%a\n" PP_helpers.comment "AST" Print_AST.language language
| _ -> exit 1

View File

@ -1,275 +0,0 @@
open Simple_utils
(* A value paired with the identifier it is known by in the grammar. *)
type 'a name = {
name : string ;
content : 'a ;
}
(*
val make_name : string -> 'a -> 'a name
val destruct : 'a name -> ( string * 'a )
val get_name : 'a name -> string
val get_content : 'a name -> 'a
*)
module Token = Lex.Token
type token = Token.token
(* Vocabulary for describing a grammar: list separator modes, rule
   right-hand sides, operators, precedence levels and whole languages. *)
module O : sig
(* How list elements are separated/terminated; *_ne variants are
   non-empty, Naked has no separator at all. *)
type list_mode =
| Trail of token
| Trail_option of token
| Trail_force of token
| Trail_force_ne of token
| Lead of token
| Lead_ne of token
| Separated of token
| Separated_ne of token
| Separated_nene of token
| Naked
| Naked_ne
type 'a list_element = list_mode * 'a
(* One symbol of a rule's right-hand side. *)
type rhs_element = [
| `Named of string
| `Token of token
| `List of string list_element
| `Option of string
]
type rhs = rhs_element list name
type rule = rhs list name
(* A rule written by hand: raw Menhir alternatives plus its AST type. *)
type manual_rule_content = {
menhir_codes : string list ;
ast_code : string ;
}
type manual_rule = manual_rule_content name
type singleton =
| Manual of manual_rule
| Generated of rule
type name_element = [
| `Named of string
| `Current
| `Lower
]
(* One symbol of an operator; `Current / `Lower refer to the operator's
   own precedence level and the one below it. *)
type element = [
| `Named of string
| `Token of token
| `List of name_element list_element
| `Current
| `Lower
]
type operator = element list
type n_operator = operator name
type n_operators = n_operator list
type level = n_operators name
type level_list = level list
type levels = level List.Ne.t
(* A named stack of precedence levels with an AST-constructor prefix. *)
type hierarchy = {
prefix : string ;
levels : levels ;
auxiliary_rules : rule list ;
}
type n_hierarchy = hierarchy name
val make_hierarchy : string -> levels -> rule list -> hierarchy
type language = {
entry_point : string ;
singletons : singleton list ;
hierarchies : n_hierarchy list ;
}
val get_op : n_operator -> operator
(*
val manual_singleton : string -> string list -> string -> singleton
val rule_singleton : rule -> singleton
val language : string -> singleton list -> n_hierarchy list -> language
val name_hierarchy : string -> string -> n_operators list -> rule list -> n_hierarchy
*)
end
(* Sanity checks over a language description (well-formedness and operator
   associativity). *)
module Check : sig
open O
val well_formed : language -> unit
val associativity : language -> unit
end
(*
val make_constructor : Format.formatter -> (string * string) -> unit
val make_operator : Format.formatter -> (string * string) -> unit
*)
(* Printer of the generated OCaml AST module; exports nothing publicly. *)
module Print_AST : sig
(*
open Format
val manual_rule : formatter -> O.manual_rule -> unit
val generated_rule : formatter -> O.rule -> unit
val singleton : formatter -> O.singleton -> unit
val singletons : formatter -> O.singleton list -> unit
val n_operator : string -> string -> formatter -> O.n_operator -> unit
val n_hierarchy : string -> formatter -> O.n_hierarchy -> unit
val n_hierarchies : bool -> formatter -> O.n_hierarchy list -> unit
val language : formatter -> O.language -> unit
*)
end
(* Printer of the generated Menhir grammar; exports nothing publicly. *)
module Print_Grammar : sig
(*
open Format
val letters : string array
val manual_rule : formatter -> O.manual_rule -> unit
val generated_rule : formatter -> O.rule -> unit
val singleton : formatter -> O.singleton -> unit
val n_operator_rule : string -> string -> formatter -> O.n_operator -> unit
val n_operator_code : string -> formatter -> O.n_operator -> unit
val n_operator : string -> string -> string -> formatter -> O.n_operator -> unit
val level : string -> string -> formatter -> O.level -> unit
val n_hierarchy : formatter -> O.n_hierarchy -> unit
val language : formatter -> O.language -> unit
*)
end
(*
val infix : string -> [`Left | `Right] -> token -> O.n_operator
(* Ocaml is bad *)
val empty_infix : string -> [`Left | `Right] -> O.n_operator
val paren : string -> string -> O.n_operator
val expression_name : string
val type_expression_name : string
val restricted_type_expression_name : string
val program_name : string
val variable_name : string
val pattern_name : string
val constructor_name : string
val int_name : string
val tz_name : string
val unit_name : string
val string_name : string
val variable : O.singleton
val int : O.singleton
val tz : O.singleton
val unit : O.singleton
val string : O.singleton
val constructor : O.singleton
*)
(* Pattern grammar; all members kept private. *)
module Pattern : sig
(*
val application : O.n_operator
val data_structure : O.n_operator
val record_element : O.rule
val record : O.n_operator
val pair : O.n_operator
val type_annotation : [> `Current | `Named of string | `Token of token ] list name
val variable : O.n_operator
val constructor : O.n_operator
val module_ident : O.n_operator
val unit : O.n_operator
val restricted_pattern_name : string
val restricted_pattern : O.n_hierarchy
val main : O.n_hierarchy
val singletons : O.singleton list
*)
end
(* Expression grammar; only [access] is currently exported. *)
module Expression : sig
(*
val application : O.n_operator
val type_annotation : [> `Current | `Named of string | `Token of token ] list name
val data_structure : O.n_operator
val fun_ : O.n_operator
val let_in : O.n_operator
val no_seq_name : string
val no_match_name : string
val record_element : O.rule
val record : O.n_operator
val ite : O.n_operator
val it : O.n_operator
(* let sequence = infix "sequence" `Left SEMICOLON *)
val sequence : [> `List of O.list_mode * [> `Lower ] ] list name
val match_clause : [> `Named of string | `Token of token ] list name list name
val match_with : [> `Current
| `List of O.list_mode * [> `Named of string ]
| `Token of token ] list name
val lt : O.n_operator
val le : O.n_operator
val gt : O.n_operator
val eq : O.n_operator
val neq : O.n_operator
val cons : O.n_operator
val addition : O.n_operator
val substraction : O.n_operator
val multiplication : O.n_operator
val division : O.n_operator
val arith_variable : O.n_operator
val int : O.n_operator
val tz : O.n_operator
val unit : O.n_operator
val string : O.n_operator
val constructor : O.n_operator
val module_ident : O.n_operator
*)
val access : O.n_operator
(*
val accessor : O.n_operator
val assignment : O.n_operator
val tuple : [> `List of O.list_mode * [> `Lower ] ] list name
val name : [> `Current | `Token of token ] list name
val main_hierarchy_name : string
val main_hierarchy : O.n_hierarchy
val no_sequence_expression : O.n_hierarchy
val no_match_expression : O.n_hierarchy
val expression : O.n_hierarchy
val singletons : O.singleton list
*)
end
(* Type-expression grammar; all members kept private. *)
module Type_expression : sig
(*
val record_element : O.rule
val record : O.n_operator
val application : O.n_operator
val tuple : [> `List of O.list_mode * [> `Lower ] ] list name
val type_variable : O.n_operator
val restricted_type_expression : O.n_hierarchy
val type_expression : O.n_hierarchy
val singletons : O.singleton list
*)
end
(* Top-level program grammar; all members kept private. *)
module Program : sig
(*
val statement_name : string
val program : O.rule
val param_name : string
val param : O.rule
val type_annotation_name : string
val type_annotation : O.rule
val let_content_name : string
val let_content : O.rule
val statement : O.rule
val singletons : O.singleton list
*)
end
(*
val language : O.language
*)

View File

@ -1,54 +0,0 @@
; Lexer library: token definitions plus the ocamllex-generated lexer.
(library
(name lex)
(public_name ligo.multifix.lex)
(libraries
simple-utils
tezos-utils
)
(modules token token_type lexer)
)
; Generator executable that emits token.mly, token.ml and lexer.mll.
(executable
(name generator)
(libraries
str
simple-utils
)
(modules generator)
(flags (:standard -w +1..62-4-9-44-40-42-48@39@33 ))
)
(rule
(targets token.mly)
(deps generator.exe)
(action (system "./generator.exe mly > token.mly"))
)
(rule
(targets token.ml)
(deps generator.exe)
(action (system "./generator.exe ml > token.ml"))
)
(rule
(targets lexer.mll)
(deps generator.exe)
(action (system "./generator.exe mll > lexer.mll"))
)
; Token type is derived from the generated .mly by menhir itself.
(rule
(targets token_type.ml token_type.mli)
(deps token.mly)
(action (system "menhir --only-tokens token.mly --base token_type"))
)
(alias
(name lexer.mll)
(deps token.ml)
)
(rule
(targets lexer.ml)
(deps token.ml lexer.mll)
(action (system "ocamllex lexer.mll"))
)

View File

@ -1,185 +0,0 @@
(* A token to generate: its OCaml constructor name and its source text. *)
type pre_token = {
name : string ;
pattern : string ;
}
let make name pattern = { name ; pattern }
(* Derives the constructor from the keyword itself: uppercase it and turn
   every non-alphanumeric character into '_' (so "let%init" -> LET_INIT). *)
let keyword = fun k ->
let regexp = Str.regexp "[^0-9a-zA-Z]" in
let constructor_name =
Str.global_replace regexp "_"
@@ String.uppercase_ascii k
in
make constructor_name k
(* Symbols get an explicit constructor name since one cannot be derived. *)
let symbol = fun sym name -> make name sym
(* Emits the %token declarations of the generated token.mly.  The
   value-carrying tokens (INT, STRING, ...) are fixed; the generated
   keyword/symbol tokens carry no value. *)
module Print_mly = struct
open Format
let token = fun ppf pre_token ->
fprintf ppf "%%token %s" pre_token.name
let tokens = fun ppf tokens ->
let open Simple_utils.PP_helpers in
fprintf ppf "%%token EOF\n" ;
fprintf ppf "%%token <int> INT\n" ;
fprintf ppf "%%token <int> NAT\n" ;
fprintf ppf "%%token <int> TZ\n" ;
fprintf ppf "%%token <string> STRING\n" ;
fprintf ppf "%%token <string> NAME\n" ;
fprintf ppf "%%token <string> CONSTRUCTOR_NAME\n" ;
fprintf ppf "\n%a\n\n" (list_sep token (const "\n")) tokens ;
fprintf ppf "%%%%\n"
end
(* Emits the generated lexer.mll: a fixed prelude (numbers, strings,
   newlines), one rule per generated token, and a fixed postlude
   (identifiers, EOF, strings, nested comments). *)
module Print_mll = struct
open Format
(* One lexer alternative mapping the token's source text to its constructor. *)
let token = fun ppf {name;pattern} ->
fprintf ppf "| \"%s\" { %s }" pattern name
(* Raw text placed before the generated token alternatives. *)
let pre =
{pre|{
open Token
exception Error of string
exception Unexpected_character of string
}
(* This rule analyzes a single line and turns it into a stream of
tokens. *)
rule token = parse
(*
| "//" ([^ '\n']* ) (['\n' '\r']+)
{ Lexing.new_line lexbuf ; token lexbuf }
*)
| ('\r'? '\n' '\r'?)
{ Lexing.new_line lexbuf; token lexbuf }
| '"' { string "" lexbuf }
| [' ' '\t']
{ token lexbuf }
| (['0'-'9']+ as i) 'p'
{ NAT (int_of_string i) }
| (['0'-'9']+ as n) '.' (['0'-'9']['0'-'9'] as d) "tz" { TZ ((int_of_string n) * 100 + (int_of_string d)) }
| (['0'-'9']+ as i)
{ INT (int_of_string i) }
|pre}
(* Raw text placed after the generated token alternatives; identifier rules
   must come last so keywords above win. *)
let post =
{post|
| (['a'-'z''_']['a'-'z''A'-'Z''0'-'9''_']*) as v
{ NAME v }
| (['A'-'Z']['a'-'z''A'-'Z''0'-'9''_']*) as v
{ CONSTRUCTOR_NAME v }
| eof { EOF }
| "(*" { comment 1 lexbuf }
| _
{ raise (Unexpected_character (Printf.sprintf "At offset %d: unexpected character.\n" (Lexing.lexeme_start lexbuf))) }
and string s = parse
| "\\\"" { string (s ^ "\"") lexbuf }
| "\\\\" { string (s ^ "\\") lexbuf }
| '"' { STRING s }
| eof { raise (Unexpected_character "missing string terminator") }
| _ as c { string (s ^ (String.make 1 c)) lexbuf }
and comment n = parse
| "*)" { if n = 1 then token lexbuf else comment (n - 1) lexbuf }
| "(*" { comment (n + 1) lexbuf }
| '"' ( [^ '"' '\\'] | ( '\\' [^ '"'] ) ) '"' { comment n lexbuf }
| eof { raise (Unexpected_character "missing comment terminator") }
| ('\r'? '\n' '\r'?) { Lexing.new_line lexbuf; comment n lexbuf }
| _ { comment n lexbuf }
|post}
let tokens = fun ppf tokens ->
let open Simple_utils.PP_helpers in
fprintf ppf "%s%a\n%s" pre (list_sep token (const "\n")) tokens post
end
(* Emits the generated token.ml: re-export of Token_type plus a
   [to_string] covering the fixed tokens and every generated one. *)
module Print_ml = struct
open Format
(* Partial record pattern {name} is deliberate (warning 9 is disabled in
   the dune flags for this executable). *)
let token = fun ppf {name} ->
fprintf ppf " | %s -> \"%s\"" name name
let pre =
{pre|include Token_type
let to_string : token -> string = function
| STRING _ -> "STRING"
| NAME _ -> "NAME s"
| CONSTRUCTOR_NAME _ -> "CONSTRUCTOR_NAME s"
| INT _ -> "INT n"
| NAT _ -> "NAT n"
| TZ _ -> "TZ n"
| EOF -> "EOF"
|pre}
let tokens = fun ppf tokens ->
let open Simple_utils.PP_helpers in
fprintf ppf "%s%a" pre (list_sep token (const "\n")) tokens
end
(* All tokens of the language.  Order matters for the lexer: "let%init"
   and "let%entry" must precede "let", and the multi-character symbols
   (";;", "::", "<-", ...) must precede their one-character prefixes. *)
let tokens = [
keyword "let%init" ;
keyword "let%entry" ;
keyword "let" ;
keyword "type" ;
keyword "in" ;
keyword "if" ;
keyword "then" ;
keyword "else" ;
(* keyword "block" ;
* keyword "for" ;
* keyword "const" ; *)
keyword "fun" ;
keyword "match" ;
keyword "with" ;
symbol "()" "UNIT" ;
symbol "+" "PLUS" ;
symbol "~" "TILDE" ;
symbol "->" "ARROW" ;
symbol "<-" "LEFT_ARROW" ;
symbol "<=" "LE" ;
symbol "<>" "UNEQUAL" ;
symbol "<" "LT" ;
symbol ">" "GT" ;
symbol "-" "MINUS" ;
symbol "*" "TIMES" ;
symbol "/" "DIV" ;
symbol "=" "EQUAL" ;
symbol "|" "VBAR" ;
symbol "[" "LSQUARE" ;
symbol "]" "RSQUARE" ;
symbol "(" "LPAREN" ;
symbol ")" "RPAREN" ;
symbol "{" "LBRACKET" ;
symbol "}" "RBRACKET" ;
symbol ";;" "DOUBLE_SEMICOLON" ;
symbol ";" "SEMICOLON" ;
symbol "::" "DOUBLE_COLON" ;
symbol ":" "COLON" ;
symbol "," "COMMA" ;
symbol "." "DOT" ;
]
(* Entry point: emit the generated lexer (.mll), token grammar (.mly) or
   token module (.ml) on stdout, selected by the first CLI argument. *)
let () =
let argn = Array.length Sys.argv in
if argn = 1 then exit 1 ;
let arg = Sys.argv.(1) in
let open Simple_utils.PP_helpers in
match arg with
| "mll" -> (
Format.printf "%a@.%a\n" comment "Generated .mll" Print_mll.tokens tokens
)
| "mly" -> (
Format.printf "%a@.%a\n" comment "Generated .mly" Print_mly.tokens tokens
)
| "ml" -> (
Format.printf "%a@.%a\n" comment "Generated .ml" Print_ml.tokens tokens
)
| _ -> exit 1

View File

@ -1,43 +0,0 @@
(*
type pre_token = {
name : string ;
pattern : string ;
}
val make : string -> string -> pre_token
val keyword : string -> pre_token
val symbol : string -> string -> pre_token
module Print_mly : sig
(*
open Format
val token : formatter -> pre_token -> unit
val tokens : formatter -> pre_token list -> unit
*)
end
module Print_mll : sig
(*
open Format
val pre : string
val post : string
val token : formatter -> pre_token -> unit
val tokens : formatter -> pre_token list -> unit
*)
end
module Print_ml : sig
(*
open Format
val pre : string
val token : formatter -> pre_token -> unit
val tokens : formatter -> pre_token list -> unit
*)
end
val tokens : pre_token list
*)

View File

@ -1,25 +0,0 @@
(* A source span inside a file; lines come from Lexing positions, columns
   are 0-based offsets from the beginning of the line. *)
type file_location = {
filename : string ;
start_line : int ;
start_column : int ;
end_line : int ;
end_column : int ;
}
(* Locations for code with no file of origin (generated, builtin, ...). *)
type virtual_location = string
type t =
| File of file_location
| Virtual of virtual_location
(* Builds a file location from a pair of lexer positions; columns are
   computed as [pos_cnum - pos_bol]. *)
let make (start_pos:Lexing.position) (end_pos:Lexing.position) : t =
let filename = start_pos.pos_fname in
let start_line = start_pos.pos_lnum in
let end_line = end_pos.pos_lnum in
let start_column = start_pos.pos_cnum - start_pos.pos_bol in
let end_column = end_pos.pos_cnum - end_pos.pos_bol in
File { filename ; start_line ; start_column ; end_line ; end_column }
let virtual_location s = Virtual s
let dummy = virtual_location "dummy"

View File

@ -1,20 +0,0 @@
(*
type file_location = {
filename : string ;
start_line : int ;
start_column : int ;
end_line : int ;
end_column : int ;
}
type virtual_location = string
type t =
| File of file_location
| Virtual of virtual_location
val make : Lexing.position -> Lexing.position -> t
val virtual_location : string -> t
val dummy : t
*)

View File

@ -1,3 +0,0 @@
(* Umbrella module: re-exports the multifix AST, parser and user API. *)
module Ast = Ast
module Parser = Parser
module User = User

View File

@ -1,72 +0,0 @@
%{
open Ast
%}
%start <Ast.entry_point> entry_point
%%
(* Parameterized list combinators used by the generated grammar.
   naked_*: no separator; lead_*: separator before each element;
   trail_*: separator after elements (optional, forced, or optional per
   element); separated_*: separator between elements.  _ne variants are
   non-empty, _nene requires at least two elements. *)
naked_list(X):
| { [] }
| x = X xs = naked_list(X) { x :: xs }
naked_list_ne(X):
| x = X { [ x ] }
| x = X xs = naked_list_ne(X) { x :: xs }
trail_list(separator, X):
| { [] }
| trail_list_content(separator, X) { $1 }
trail_list_content(separator, X):
| x = trail_list_last(separator, X) { x }
| x = X separator xs = trail_list_content(separator, X) { x :: xs }
trail_list_last(separator, X):
| x = X option(separator) { [ x ] }
trail_force_list(separator, X):
| { [] }
| x = X separator xs = trail_force_list(separator, X) { x :: xs }
trail_force_list_ne(separator, X):
| x = X separator { [ x ] }
| x = X separator xs = trail_force_list_ne(separator, X) { x :: xs }
trail_option_list(separator, X):
| { [] }
| trail_option_list_content(separator, X) { $1 }
trail_option_list_content(separator, X):
| x = trail_option_list_last(separator, X) { x }
| x = X option(separator) xs = trail_option_list_content(separator, X) { x :: xs }
trail_option_list_last(separator, X):
| x = X option(separator) { [ x ] }
lead_list_ne(separator, X):
| separator x = X { [x] }
| separator x = X xs = lead_list_ne(separator, X) { x :: xs }
lead_list(separator, X):
| { [] }
| lead_list_content(separator, X) { $1 }
lead_list_content(separator, X):
| x = lead_list_first(separator, X) { x }
| xs = lead_list_content(separator, X) separator x = X { xs @ [ x ] }
lead_list_first (separator, X):
| option(separator) x = X { [ x ] }
separated_list_ne(separator, X):
| x = X { [x] }
| x = X separator xs = separated_list_ne(separator, X) { x :: xs }
separated_list_nene(separator, X):
| x = X separator y = X { [x ; y] }
| x = X separator xs = separated_list_nene(separator, X) { x :: xs }
(* Attaches a source location to any parsed value. *)
%inline wrap(X):
| x = X { let loc = Location.make $startpos $endpos in Location.wrap ~loc x }

View File

@ -1,40 +0,0 @@
open! Trace
(* Parses the file at [source] into the multifix AST, wrapping lexer and
   parser failures into Trace errors annotated with the offending lexeme
   and its position.
   NOTE(review): the input channel is never closed, including on error —
   worth a close_in / finalizer; confirm against Trace's error flow. *)
let parse_file (source: string) : Ast.entry_point result =
(* let pp_input =
* let prefix = Filename.(source |> basename |> remove_extension)
* and suffix = ".pp.ligo"
* in prefix ^ suffix in
*
* let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
* source pp_input in
* let%bind () = sys_command cpp_cmd in
*
* let%bind channel =
* generic_try (simple_error "error opening file") @@
* (fun () -> open_in pp_input) in *)
let%bind channel =
generic_try (simple_error "error opening file") @@
(fun () -> open_in source) in
let lexbuf = Lexing.from_channel channel in
let module Lexer = Lex.Lexer in
(specific_try (fun e ->
(* Builds an error carrying the current lexeme and its span. *)
let error s () =
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
let str () = Format.sprintf
"at \"%s\" from (%d, %d) to (%d, %d)\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
error s str () in
match e with
| Parser.Error -> (fun () -> error (thunk "Parse") ())
| Lexer.Error s -> (fun () -> error (fun () -> "Lexer " ^ s) ())
| Lexer.Unexpected_character s -> error (fun () -> "Unexpected char " ^ s) (* TODO: this allows injection of ANSI escape codes in error messages, fix this. *)
| _ -> simple_error "unrecognized parse_ error"
)) @@ (fun () ->
let raw = Parser.entry_point Lexer.token lexbuf in
raw
) >>? fun raw ->
ok raw

View File

@ -1,3 +0,0 @@
open! Trace
(** [parse_file path] parses the file at [path] into the multifix AST. *)
val parse_file : string -> Ast.entry_point result

View File

@ -6,8 +6,7 @@
tezos-utils tezos-utils
parser_shared parser_shared
parser_pascaligo parser_pascaligo
parser_camligo parser_cameligo
parser_ligodity
parser_reasonligo parser_reasonligo
) )
(preprocess (preprocess

View File

@ -1,296 +0,0 @@
(* Abstract source-region types; [lexeme] is located source text. *)
type region
type 'a reg
type lexeme = string reg
(* Tokens *)
(* One polymorphic-variant alias per token, each carrying its lexeme. *)
type integer = [`Integer of lexeme reg]
type natural = [`Natural of lexeme reg]
type ident = [`Ident of lexeme reg]
type uident = [`Uident of lexeme reg]
type chr = [`Chr of lexeme reg]
type str = [`Str of lexeme reg]
type bool_or = [`bool_or of lexeme reg]
type bool_and = [`bool_and of lexeme reg]
type lt = [`lt of lexeme reg]
type le = [`le of lexeme reg]
type gt = [`gt of lexeme reg]
type ge = [`ge of lexeme reg]
type eq = [`eq of lexeme reg]
type ne = [`ne of lexeme reg]
type cat = [`cat of lexeme reg]
type cons = [`cons of lexeme reg]
type plus = [`plus of lexeme reg]
type minus = [`minus of lexeme reg]
type times = [`times of lexeme reg]
type slash = [`slash of lexeme reg]
type div = [`div of lexeme reg]
type kwd_mod = [`kwd_mod of lexeme reg]
type uminus = [`uminus of lexeme reg]
type kwd_not = [`kwd_not of lexeme reg]
type lpar = [`lpar of lexeme reg]
type rpar = [`rpar of lexeme reg]
type lbracket = [`lbracket of lexeme reg]
type rbracket = [`rbracket of lexeme reg]
type lbrace = [`lbrace of lexeme reg]
type rbrace = [`rbrace of lexeme reg]
type semi = [`semi of lexeme reg]
type comma = [`comma of lexeme reg]
type colon = [`colon of lexeme reg]
type vbar = [`vbar of lexeme reg]
type arrow = [`arrow of lexeme reg]
type wild = [`wild of lexeme reg]
type kwd_and = [`kwd_and of lexeme reg]
type kwd_begin = [`kwd_begin of lexeme reg]
type kwd_else = [`kwd_else of lexeme reg]
type kwd_end = [`kwd_end of lexeme reg]
type kwd_false = [`kwd_false of lexeme reg]
type kwd_fun = [`kwd_fun of lexeme reg]
type kwd_if = [`kwd_if of lexeme reg]
type kwd_in = [`kwd_in of lexeme reg]
type kwd_let = [`kwd_let of lexeme reg]
type kwd_list = [`kwd_list of lexeme reg]
type kwd_map = [`kwd_map of lexeme reg]
type kwd_match = [`kwd_match of lexeme reg]
type kwd_of = [`kwd_of of lexeme reg]
type kwd_set = [`kwd_set of lexeme reg]
type kwd_then = [`kwd_then of lexeme reg]
type kwd_true = [`kwd_true of lexeme reg]
type kwd_type = [`kwd_type of lexeme reg]
type kwd_with = [`kwd_with of lexeme reg]
(* Closed sum of all tokens, one ordinary constructor per alias above. *)
type token =
Integer of integer
| Natural of natural
| Ident of ident
| Uident of uident
| Chr of chr
| Str of str
| Bool_or of bool_or
| Bool_and of bool_and
| Lt of lt
| Le of le
| Gt of gt
| Ge of ge
| Eq of eq
| Ne of ne
| Cat of cat
| Cons of cons
| Plus of plus
| Minus of minus
| Times of times
| Slash of slash
| Div of div
| Kwd_mod of kwd_mod
| Uminus of uminus
| Kwd_not of kwd_not
| Lpar of lpar
| Rpar of rpar
| Lbracket of lbracket
| Rbracket of rbracket
| Lbrace of lbrace
| Rbrace of rbrace
| Semi of semi
| Comma of comma
| Colon of colon
| Vbar of vbar
| Arrow of arrow
| Wild of wild
| Kwd_and of kwd_and
| Kwd_begin of kwd_begin
| Kwd_else of kwd_else
| Kwd_end of kwd_end
| Kwd_false of kwd_false
| Kwd_fun of kwd_fun
| Kwd_if of kwd_if
| Kwd_in of kwd_in
| Kwd_let of kwd_let
| Kwd_list of kwd_list
| Kwd_map of kwd_map
| Kwd_match of kwd_match
| Kwd_of of kwd_of
| Kwd_set of kwd_set
| Kwd_then of kwd_then
| Kwd_true of kwd_true
| Kwd_type of kwd_type
| Kwd_with of kwd_with
(* The following are meant to be part of a library *)
(* Generic sequence types: plain, non-empty, separator-interleaved
   (non-empty and possibly-empty), and separated-or-terminated. *)
type 'item seq = 'item list
type 'item nseq = 'item * 'item seq
type ('item,'sep) nsepseq = 'item * ('sep * 'item) list
type ('item,'sep) sepseq = ('item,'sep) nsepseq option
type ('item,'sep) sep_or_term_list =
('item,'sep) nsepseq * 'sep option
(* The following are specific to the present grammar *)
type 'item list_of__rec_0 = {
lbracket__1 : lbracket;
list_of__rec_0__2 : ('item, semi) nsepseq;
rbracket__3 : rbracket
}
type 'item list_of = [`List of 'item list_of__rec_0]
type 'item tuple__rec_0 = {
item__1 : 'item;
comma__2 : comma;
tuple__rec_0__3 : ('item, comma) nsepseq
}
type 'item tuple = [`Tuple of 'item tuple__rec_0]
type 'item par__rec_0 = {
lpar__1 : lpar;
item__2 : 'item;
rpar__3 : rpar
}
type 'item par = [`Par of 'item par__rec_0]
(* Non-recursive value declarations *)
type sub_irrefutable = [
`P_Var of string
| `P_Wild
| `P_Unit
| closed_irrefutable par
]
and closed_irrefutable = [
sub_irrefutable tuple
| `P_SubI of sub_irrefutable (* `P_SubI necessary *)
]
type irrefutable = [
sub_irrefutable tuple
| sub_irrefutable
]
type let_binding__rec_1 = {
variable__1 : variable;
sub_irrefutable__nseq__2 : sub_irrefutable nseq;
eq__3 : eq;
expr__4 : expr
}
type let_binding__rec_2 = {
irrefutable__1 : irrefutable;
eq__2 : eq;
expr__3 : expr
}
type let_binding = [
`LetFun of let_binding__rec_1
| `LetNonFun of let_binding__rec_2 (* `LetNonFun necessary *)
]
type let_bindings = (let_binding, kwd_and) nsepseq
type let_declarations = {
kwd_let : kwd_let;
let_bindings : let_bindings
}
(*
type pattern = [
`P_Cons of {sub_pattern: sub_pattern; cons: cons; tail: tail}
| `P_Tuple
*)
(* Type declarations *)
type type_name = ident
type field_name = ident
type constr = uident
type type_constr = [
`T_Constr of ident
| kwd_set
| kwd_map
| kwd_list
]
type record_type = {
lbrace : lbrace;
record_type__2 : (field_decl, semi) sep_or_term_list;
rbrace : rbrace
}
and field_decl = {
field_name : field_name;
colon : colon;
type_expr : type_expr
}
and variant = {
constr : constr;
kwd_of : kwd_of;
cartesian : cartesian
}
and sum_type = {
vbar_opt : vbar option;
sum_type__2 : (variant, vbar) nsepseq
}
and type_param__rec_1 = {
core_type : core_type;
type_constr : type_constr
}
and type_param = [
(type_expr, comma) nsepseq par
| `T_App of type_param__rec_1
]
and core_type__rec_1 = {
type_param : type_param;
type_constr : type_constr
}
and core_type = [
`T_Alias of type_name
| `T_App of core_type__rec_1
| cartesian par
]
and fun_type__rec_0 = {
core_type : core_type;
arrow : arrow;
fun_type : fun_type
}
and fun_type = [
`T_Fun of fun_type__rec_0
| `T_Core of core_type (* `T_Core necessary *)
]
and cartesian = (fun_type, times) nsepseq
and type_expr = [
`T_Prod of cartesian
| `T_Sum of sum_type
| `T_Record of record_type
]
type type_declaration = {
kwd_type__1 : kwd_type;
type_name__2 : type_name;
eq__3 : eq;
type_expr__4 : type_expr
}
(* Entry *)
type statement = [
`Let of let_declarations
| `TypeDecl of type_declaration
]
type program = statement list

View File

@ -1,270 +0,0 @@
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
(* LEXIS *)
let nl = ['\n' '\r']
let blank = [' ' '\t']
let digit = ['0'-'9']
let natural = digit | digit (digit | '_')* digit
let integer = '-'? natural
let small = ['a'-'z']
let capital = ['A'-'Z']
let letter = small | capital
let ichar = letter | digit | ['_' '\'']
let ident = small ichar* | '_' ichar+
let uident = capital ichar*
let hexa = digit | ['A'-'F']
let byte = hexa hexa
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
let string
let char_set = [^'\'' '\\'] # nl
| "\\'" | esc | "\\x" byte | "\\0" digit digit
let char = "'" char_set "'"
(* SYNTAX *)
(* Helpers *)
(* The following are meant to be part of a library *)
sep_or_term_list<item,sep> ::=
item sep ...
| (item sep)+
seq<item> ::= nseq<item>?
nseq<item> ::= item seq<item>
nsepseq<item,sep> ::=
item
| item sep nsepseq<item,sep>
sepseq<item,sep> ::= nsepseq<item,sep>?
(* The following are specific to the present grammar *)
list_of<item> ::= "[" item ";" ... "]"
csv<item> ::= item "," item "," ...
(* Entry *)
program ::= statement* EOF
statement ::=
let_declarations
| type_declaration
(* Type declarations *)
type_declaration ::= "type" type_name "=" type_expr
type_name == ident
type_expr ::=
cartesian
| sum_type
| record_type
cartesian ::= fun_type "*" ...
fun_type ::=
core_type "->" fun_type
| core_type
core_type ::=
type_name
| type_param type_constr
| "(" cartesian ")"
type_param ==
core_type type_constr
| type_tuple type_constr
type_constr == type_name
type_tuple ::= "(" type_expr "," ... ")"
sum_type ::= variant "|" ...
variant ::= constr "of" cartesian
constr == uident
record_type ::=
"{" sep_or_term_list<field_decl, ";"> "}"
field_decl ::= field_name ":" type_expr
field_name == ident
(* Non-recursive value declarations *)
let_declarations ::= "let" let_bindings
let_bindings := let_binding "and" ...
let_binding ::=
value_name pattern+ "=" expr
| let_lhs "=" expr
value_name == ident
(* Patterns *)
let_lhs ::=
pattern "::" cons_pat
| pattern "," pattern "," ...
| core_pattern
core_pattern ::=
variable
| "_"
| "(" ")"
| number
| "true"
| "false"
| string
| list_of<cons_pat>
| "(" ptuple ")"
| constr core_pattern
variable == ident
number == int
ptuple ::= csv<cons_pat>
unit ::= "(" ")"
cons_pat ::=
pattern "::" cons_pat
| pattern
pattern ::=
"(" cons_pat ")"
| core_pattern
(* Expressions *)
expr ::=
base_cond__open<expr>
| match_expr<base_cond>
base_cond__open<x> ::=
base_expr<x>
| conditional<x>
base_cond ::= base_cond__open<base_cond>
base_expr<right_expr> ::=
let_expr<right_expr>
| fun_expr<right_expr>
| csv<op_expr>
| op_expr
conditional<right_expr> ::=
if_then_else<right_expr>
| if_then<right_expr>
if_then<right_expr> ::= "if" expr "then" right_expr
if_then_else<right_expr> ::=
"if" expr "then" closed_if "else" right_expr
base_if_then_else__open<x> ::=
base_expr<x>
| if_then_else<x>
base_if_then_else ::=
base_if_then_else__open<base_if_then_else>
closed_if ::=
base_if_then_else__open<closed_if>
| match_expr<base_if_then_else>
match_expr<right_expr> ::=
"match" expr "with" cases<right_expr>
cases<right_expr> ::=
case<right_expr>
| cases<base_cond> "|" case<right_expr>
case<right_expr> ::= let_lhs "->" right_expr
let_in<right_expr> ::= "let" par_let "in" right_expr
fun_expr<right_expr> ::= "fun" pattern+ "->" right_expr
op_expr ::=
op_expr "||" conj_expr
| conj_expr
conj_expr ::=
conj_expr "&&" comp_expr
| comp_expr
comp_expr ::=
comp_expr "<" cat_expr
| comp_expr "<=" cat_expr
| comp_expr ">" cat_expr
| comp_expr ">=" cat_expr
| comp_expr "=" cat_expr
| comp_expr "<>" cat_expr
| cat_expr
cat_expr ::=
cons_expr "^" cat_expr
| cons_expr
cons_expr ::=
add_expr "::" cons_expr
| add_expr
add_expr ::=
add_expr "+" mult_expr
| add_expr "-" mult_expr
| mult_expr
mult_expr ::=
mult_expr "*" unary_expr
| mult_expr "div" unary_expr
| mult_expr "mod" unary_expr
| unary_expr
unary_expr ::=
"-" core_expr
| "not" core_expr
| call_expr
call_expr ::=
call_expr core_expr
| core_expr
core_expr ::=
number
| module_name "." variable
| string
| char
| "()"
| "false"
| "true"
| list_of<expr>
| "(" expr ")"
| constr
| sequence
| record_expr
module_name == uident
record_expr ::=
"{" sep_or_term_list(field_assignment,";") "}"
field_assignment ::= field_name "=" expr
sequence ::= "begin" (expr ";" ...)? "end"

View File

@ -1,270 +0,0 @@
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
(* LEXIS *)
let nl = ['\n' '\r']
let blank = [' ' '\t']
let digit = ['0'-'9']
let natural = digit | digit (digit | '_')* digit
token int = '-'? natural
let small = ['a'-'z']
let capital = ['A'-'Z']
let letter = small | capital
let ichar = letter | digit | ['_' '\'']
token ident = small ichar* | '_' ichar+
token uident = capital ichar*
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
token string
let hexa = digit | ['A'-'F']
let byte = hexa hexa
let char_set = [^'\'' '\\'] # nl
| "\\'" | esc | "\\x" byte | "\\0" digit digit
token char = "'" char_set "'"
(* SYNTAX *)
(* Helpers *)
(* The following are meant to be part of a library *)
sep_or_term_list<item,sep> ::=
item sep ...
| (item sep)+
seq<item> ::= nseq<item>?
nseq<item> ::= item seq<item>
nsepseq<item,sep> ::=
item
| item sep nsepseq<item,sep>
sepseq<item,sep> ::= nsepseq<item,sep>?
(* The following are specific to the present grammar *)
list_of<item> ::= "[" item ";" ... "]"
csv<item> ::= item "," item "," ...
(* Entry *)
program ::= statement* EOF
statement ::=
let_declarations
| type_declaration
(* Type declarations *)
type_declaration ::= "type" type_name "=" type_expr
type_name == ident
type_expr ::=
cartesian
| sum_type
| record_type
cartesian ::= fun_type "*" ...
fun_type ::=
core_type "->" fun_type
| core_type
core_type ::=
type_name
| type_param type_constr
| "(" cartesian ")"
type_param ==
core_type type_constr
| type_tuple type_constr
type_constr == type_name
type_tuple ::= "(" type_expr "," ... ")"
sum_type ::= variant "|" ...
variant ::= constr "of" cartesian
constr == uident
record_type ::=
"{" sep_or_term_list<field_decl,";"> "}"
field_decl ::= field_name ":" type_expr
field_name == ident
(* Non-recursive value declarations *)
let_declarations ::= "let" let_bindings
let_bindings := let_binding "and" ...
let_binding ::=
value_name pattern+ "=" expr
| let_lhs "=" expr
value_name == ident
(* Patterns *)
let_lhs ::=
pattern "::" cons_pat
| pattern "," pattern "," ...
| core_pattern
core_pattern ::=
variable
| "_"
| "(" ")"
| number
| "true"
| "false"
| string
| list_of<cons_pat>
| "(" ptuple ")"
| constr core_pattern
variable == ident
number == int
ptuple ::= csv<cons_pat>
unit ::= "(" ")"
cons_pat ::=
pattern "::" cons_pat
| pattern
pattern ::=
"(" cons_pat ")"
| core_pattern
(* Expressions *)
expr ::=
base_cond__open<expr>
| match_expr<base_cond>
base_cond__open<x> ::=
base_expr<x>
| conditional<x>
base_cond ::= base_cond__open<base_cond>
base_expr<right_expr> ::=
let_expr<right_expr>
| fun_expr<right_expr>
| csv<op_expr>
| op_expr
conditional<right_expr> ::=
if_then_else<right_expr>
| if_then<right_expr>
if_then<right_expr> ::= "if" expr "then" right_expr
if_then_else<right_expr> ::=
"if" expr "then" closed_if "else" right_expr
base_if_then_else__open<x> ::=
base_expr<x>
| if_then_else<x>
base_if_then_else ::=
base_if_then_else__open<base_if_then_else>
closed_if ::=
base_if_then_else__open<closed_if>
| match_expr<base_if_then_else>
match_expr<right_expr> ::=
"match" expr "with" cases<right_expr>
cases<right_expr> ::=
case<right_expr>
| cases<base_cond> "|" case<right_expr>
case<right_expr> ::= let_lhs "->" right_expr
let_in<right_expr> ::= "let" par_let "in" right_expr
fun_expr<right_expr> ::= "fun" pattern+ "->" right_expr
op_expr ::=
op_expr "||" conj_expr
| conj_expr
conj_expr ::=
conj_expr "&&" comp_expr
| comp_expr
comp_expr ::=
comp_expr "<" cat_expr
| comp_expr "<=" cat_expr
| comp_expr ">" cat_expr
| comp_expr ">=" cat_expr
| comp_expr "=" cat_expr
| comp_expr "<>" cat_expr
| cat_expr
cat_expr ::=
cons_expr "^" cat_expr
| cons_expr
cons_expr ::=
add_expr "::" cons_expr
| add_expr
add_expr ::=
add_expr "+" mult_expr
| add_expr "-" mult_expr
| mult_expr
mult_expr ::=
mult_expr "*" unary_expr
| mult_expr "div" unary_expr
| mult_expr "mod" unary_expr
| unary_expr
unary_expr ::=
"-" core_expr
| "not" core_expr
| call_expr
call_expr ::=
call_expr core_expr
| core_expr
core_expr ::=
number
| module_name "." variable
| string
| char
| "()"
| "false"
| "true"
| list_of<expr>
| "(" expr ")"
| constr
| sequence
| record_expr
module_name == uident
record_expr ::=
"{" sep_or_term_list<field_assignment,";"> "}"
field_assignment ::= field_name "=" expr
sequence ::= "begin" (expr ";" ...)? "end"

View File

@ -1,249 +0,0 @@
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
(* LEXIS *)
let nl = ['\n' '\r']
let blank = [' ' '\t']
let digit = ['0'-'9']
let natural = digit | digit (digit | '_')* digit
token int = '-'? natural
let small = ['a'-'z']
let capital = ['A'-'Z']
let letter = small | capital
let ichar = letter | digit | ['_' '\'']
token ident = small ichar* | '_' ichar+
token uident = capital ichar*
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
let hexa = digit | ['A'-'F']
let byte = hexa hexa
let char_set = [^'\'' '\\'] # nl
| "\\'" | esc | "\\x" byte | "\\0" digit digit
token char = "'" char_set "'"
token string
(* SYNTAX *)
(* Helpers *)
(* The following are meant to be part of a library *)
sep_or_term_list<item,sep> ::=
item sep etc.
| (item sep)+
seq<item> ::= nseq<item>?
nseq<item> ::= item seq<item>
nsepseq<item,sep> ::=
item
| item sep nsepseq<item,sep>
sepseq<item,sep> ::= nsepseq<item,sep>?
(* The following are specific to the present grammar *)
list_of<item> ::= "[" item ";" etc. "]"
csv<item> ::= item "," item "," etc.
(* Entry *)
program ::= statement*
statement ::=
let_declarations
| type_declaration
(* Type declarations *)
type_declaration ::= "type" type_name "=" type_expr
type_name == ident
type_expr ::=
cartesian
| sum_type
| record_type
cartesian ::= fun_type "*" etc.
fun_type ::=
core_type "->" fun_type
| core_type
core_type ::=
type_name
| type_param type_constr
| "(" cartesian ")"
type_param ==
core_type type_constr
| type_tuple type_constr
type_constr == type_name
type_tuple ::= "(" type_expr "," etc. ")"
sum_type ::= "|"? variant "|" etc.
variant ::= constr "of" cartesian
constr == uident
record_type ::=
"{" sep_or_term_list<field_decl,";"> "}"
field_decl ::= field_name ":" type_expr
field_name == ident
(* Non-recursive value declarations *)
let_declarations ::= "let" let_bindings
let_bindings := let_binding "and" etc.
let_binding ::=
value_name pattern+ "=" expr
| let_lhs "=" expr
value_name == ident
(* Patterns *)
let_lhs ::=
pattern "::" cons_pat
| pattern "," pattern "," etc.
| core_pattern
core_pattern ::=
variable
| "_"
| "(" ")"
| number
| "true"
| "false"
| string
| list_of<cons_pat>
| "(" ptuple ")"
| constr core_pattern
variable == ident
number == int
ptuple ::= csv<cons_pat>
unit ::= "(" ")"
cons_pat ::=
pattern "::" cons_pat
| pattern
pattern ::=
"(" cons_pat ")"
| core_pattern
(* Expressions *)
expr ::=
base_cond__<expr>
| match_expr<base_cond>
base_cond__<x> ::=
base_expr<x>
| conditional<x>
base_cond ::= base_cond__<base_cond>
base_expr<right_expr> ::=
let_expr<right_expr>
| fun_expr<right_expr>
| csv<op_expr>
| op_expr
conditional<right_expr> ::=
if_then_else<right_expr>
| if_then<right_expr>
if_then<right_expr> ::= "if" expr "then" right_expr
if_then_else<right_expr> ::=
"if" expr "then" closed_if "else" right_expr
base_if_then_else__<x> ::=
base_expr<x>
| if_then_else<x>
base_if_then_else ::=
base_if_then_else__<base_if_then_else>
closed_if ::=
base_if_then_else__<closed_if>
| match_expr<base_if_then_else>
match_expr<right_expr> ::=
"match" expr "with" cases<right_expr>
cases<right_expr> ::=
case<right_expr>
| cases<base_cond> "|" case<right_expr>
case<right_expr> ::= let_lhs "->" right_expr
let_in<right_expr> ::= "let" par_let "in" right_expr
fun_expr<right_expr> ::= "fun" pattern+ "->" right_expr
op_expr ::=
op_expr "||" %left %prec1 op_expr
| op_expr "&&" %left %prec2 op_expr
| op_expr "<" %left %prec3 op_expr
| op_expr "<=" %left %prec3 op_expr
| op_expr ">" %left %prec3 op_expr
| op_expr ">=" %left %prec3 op_expr
| op_expr "=" %left %prec3 op_expr
| op_expr "<>" %left %prec3 op_expr
| op_expr "^" %right %prec4 op_expr
| op_expr "::" %right %prec5 op_expr
| op_expr "+" %left %prec6 op_expr
| op_expr "-" %left %prec6 op_expr
| op_expr "*" %left %prec7 op_expr
| op_expr "div" %left %prec7 op_expr
| op_expr "mod" %left %prec7 op_expr
| "-" %prec8 op_expr
| "not" %prec8 op_expr
| call_expr
call_expr ::=
call_expr core_expr
| core_expr
core_expr ::=
number
| module_name "." variable
| string
| char
| "()"
| "false"
| "true"
| list_of<expr>
| "(" expr ")"
| constr
| sequence
| record_expr
module_name == uident
record_expr ::=
"{" sep_or_term_list<field_assignment,";"> "}"
field_assignment ::= field_name "=" expr
sequence ::= "begin" sep_or_term_list<expr,";">? "end"

View File

@ -1,336 +0,0 @@
(* Extended Backus-Naur Form (EBNF) for Mini-ML *)
(* LEXIS *)
let digit = ['0'-'9']
let natural = digit | digit (digit | '_')* digit
%token integer = '-'? natural
%token natural = natural 'n'
let small = ['a'-'z']
let capital = ['A'-'Z']
let letter = small | capital
let ichar = letter | digit | ['_' '\'']
%token ident = small ichar* | '_' ichar+
%token uident = capital ichar*
let esc = "\\n" | "\\\\" | "\\b" | "\\r" | "\\t"
let hexa = digit | ['A'-'F']
let byte = hexa hexa
let char_set = [^'\'' '\\'] # nl
| "\\'" | esc | "\\x" byte | "\\0" digit digit
%token chr = "'" char_set "'"
%token str
%token bool_or = "||" %left %prec1
%token bool_and = "&&" %left %prec2
%token lt = "<" %left %prec3
%token le = "<=" %left %prec3
%token gt = ">" %left %prec3
%token ge = ">=" %left %prec3
%token eq = "=" %left %prec3
%token ne = "<>" %left %prec3
%token cat = "^" %right %prec4
%token cons = "::" %right %prec5
%token plus = "+" %left %prec6
%token minus = "-" %left %prec6
%token times = "*" %left %prec7
%token slash = "/" %left %prec7
%token kwd_div = "div" %left %prec7
%token kwd_mod = "mod" %left %prec7
%token uminus = "-" %prec8
%token kwd_not = "not" %prec8
%token lpar = "("
%token rpar = ")"
%token lbracket = "["
%token rbracket = "]"
%token lbrace = "{"
%token rbrace = "}"
%token semi = ";"
%token comma = ","
%token colon = ":"
%token vbar = "|"
%token arrow = "->"
%token wild = "_"
(* SYNTAX *)
(* Helpers *)
(* The following are meant to be part of a library *)
%ocaml "Utils"
type 'item seq = 'item list
type 'item nseq = 'item * 'item seq
type ('item,'sep) nsepseq = 'item * ('sep * 'item) list
type ('item,'sep) sepseq = ('item,'sep) nsepseq option
type ('item,'sep) sep_or_term_list =
('item,'sep) nsepseq * 'sep option
%end
%menhir_decl "Parser"
%start program interactive_expr
%type <AST.t> program
%type <AST.expr> interactive_expr
%type <('item,'sep) sep_or_term_list> sep_or_term_list
%end
%menhir_rule "Parser"
seq(item):
(**) { [] }
| X seq(item) { $1::$2 }
nseq(item):
item seq(item) { $1,$2 }
nsepseq(item,sep):
item { $1, [] }
| item sep nsepseq(item,sep) { let h,t = $3 in $1, ($2,h)::t }
sepseq(item,sep):
(**) { None }
| nsepseq(item,sep) { Some $1 }
sep_or_term_list(item,sep):
nsepseq(item,sep) {
$1, None
}
| nseq(item sep {$1,$2}) {
let (first,sep), tail = $1 in
let rec trans (seq, prev_sep as acc) = function
[] -> acc
| (item,next_sep)::others ->
trans ((prev_sep,item)::seq, next_sep) others in
let list, term = trans ([],sep) tail
in (first, List.rev list), Some term }
%end
(* The following are specific to the present grammar *)
list<item> ::= "[" item ";" etc. "]"
tuple<item> ::= item "," item "," etc.
par<item> ::= "(" item ")"
(* Entry *)
program == statement*
statement ::=
let_declarations { `Let }
| type_declaration { `TypeDecl }
(* Type declarations *)
type_declaration == "type" type_name "=" type_expr
type_name == ident
type_expr ::=
cartesian { `T_Prod }
| sum_type { `T_Sum }
| record_type { `T_Record }
cartesian == fun_type "*" etc.
fun_type ::=
core_type "->" fun_type { `T_Fun }
| core_type { `T_Core }
core_type ::=
type_name { `T_Alias }
| type_param type_constr { `T_App }
| par<cartesian>
type_param ::=
par<type_expr "," etc.>
| core_type type_constr { `T_App }
type_constr ::=
ident { `T_Constr }
| "set"
| "map"
| "list"
sum_type == "|"? variant "|" etc.
variant == constr "of" cartesian
constr == uident
record_type ==
"{" sep_or_term_list<field_decl,";"> "}"
field_decl == field_name ":" type_expr
field_name == ident
(* Non-recursive value declarations *)
let_declarations == "let" let_bindings
let_bindings == let_binding "and" etc.
let_binding ::=
variable sub_irrefutable+ "=" expr { `LetFun }
| irrefutable "=" expr { `LetNonFun }
(* Patterns *)
irrefutable ::=
tuple<sub_irrefutable> { `P_Tuple }
| sub_irrefutable
sub_irrefutable ::=
variable { `P_Var }
| "_" { `P_Wild }
| unit { `P_Unit }
| par<closed_irrefutable>
closed_irrefutable ::=
tuple<sub_irrefutable>
| sub_irrefutable { `P_SubI }
pattern ::=
sub_pattern "::" tail { `P_Cons }
| tuple<sub_pattern> { `P_Tuple }
| core_pattern { `P_Core }
sub_pattern ::=
par<tail>
| core_pattern { `P_Core }
core_pattern ::=
variable { `P_Var }
| "_" { `P_Wild }
| unit { `P_Unit }
| integer { `P_Int }
| natural { `P_Nat }
| "true" { `P_True }
| "false" { `P_False }
| str { `P_Str }
| chr { `P_Chr }
| list<tail> { `P_List }
| constr sub_pattern { `P_Constr }
| record_pattern { `P_Record }
| par<tuple<tail>>
variable == ident
record_pattern ::=
"{" sep_or_term_list<field_pattern,";"> "}"
field_pattern ::= field_name "=" sub_pattern
unit ::= "(" ")"
tail ::=
sub_pattern "::" tail
| sub_pattern
(* Expressions *)
expr ::=
base_cond__<expr>
| match_expr<base_cond>
base_cond__<x> ::=
base_expr<x>
| conditional<x>
base_cond == base_cond__<base_cond>
base_expr<right_expr> ::=
let_expr<right_expr>
| fun_expr<right_expr>
| csv<op_expr>
conditional<right_expr> ::=
if_then_else<right_expr>
| if_then<right_expr>
if_then<right_expr> ::=
"if" expr "then" right_expr { `IfThen }
if_then_else<right_expr> ::=
"if" expr "then" closed_if "else" right_expr { `IfThenElse }
base_if_then_else__<x> ::=
base_expr<x>
| if_then_else<x>
base_if_then_else ::=
base_if_then_else__<base_if_then_else>
closed_if ::=
base_if_then_else__<closed_if>
| match_expr<base_if_then_else>
match_expr<right_expr> ::=
"match" expr "with" cases<right_expr>
cases<right_expr> ::=
case<right_expr>
| cases<base_cond> "|" case<right_expr>
case<right_expr> ::= pattern "->" right_expr
let_in<right_expr> ::= "let" par_let "in" right_expr
fun_expr<right_expr> ::= "fun" sub_pattern+ "->" right_expr
op_expr ::=
op_expr "||" op_expr
| op_expr "&&" op_expr
| op_expr "<" op_expr
| op_expr "<=" op_expr
| op_expr ">" op_expr
| op_expr ">=" op_expr
| op_expr "=" op_expr
| op_expr "<>" op_expr
| op_expr "^" op_expr
| op_expr "::" op_expr
| op_expr "+" op_expr
| op_expr "-" op_expr
| op_expr "*" op_expr
| op_expr "/" op_expr
| op_expr "div" op_expr
| op_expr "mod" op_expr
| "-" op_expr
| "not" op_expr
| call_expr
call_expr ::=
call_expr core_expr
| core_expr
core_expr ::=
variable
| module_name "." path
| unit
| integer
| natural
| "false"
| "true"
| str
| chr
| constr
| sequence
| record_expr
| list<expr>
| par<expr>
module_name == uident
path == ident "." etc.
record_expr ::=
"{" sep_or_term_list<field_assignment,";"> "}"
field_assignment ::= field_name "=" expr
sequence ::= "begin" sep_or_term_list<expr,";">? "end"

View File

@ -1,6 +1,5 @@
module Pascaligo = Pascaligo module Pascaligo = Pascaligo
module Camligo = Parser_camligo module Cameligo = Cameligo
module Ligodity = Ligodity
module Reasonligo = Reasonligo module Reasonligo = Reasonligo

View File

@ -1,8 +1,8 @@
open Trace open Trace
module Parser = Parser_reasonligo.Parser module Parser = Parser_reasonligo.Parser
module AST = Parser_ligodity.AST module AST = Parser_cameligo.AST
module ParserLog = Parser_ligodity.ParserLog module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_reasonligo.LexToken module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer.Make(LexToken)

View File

@ -4,7 +4,7 @@
[@@@warning "-42"] [@@@warning "-42"]
open Region open Region
module AST = Parser_ligodity.AST module AST = Parser_cameligo.AST
open AST open AST

View File

@ -105,9 +105,9 @@ let () =
if Utils.String.Set.mem "ast" options.verbose if Utils.String.Set.mem "ast" options.verbose
then let buffer = Buffer.create 131 in then let buffer = Buffer.create 131 in
begin begin
Parser_ligodity.ParserLog.offsets := options.offsets; Parser_cameligo.ParserLog.offsets := options.offsets;
Parser_ligodity.ParserLog.mode := options.mode; Parser_cameligo.ParserLog.mode := options.mode;
Parser_ligodity.ParserLog.print_tokens buffer ast; Parser_cameligo.ParserLog.print_tokens buffer ast;
Buffer.output_buffer stdout buffer Buffer.output_buffer stdout buffer
end end
with with

View File

@ -11,13 +11,13 @@
(modules reasonligo LexToken Parser) (modules reasonligo LexToken Parser)
(libraries (libraries
parser_shared parser_shared
parser_ligodity parser_cameligo
str str
simple-utils simple-utils
tezos-utils tezos-utils
getopt getopt
) )
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_ligodity )) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo ))
) )
(executable (executable

View File

@ -1,5 +1,5 @@
module Parser = Parser module Parser = Parser
module AST = Parser_ligodity.AST module AST = Parser_cameligo.AST
module Lexer = Lexer module Lexer = Lexer
module LexToken = LexToken module LexToken = LexToken
module ParserLog = Parser_ligodity.ParserLog module ParserLog = Parser_cameligo.ParserLog

View File

@ -9,7 +9,7 @@ INTERNAL DOCUMENTATION OF THE SHARED PARSER FUNCTIONALITY
The module EvalOpt parses the command-line for options to the The module EvalOpt parses the command-line for options to the
parser. That action is performed as a side-effect when the module parser. That action is performed as a side-effect when the module
is initialised at run-time: this is ugly and easy to fix. See is initialised at run-time: this is ugly and easy to fix. See
ligo/src/parser/ligodity/EvalOpt.ml{i} for the right way to do ligo/src/parser/cameligo/EvalOpt.ml{i} for the right way to do
it. Ignore them: the file actually calling directly the parser is it. Ignore them: the file actually calling directly the parser is
ligo/src/parser/parser.ml. Note that, as a consequence, no option ligo/src/parser/parser.ml. Note that, as a consequence, no option
is currently passed to the parser when building Pascaligo with is currently passed to the parser when building Pascaligo with

View File

@ -3,7 +3,7 @@
open Trace open Trace
open Ast_simplified open Ast_simplified
module Raw = Parser.Ligodity.AST module Raw = Parser.Cameligo.AST
module SMap = Map.String module SMap = Map.String
module Option = Simple_utils.Option module Option = Simple_utils.Option
(* TODO: move 1-parser/shared/Utils.ml{i} to Simple_utils/ *) (* TODO: move 1-parser/shared/Utils.ml{i} to Simple_utils/ *)
@ -101,7 +101,7 @@ module Errors = struct
let message () = "" in let message () = "" in
let data = [ let data = [
("expression" , ("expression" ,
thunk @@ Parser.Ligodity.ParserLog.expr_to_string t) thunk @@ Parser.Cameligo.ParserLog.expr_to_string t)
] in ] in
error ~data title message error ~data title message
@ -130,7 +130,7 @@ end
open Errors open Errors
open Operators.Simplify.Ligodity open Operators.Simplify.Cameligo
let r_split = Location.r_split let r_split = Location.r_split
@ -379,7 +379,7 @@ let rec simpl_expression :
let default_action () = let default_action () =
let%bind cases = simpl_cases lst in let%bind cases = simpl_cases lst in
return @@ e_matching ~loc e cases in return @@ e_matching ~loc e cases in
(* Hack to take care of patterns introduced by `parser/ligodity/Parser.mly` in "norm_fun_expr". TODO: Still needed? *) (* Hack to take care of patterns introduced by `parser/cameligo/Parser.mly` in "norm_fun_expr". TODO: Still needed? *)
match lst with match lst with
| [ (pattern , rhs) ] -> ( | [ (pattern , rhs) ] -> (
match pattern with match pattern with
@ -726,7 +726,7 @@ and simpl_cases : type a . (Raw.pattern * a) list -> (a, unit) matching result =
let title () = "Pattern" in let title () = "Pattern" in
let content () = let content () =
Printf.sprintf "Pattern : %s" Printf.sprintf "Pattern : %s"
(Parser.Ligodity.ParserLog.pattern_to_string x) in (Parser.Cameligo.ParserLog.pattern_to_string x) in
error title content error title content
in in
let as_variant () = let as_variant () =

View File

@ -4,7 +4,7 @@ open Trace
open Ast_simplified open Ast_simplified
module Raw = Parser.Ligodity.AST module Raw = Parser.Cameligo.AST
module SMap = Map.String module SMap = Map.String
module Option = Simple_utils.Option module Option = Simple_utils.Option

View File

@ -1,12 +1,12 @@
open Trace open Trace
open Function open Function
module I = Parser.Camligo.Ast module I = Parser.Cameligo.Ast
module O = Ast_simplified module O = Ast_simplified
open O.Combinators open O.Combinators
let unwrap : type a . a Location.wrap -> a = Location.unwrap let unwrap : type a . a Location.wrap -> a = Location.unwrap
open Operators.Simplify.Camligo open Operators.Simplify.Cameligo
let type_variable : string -> O.type_expression result = fun str -> let type_variable : string -> O.type_expression result = fun str ->
match List.assoc_opt str type_constants with match List.assoc_opt str type_constants with

View File

@ -8,7 +8,7 @@
ast_simplified ast_simplified
self_ast_simplified self_ast_simplified
operators) operators)
(modules ligodity pascaligo simplify) (modules cameligo pascaligo simplify)
(preprocess (preprocess
(pps (pps
ppx_let ppx_let

View File

@ -1,2 +1,2 @@
module Pascaligo = Pascaligo module Pascaligo = Pascaligo
module Ligodity = Ligodity module Cameligo = Cameligo

View File

@ -672,7 +672,7 @@ and type_expression : environment -> Solver.state -> ?tv_opt:O.type_value -> I.e
* } -> ( * } -> (
* let%bind input_type = * let%bind input_type =
* let%bind input_type = * let%bind input_type =
* (\* Hack to take care of let_in introduced by `simplify/ligodity.ml` in ECase's hack *\) * (\* Hack to take care of let_in introduced by `simplify/cameligo.ml` in ECase's hack *\)
* let default_action e () = fail @@ (needs_annotation e "the returned value") in * let default_action e () = fail @@ (needs_annotation e "the returned value") in
* match input_type with * match input_type with
* | Some ty -> ok ty * | Some ty -> ok ty

View File

@ -557,7 +557,7 @@ and type_expression : environment -> ?tv_opt:O.type_expression -> I.expression -
} -> ( } -> (
let%bind input_type = let%bind input_type =
let%bind input_type = let%bind input_type =
(* Hack to take care of let_in introduced by `simplify/ligodity.ml` in ECase's hack *) (* Hack to take care of let_in introduced by `simplify/cameligo.ml` in ECase's hack *)
let default_action e () = fail @@ (needs_annotation e "the returned value") in let default_action e () = fail @@ (needs_annotation e "the returned value") in
match input_type with match input_type with
| Some ty -> ok ty | Some ty -> ok ty

View File

@ -601,7 +601,7 @@ and type_expression' : environment -> ?tv_opt:O.type_value -> I.expression -> O.
} -> ( } -> (
let%bind input_type = let%bind input_type =
let%bind input_type = let%bind input_type =
(* Hack to take care of let_in introduced by `simplify/ligodity.ml` in ECase's hack *) (* Hack to take care of let_in introduced by `simplify/cameligo.ml` in ECase's hack *)
let default_action e () = fail @@ (needs_annotation e "the returned value") in let default_action e () = fail @@ (needs_annotation e "the returned value") in
match input_type with match input_type with
| Some ty -> ok ty | Some ty -> ok ty

View File

@ -137,22 +137,8 @@ module Simplify = struct
let type_operators = type_operators let type_operators = type_operators
end end
module Camligo = struct
let constants = function
| "Bytes.pack" -> ok C_BYTES_PACK
| "Crypto.hash" -> ok C_HASH (* TODO : Check if right *)
| "Operation.transaction" -> ok C_CALL
| "Operation.get_contract" -> ok C_CONTRACT
| "sender" -> ok C_SENDER
| "unit" -> ok C_UNIT
| "source" -> ok C_SOURCE
| _ -> simple_fail "Not a CamLIGO constant"
let type_constants = type_constants module Cameligo = struct
let type_operators = type_operators
end
module Ligodity = struct
let constants = function let constants = function
| "assert" -> ok C_ASSERTION | "assert" -> ok C_ASSERTION
| "Current.balance" -> ok C_BALANCE | "Current.balance" -> ok C_BALANCE

View File

@ -9,13 +9,8 @@ module Simplify : sig
val type_operators : string -> type_expression type_operator result val type_operators : string -> type_expression type_operator result
end end
module Camligo : sig
val constants : string -> constant result
val type_constants : string -> type_constant result
val type_operators : string -> type_expression type_operator result
end
module Ligodity : sig module Cameligo : sig
val constants : string -> constant result val constants : string -> constant result
val type_constants : string -> type_constant result val type_constants : string -> type_constant result
val type_operators : string -> type_expression type_operator result val type_operators : string -> type_expression type_operator result

View File

@ -1,6 +1,6 @@
open Trace open Trace
open Test_helpers open Test_helpers
open Parser.Camligo open Parser.Cameligo
let basic () : unit result = let basic () : unit result =
let%bind _ = User.parse_file "./contracts/new-syntax.mligo" in let%bind _ = User.parse_file "./contracts/new-syntax.mligo" in
@ -8,7 +8,7 @@ let basic () : unit result =
let simplify () : unit result = let simplify () : unit result =
let%bind raw = User.parse_file "./contracts/new-syntax.mligo" in let%bind raw = User.parse_file "./contracts/new-syntax.mligo" in
let%bind _simpl = Simplify.Camligo.main raw in let%bind _simpl = Simplify.Cameligo.main raw in
ok () ok ()
let main = "Multifix", [ let main = "Multifix", [