Merge branch 'dev' of gitlab.com:ligolang/ligo into feature/doc-pascaligo-loop

This commit is contained in:
Christian Rinderknecht 2020-01-31 13:30:28 +01:00
commit 729ecd3f12
65 changed files with 2625 additions and 1232 deletions

View File

@ -220,7 +220,7 @@ In our case, we have a `counter.ligo` contract that accepts a parameter of type
<!--DOCUSAURUS_CODE_TABS--> <!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo--> <!--Pascaligo-->
```pascaligo ```pascaligo skip
// counter.ligo // counter.ligo
type action is type action is
| Increment of int | Increment of int

View File

@ -139,21 +139,8 @@ Conditional logic is an important part of every real world program.
```pascaligo group=e ```pascaligo group=e
const min_age: nat = 16n; const min_age: nat = 16n;
(*
This function is really obnoxious, but it showcases
how the if statement and it's syntax can be used.
Normally, you'd use `with (age > min_age)` instead.
*)
function is_adult(const age: nat): bool is function is_adult(const age: nat): bool is
block { if (age > min_age) then True else False
var is_adult: bool := False;
if (age > min_age) then begin
is_adult := True;
end else begin
is_adult := False;
end
} with is_adult
``` ```
> You can run the function above with > You can run the function above with

View File

@ -200,6 +200,18 @@ const a: int = int(1n);
const b: nat = abs(1); const b: nat = abs(1);
``` ```
<!--END_DOCUSAURUS_CODE_TABS-->
## Check if a value is a `nat`
You can check if a value is a `nat`, by using a syntax specific built-in function, which accepts an `int` and returns an `option(nat)`, more specifically `Some(nat)` if the provided integer is a natural number, and `None` otherwise:
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
const its_a_nat: option(nat) = is_nat(1)
```
<!--ReasonLIGO--> <!--ReasonLIGO-->
```reasonligo group=e ```reasonligo group=e
let a: int = int(1n); let a: int = int(1n);

View File

@ -0,0 +1,4 @@
const min_age: nat = 16n;
function is_adult(const age: nat): bool is
if (age > min_age) then True else False

View File

@ -0,0 +1 @@
const its_a_nat: option(nat) = is_nat(1)

View File

@ -0,0 +1,4 @@
type int_map is map(int, int);
function get_first(const int_map: int_map): option(int) is int_map[1]
// empty map needs a type annotation
const first: option(int) = get_first(((map end) : int_map ));

View File

@ -3,7 +3,7 @@ id: types
title: Types title: Types
--- ---
LIGO is strongly and statically typed. This means that the compiler checks your program at compilation time and makes sure there won't be any type related runtime errors. LIGO types are built on top of Michelson's type system. LIGO is strongly and statically typed. This means that the compiler checks your program at compilation time and makes sure there won't be any type related runtime errors. LIGO types are built on top of Michelson's type system.
## Built-in types ## Built-in types
@ -36,6 +36,8 @@ let dog_breed: animal_breed = "Saluki";
<!--END_DOCUSAURUS_CODE_TABS--> <!--END_DOCUSAURUS_CODE_TABS-->
> Types in LIGO are `structural`, which means that `animalBreed`/`animal_breed` and `string` are interchangable and are considered equal.
## Simple types ## Simple types
<!--DOCUSAURUS_CODE_TABS--> <!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo--> <!--Pascaligo-->
@ -146,3 +148,18 @@ let ledger: account_balances =
``` ```
<!--END_DOCUSAURUS_CODE_TABS--> <!--END_DOCUSAURUS_CODE_TABS-->
## Annotations
In certain cases, type of an expression cannot be properly determined. This can be circumvented by annotating an expression with it's desired type, here's an example:
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
type int_map is map(int, int);
function get_first(const int_map: int_map): option(int) is int_map[1]
// empty map needs a type annotation
const first: option(int) = get_first(((map end) : int_map ));
```
<!--END_DOCUSAURUS_CODE_TABS-->

View File

@ -5624,15 +5624,6 @@
"integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==",
"dev": true "dev": true
}, },
"klaw-sync": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/klaw-sync/-/klaw-sync-6.0.0.tgz",
"integrity": "sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==",
"dev": true,
"requires": {
"graceful-fs": "^4.1.11"
}
},
"lazy-cache": { "lazy-cache": {
"version": "2.0.2", "version": "2.0.2",
"resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-2.0.2.tgz", "resolved": "https://registry.npmjs.org/lazy-cache/-/lazy-cache-2.0.2.tgz",
@ -7412,15 +7403,6 @@
"integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=", "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=",
"dev": true "dev": true
}, },
"preprocess": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/preprocess/-/preprocess-3.1.0.tgz",
"integrity": "sha1-pE5c3Vu7WlTwrSiaru2AmV19k4o=",
"dev": true,
"requires": {
"xregexp": "3.1.0"
}
},
"prismjs": { "prismjs": {
"version": "1.17.1", "version": "1.17.1",
"resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.17.1.tgz", "resolved": "https://registry.npmjs.org/prismjs/-/prismjs-1.17.1.tgz",
@ -7750,6 +7732,12 @@
"picomatch": "^2.0.4" "picomatch": "^2.0.4"
} }
}, },
"reason-highlightjs": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/reason-highlightjs/-/reason-highlightjs-0.2.1.tgz",
"integrity": "sha512-DWWPtfeQjwKgHj2OOieEIAB544uAVjwOAIAg2Yu09CobdUe41Yah0Z67GEvmVtpYCGG/+3CZvDRM1hMVr1zN3A==",
"dev": true
},
"rechoir": { "rechoir": {
"version": "0.6.2", "version": "0.6.2",
"resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz",
@ -9433,12 +9421,6 @@
"integrity": "sha512-Eux0i2QdDYKbdbA6AM6xE4m6ZTZr4G4xF9kahI2ukSEMCzwce2eX9WlTI5J3s+NU7hpasFsr8hWIONae7LluAQ==", "integrity": "sha512-Eux0i2QdDYKbdbA6AM6xE4m6ZTZr4G4xF9kahI2ukSEMCzwce2eX9WlTI5J3s+NU7hpasFsr8hWIONae7LluAQ==",
"dev": true "dev": true
}, },
"xregexp": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/xregexp/-/xregexp-3.1.0.tgz",
"integrity": "sha1-FNhGHgvdOCJL/uUDmgiY/EL80zY=",
"dev": true
},
"xtend": { "xtend": {
"version": "4.0.2", "version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",

View File

@ -4,7 +4,7 @@ maintainer: "ligolang@gmail.com"
authors: [ "Galfour" ] authors: [ "Galfour" ]
homepage: "https://gitlab.com/ligolang/tezos" homepage: "https://gitlab.com/ligolang/tezos"
bug-reports: "https://gitlab.com/ligolang/tezos/issues" bug-reports: "https://gitlab.com/ligolang/tezos/issues"
synopsis: "A higher-level language which compiles to Michelson" synopsis: "A high-level language which compiles to Michelson"
dev-repo: "git+https://gitlab.com/ligolang/tezos.git" dev-repo: "git+https://gitlab.com/ligolang/tezos.git"
license: "MIT" license: "MIT"
depends: [ depends: [
@ -21,6 +21,8 @@ depends: [
"yojson" "yojson"
"alcotest" { with-test } "alcotest" { with-test }
"getopt" "getopt"
"terminal_size"
"pprint"
# work around upstream in-place update # work around upstream in-place update
"ocaml-migrate-parsetree" { = "1.4.0" } "ocaml-migrate-parsetree" { = "1.4.0" }
] ]

View File

@ -19,7 +19,7 @@ let source_file n =
let open Arg in let open Arg in
let info = let info =
let docv = "SOURCE_FILE" in let docv = "SOURCE_FILE" in
let doc = "$(docv) is the path to the .ligo or .mligo file of the contract." in let doc = "$(docv) is the path to the smart contract file." in
info ~docv ~doc [] in info ~docv ~doc [] in
required @@ pos n (some string) None info required @@ pos n (some string) None info
@ -42,7 +42,7 @@ let syntax =
let open Arg in let open Arg in
let info = let info =
let docv = "SYNTAX" in let docv = "SYNTAX" in
let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\" and \"cameligo\". By default, the syntax is guessed from the extension (.ligo and .mligo, respectively)." in let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\", \"cameligo\" and \"reasonligo\". By default, the syntax is guessed from the extension (.ligo, .mligo, .religo respectively)." in
info ~docv ~doc ["syntax" ; "s"] in info ~docv ~doc ["syntax" ; "s"] in
value @@ opt string "auto" info value @@ opt string "auto" info
@ -50,7 +50,7 @@ let req_syntax n =
let open Arg in let open Arg in
let info = let info =
let docv = "SYNTAX" in let docv = "SYNTAX" in
let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\" and \"cameligo\". By default, the syntax is guessed from the extension (.ligo and .mligo, respectively)." in let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\", \"cameligo\" and \"reasonligo\". By default, the syntax is guessed from the extension (.ligo, .mligo, .religo respectively)." in
info ~docv ~doc [] in info ~docv ~doc [] in
required @@ pos n (some string) None info required @@ pos n (some string) None info
@ -58,7 +58,7 @@ let init_file =
let open Arg in let open Arg in
let info = let info =
let docv = "INIT_FILE" in let docv = "INIT_FILE" in
let doc = "$(docv) is the path to the .ligo or .mligo file to be used for context initialization." in let doc = "$(docv) is the path to smart contract file to be used for context initialization." in
info ~docv ~doc ["init-file"] in info ~docv ~doc ["init-file"] in
value @@ opt (some string) None info value @@ opt (some string) None info
@ -66,7 +66,7 @@ let amount =
let open Arg in let open Arg in
let info = let info =
let docv = "AMOUNT" in let docv = "AMOUNT" in
let doc = "$(docv) is the amount the michelson interpreter will use." in let doc = "$(docv) is the amount the Michelson interpreter will use." in
info ~docv ~doc ["amount"] in info ~docv ~doc ["amount"] in
value @@ opt string "0" info value @@ opt string "0" info
@ -74,7 +74,7 @@ let sender =
let open Arg in let open Arg in
let info = let info =
let docv = "SENDER" in let docv = "SENDER" in
let doc = "$(docv) is the sender the michelson interpreter transaction will use." in let doc = "$(docv) is the sender the Michelson interpreter transaction will use." in
info ~docv ~doc ["sender"] in info ~docv ~doc ["sender"] in
value @@ opt (some string) None info value @@ opt (some string) None info
@ -82,7 +82,7 @@ let source =
let open Arg in let open Arg in
let info = let info =
let docv = "SOURCE" in let docv = "SOURCE" in
let doc = "$(docv) is the source the michelson interpreter transaction will use." in let doc = "$(docv) is the source the Michelson interpreter transaction will use." in
info ~docv ~doc ["source"] in info ~docv ~doc ["source"] in
value @@ opt (some string) None info value @@ opt (some string) None info
@ -90,7 +90,7 @@ let predecessor_timestamp =
let open Arg in let open Arg in
let info = let info =
let docv = "PREDECESSOR_TIMESTAMP" in let docv = "PREDECESSOR_TIMESTAMP" in
let doc = "$(docv) is the pedecessor_timestamp (now value minus one minute) the michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in let doc = "$(docv) is the predecessor_timestamp (now value minus one minute) the Michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in
info ~docv ~doc ["predecessor-timestamp"] in info ~docv ~doc ["predecessor-timestamp"] in
value @@ opt (some string) None info value @@ opt (some string) None info
@ -135,58 +135,58 @@ let compile_file =
let term = let term =
Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format $ michelson_code_format) in Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format $ michelson_code_format) in
let cmdname = "compile-contract" in let cmdname = "compile-contract" in
let doc = "Subcommand: compile a contract." in let doc = "Subcommand: Compile a contract." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let print_cst = let print_cst =
let f source_file syntax display_format = ( let f source_file syntax display_format = (
toplevel ~display_format @@ toplevel ~display_format @@
let%bind pp = Compile.Of_source.pretty_print source_file (Syntax_name syntax) in let%bind pp = Compile.Of_source.pretty_print source_file (Syntax_name syntax) in
ok @@ Format.asprintf "%s \n" (Buffer.contents pp) ok @@ Format.asprintf "%s \n" (Buffer.contents pp)
) )
in in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-cst" in let cmdname = "print-cst" in
let doc = "Subcommand: print the cst. Warning: intended for development of LIGO and can break at any time." in let doc = "Subcommand: Print the CST.\nWarning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname) (Term.ret term, Term.info ~doc cmdname)
let print_ast = let print_ast =
let f source_file syntax display_format = ( let f source_file syntax display_format = (
toplevel ~display_format @@ toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
ok @@ Format.asprintf "%a\n" Compile.Of_simplified.pretty_print simplified ok @@ Format.asprintf "%a\n" Compile.Of_simplified.pretty_print simplified
) )
in in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-ast" in let cmdname = "print-ast" in
let doc = "Subcommand: print the ast. Warning: intended for development of LIGO and can break at any time." in let doc = "Subcommand: Print the AST.\n Warning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname) (Term.ret term, Term.info ~doc cmdname)
let print_typed_ast = let print_typed_ast =
let f source_file syntax display_format = ( let f source_file syntax display_format = (
toplevel ~display_format @@ toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed,_ = Compile.Of_simplified.compile simplified in let%bind typed,_ = Compile.Of_simplified.compile simplified in
ok @@ Format.asprintf "%a\n" Compile.Of_typed.pretty_print typed ok @@ Format.asprintf "%a\n" Compile.Of_typed.pretty_print typed
) )
in in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-typed-ast" in let cmdname = "print-typed-ast" in
let doc = "Subcommand: print the typed ast. Warning: intended for development of LIGO and can break at any time." in let doc = "Subcommand: Print the typed AST.\n Warning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname) (Term.ret term, Term.info ~doc cmdname)
let print_mini_c = let print_mini_c =
let f source_file syntax display_format = ( let f source_file syntax display_format = (
toplevel ~display_format @@ toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed,_ = Compile.Of_simplified.compile simplified in let%bind typed,_ = Compile.Of_simplified.compile simplified in
let%bind mini_c = Compile.Of_typed.compile typed in let%bind mini_c = Compile.Of_typed.compile typed in
ok @@ Format.asprintf "%a\n" Compile.Of_mini_c.pretty_print mini_c ok @@ Format.asprintf "%a\n" Compile.Of_mini_c.pretty_print mini_c
) )
in in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-mini-c" in let cmdname = "print-mini-c" in
let doc = "Subcommand: print mini c. Warning: intended for development of LIGO and can break at any time." in let doc = "Subcommand: Print Mini-C. Warning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname) (Term.ret term, Term.info ~doc cmdname)
let measure_contract = let measure_contract =
@ -203,7 +203,7 @@ let measure_contract =
let term = let term =
Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format) in Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format) in
let cmdname = "measure-contract" in let cmdname = "measure-contract" in
let doc = "Subcommand: measure a contract's compiled size in bytes." in let doc = "Subcommand: Measure a contract's compiled size in bytes." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let compile_parameter = let compile_parameter =
@ -232,7 +232,7 @@ let compile_parameter =
let term = let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in
let cmdname = "compile-parameter" in let cmdname = "compile-parameter" in
let doc = "Subcommand: compile parameters to a michelson expression. The resulting michelson expression can be passed as an argument in a transaction which calls a contract." in let doc = "Subcommand: Compile parameters to a Michelson expression. The resulting Michelson expression can be passed as an argument in a transaction which calls a contract." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let interpret = let interpret =
@ -246,7 +246,7 @@ let interpret =
let env = Ast_typed.program_environment typed_prg in let env = Ast_typed.program_environment typed_prg in
ok (mini_c_prg,state,env) ok (mini_c_prg,state,env)
| None -> ok ([],Typer.Solver.initial_state,Ast_typed.Environment.full_empty) in | None -> ok ([],Typer.Solver.initial_state,Ast_typed.Environment.full_empty) in
let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) init_file in let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) init_file in
let%bind simplified_exp = Compile.Of_source.compile_expression v_syntax expression in let%bind simplified_exp = Compile.Of_source.compile_expression v_syntax expression in
let%bind (typed_exp,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_exp in let%bind (typed_exp,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_exp in
@ -265,7 +265,7 @@ let interpret =
let term = let term =
Term.(const f $ expression "EXPRESSION" 0 $ init_file $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format ) in Term.(const f $ expression "EXPRESSION" 0 $ init_file $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format ) in
let cmdname = "interpret" in let cmdname = "interpret" in
let doc = "Subcommand: interpret the expression in the context initialized by the provided source file." in let doc = "Subcommand: Interpret the expression in the context initialized by the provided source file." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
@ -295,7 +295,7 @@ let compile_storage =
let term = let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "STORAGE" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in Term.(const f $ source_file 0 $ entry_point 1 $ expression "STORAGE" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in
let cmdname = "compile-storage" in let cmdname = "compile-storage" in
let doc = "Subcommand: compile an initial storage in ligo syntax to a michelson expression. The resulting michelson expression can be passed as an argument in a transaction which originates a contract." in let doc = "Subcommand: Compile an initial storage in ligo syntax to a Michelson expression. The resulting Michelson expression can be passed as an argument in a transaction which originates a contract." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let dry_run = let dry_run =
@ -330,7 +330,7 @@ let dry_run =
let term = let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ expression "STORAGE" 3 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ expression "STORAGE" 3 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "dry-run" in let cmdname = "dry-run" in
let doc = "Subcommand: run a smart-contract with the given storage and input." in let doc = "Subcommand: Run a smart-contract with the given storage and input." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let run_function = let run_function =
@ -361,7 +361,7 @@ let run_function =
let term = let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "run-function" in let cmdname = "run-function" in
let doc = "Subcommand: run a function with the given parameter." in let doc = "Subcommand: Run a function with the given parameter." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let evaluate_value = let evaluate_value =
@ -380,7 +380,7 @@ let evaluate_value =
let term = let term =
Term.(const f $ source_file 0 $ entry_point 1 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in Term.(const f $ source_file 0 $ entry_point 1 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "evaluate-value" in let cmdname = "evaluate-value" in
let doc = "Subcommand: evaluate a given definition." in let doc = "Subcommand: Evaluate a given definition." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let compile_expression = let compile_expression =
@ -399,7 +399,7 @@ let compile_expression =
let term = let term =
Term.(const f $ expression "" 1 $ req_syntax 0 $ display_format $ michelson_code_format) in Term.(const f $ expression "" 1 $ req_syntax 0 $ display_format $ michelson_code_format) in
let cmdname = "compile-expression" in let cmdname = "compile-expression" in
let doc = "Subcommand: compile to a michelson value." in let doc = "Subcommand: Compile to a michelson value." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let dump_changelog = let dump_changelog =
@ -420,7 +420,7 @@ let list_declarations =
let term = let term =
Term.(const f $ source_file 0 $ syntax ) in Term.(const f $ source_file 0 $ syntax ) in
let cmdname = "list-declarations" in let cmdname = "list-declarations" in
let doc = "Subcommand: list all the top-level decalarations." in let doc = "Subcommand: List all the top-level declarations." in
(Term.ret term , Term.info ~doc cmdname) (Term.ret term , Term.info ~doc cmdname)
let run ?argv () = let run ?argv () =

View File

@ -18,56 +18,56 @@ let%expect_test _ =
Dump the LIGO changelog to stdout. Dump the LIGO changelog to stdout.
compile-contract compile-contract
Subcommand: compile a contract. Subcommand: Compile a contract.
compile-expression compile-expression
Subcommand: compile to a michelson value. Subcommand: Compile to a michelson value.
compile-parameter compile-parameter
Subcommand: compile parameters to a michelson expression. The Subcommand: Compile parameters to a Michelson expression. The
resulting michelson expression can be passed as an argument in a resulting Michelson expression can be passed as an argument in a
transaction which calls a contract. transaction which calls a contract.
compile-storage compile-storage
Subcommand: compile an initial storage in ligo syntax to a Subcommand: Compile an initial storage in ligo syntax to a
michelson expression. The resulting michelson expression can be Michelson expression. The resulting Michelson expression can be
passed as an argument in a transaction which originates a passed as an argument in a transaction which originates a
contract. contract.
dry-run dry-run
Subcommand: run a smart-contract with the given storage and input. Subcommand: Run a smart-contract with the given storage and input.
evaluate-value evaluate-value
Subcommand: evaluate a given definition. Subcommand: Evaluate a given definition.
interpret interpret
Subcommand: interpret the expression in the context initialized by Subcommand: Interpret the expression in the context initialized by
the provided source file. the provided source file.
list-declarations list-declarations
Subcommand: list all the top-level decalarations. Subcommand: List all the top-level declarations.
measure-contract measure-contract
Subcommand: measure a contract's compiled size in bytes. Subcommand: Measure a contract's compiled size in bytes.
print-ast print-ast
Subcommand: print the ast. Warning: intended for development of Subcommand: Print the AST. Warning: Intended for development of
LIGO and can break at any time. LIGO and can break at any time.
print-cst print-cst
Subcommand: print the cst. Warning: intended for development of Subcommand: Print the CST. Warning: Intended for development of
LIGO and can break at any time. LIGO and can break at any time.
print-mini-c print-mini-c
Subcommand: print mini c. Warning: intended for development of Subcommand: Print Mini-C. Warning: Intended for development of
LIGO and can break at any time. LIGO and can break at any time.
print-typed-ast print-typed-ast
Subcommand: print the typed ast. Warning: intended for development Subcommand: Print the typed AST. Warning: Intended for development
of LIGO and can break at any time. of LIGO and can break at any time.
run-function run-function
Subcommand: run a function with the given parameter. Subcommand: Run a function with the given parameter.
OPTIONS OPTIONS
--help[=FMT] (default=auto) --help[=FMT] (default=auto)
@ -94,56 +94,56 @@ let%expect_test _ =
Dump the LIGO changelog to stdout. Dump the LIGO changelog to stdout.
compile-contract compile-contract
Subcommand: compile a contract. Subcommand: Compile a contract.
compile-expression compile-expression
Subcommand: compile to a michelson value. Subcommand: Compile to a michelson value.
compile-parameter compile-parameter
Subcommand: compile parameters to a michelson expression. The Subcommand: Compile parameters to a Michelson expression. The
resulting michelson expression can be passed as an argument in a resulting Michelson expression can be passed as an argument in a
transaction which calls a contract. transaction which calls a contract.
compile-storage compile-storage
Subcommand: compile an initial storage in ligo syntax to a Subcommand: Compile an initial storage in ligo syntax to a
michelson expression. The resulting michelson expression can be Michelson expression. The resulting Michelson expression can be
passed as an argument in a transaction which originates a passed as an argument in a transaction which originates a
contract. contract.
dry-run dry-run
Subcommand: run a smart-contract with the given storage and input. Subcommand: Run a smart-contract with the given storage and input.
evaluate-value evaluate-value
Subcommand: evaluate a given definition. Subcommand: Evaluate a given definition.
interpret interpret
Subcommand: interpret the expression in the context initialized by Subcommand: Interpret the expression in the context initialized by
the provided source file. the provided source file.
list-declarations list-declarations
Subcommand: list all the top-level decalarations. Subcommand: List all the top-level declarations.
measure-contract measure-contract
Subcommand: measure a contract's compiled size in bytes. Subcommand: Measure a contract's compiled size in bytes.
print-ast print-ast
Subcommand: print the ast. Warning: intended for development of Subcommand: Print the AST. Warning: Intended for development of
LIGO and can break at any time. LIGO and can break at any time.
print-cst print-cst
Subcommand: print the cst. Warning: intended for development of Subcommand: Print the CST. Warning: Intended for development of
LIGO and can break at any time. LIGO and can break at any time.
print-mini-c print-mini-c
Subcommand: print mini c. Warning: intended for development of Subcommand: Print Mini-C. Warning: Intended for development of
LIGO and can break at any time. LIGO and can break at any time.
print-typed-ast print-typed-ast
Subcommand: print the typed ast. Warning: intended for development Subcommand: Print the typed AST. Warning: Intended for development
of LIGO and can break at any time. of LIGO and can break at any time.
run-function run-function
Subcommand: run a function with the given parameter. Subcommand: Run a function with the given parameter.
OPTIONS OPTIONS
--help[=FMT] (default=auto) --help[=FMT] (default=auto)
@ -157,7 +157,7 @@ let%expect_test _ =
run_ligo_good [ "compile-contract" ; "--help" ] ; run_ligo_good [ "compile-contract" ; "--help" ] ;
[%expect {| [%expect {|
NAME NAME
ligo-compile-contract - Subcommand: compile a contract. ligo-compile-contract - Subcommand: Compile a contract.
SYNOPSIS SYNOPSIS
ligo compile-contract [OPTION]... SOURCE_FILE ENTRY_POINT ligo compile-contract [OPTION]... SOURCE_FILE ENTRY_POINT
@ -167,8 +167,7 @@ let%expect_test _ =
ENTRY_POINT is entry-point that will be compiled. ENTRY_POINT is entry-point that will be compiled.
SOURCE_FILE (required) SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the SOURCE_FILE is the path to the smart contract file.
contract.
OPTIONS OPTIONS
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT --format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
@ -191,8 +190,9 @@ let%expect_test _ =
-s SYNTAX, --syntax=SYNTAX (absent=auto) -s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
guessed from the extension (.ligo and .mligo, respectively). the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--version --version
Show version information. |} ] ; Show version information. |} ] ;
@ -200,8 +200,8 @@ let%expect_test _ =
run_ligo_good [ "compile-parameter" ; "--help" ] ; run_ligo_good [ "compile-parameter" ; "--help" ] ;
[%expect {| [%expect {|
NAME NAME
ligo-compile-parameter - Subcommand: compile parameters to a michelson ligo-compile-parameter - Subcommand: Compile parameters to a Michelson
expression. The resulting michelson expression can be passed as an expression. The resulting Michelson expression can be passed as an
argument in a transaction which calls a contract. argument in a transaction which calls a contract.
SYNOPSIS SYNOPSIS
@ -216,12 +216,11 @@ let%expect_test _ =
PARAMETER_EXPRESSION is the expression that will be compiled. PARAMETER_EXPRESSION is the expression that will be compiled.
SOURCE_FILE (required) SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the SOURCE_FILE is the path to the smart contract file.
contract.
OPTIONS OPTIONS
--amount=AMOUNT (absent=0) --amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use. AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT --format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable) (absent=human-readable)
@ -242,21 +241,22 @@ let%expect_test _ =
are 'text' (default), 'json' and 'hex'. are 'text' (default), 'json' and 'hex'.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP --predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
one minute) the michelson interpreter will use (e.g. minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z') '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto) -s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
guessed from the extension (.ligo and .mligo, respectively). the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER --sender=SENDER
SENDER is the sender the michelson interpreter transaction will SENDER is the sender the Michelson interpreter transaction will
use. use.
--source=SOURCE --source=SOURCE
SOURCE is the source the michelson interpreter transaction will SOURCE is the source the Michelson interpreter transaction will
use. use.
--version --version
@ -265,8 +265,8 @@ let%expect_test _ =
run_ligo_good [ "compile-storage" ; "--help" ] ; run_ligo_good [ "compile-storage" ; "--help" ] ;
[%expect {| [%expect {|
NAME NAME
ligo-compile-storage - Subcommand: compile an initial storage in ligo ligo-compile-storage - Subcommand: Compile an initial storage in ligo
syntax to a michelson expression. The resulting michelson expression syntax to a Michelson expression. The resulting Michelson expression
can be passed as an argument in a transaction which originates a can be passed as an argument in a transaction which originates a
contract. contract.
@ -279,15 +279,14 @@ let%expect_test _ =
ENTRY_POINT is entry-point that will be compiled. ENTRY_POINT is entry-point that will be compiled.
SOURCE_FILE (required) SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the SOURCE_FILE is the path to the smart contract file.
contract.
STORAGE_EXPRESSION (required) STORAGE_EXPRESSION (required)
STORAGE_EXPRESSION is the expression that will be compiled. STORAGE_EXPRESSION is the expression that will be compiled.
OPTIONS OPTIONS
--amount=AMOUNT (absent=0) --amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use. AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT --format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable) (absent=human-readable)
@ -308,21 +307,22 @@ let%expect_test _ =
are 'text' (default), 'json' and 'hex'. are 'text' (default), 'json' and 'hex'.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP --predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
one minute) the michelson interpreter will use (e.g. minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z') '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto) -s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
guessed from the extension (.ligo and .mligo, respectively). the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER --sender=SENDER
SENDER is the sender the michelson interpreter transaction will SENDER is the sender the Michelson interpreter transaction will
use. use.
--source=SOURCE --source=SOURCE
SOURCE is the source the michelson interpreter transaction will SOURCE is the source the Michelson interpreter transaction will
use. use.
--version --version
@ -331,7 +331,7 @@ let%expect_test _ =
run_ligo_good [ "dry-run" ; "--help" ] ; run_ligo_good [ "dry-run" ; "--help" ] ;
[%expect {| [%expect {|
NAME NAME
ligo-dry-run - Subcommand: run a smart-contract with the given storage ligo-dry-run - Subcommand: Run a smart-contract with the given storage
and input. and input.
SYNOPSIS SYNOPSIS
@ -346,15 +346,14 @@ let%expect_test _ =
PARAMETER_EXPRESSION is the expression that will be compiled. PARAMETER_EXPRESSION is the expression that will be compiled.
SOURCE_FILE (required) SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the SOURCE_FILE is the path to the smart contract file.
contract.
STORAGE_EXPRESSION (required) STORAGE_EXPRESSION (required)
STORAGE_EXPRESSION is the expression that will be compiled. STORAGE_EXPRESSION is the expression that will be compiled.
OPTIONS OPTIONS
--amount=AMOUNT (absent=0) --amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use. AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT --format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable) (absent=human-readable)
@ -370,21 +369,22 @@ let%expect_test _ =
`plain' whenever the TERM env var is `dumb' or undefined. `plain' whenever the TERM env var is `dumb' or undefined.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP --predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
one minute) the michelson interpreter will use (e.g. minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z') '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto) -s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
guessed from the extension (.ligo and .mligo, respectively). the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER --sender=SENDER
SENDER is the sender the michelson interpreter transaction will SENDER is the sender the Michelson interpreter transaction will
use. use.
--source=SOURCE --source=SOURCE
SOURCE is the source the michelson interpreter transaction will SOURCE is the source the Michelson interpreter transaction will
use. use.
--version --version
@ -393,7 +393,7 @@ let%expect_test _ =
run_ligo_good [ "run-function" ; "--help" ] ; run_ligo_good [ "run-function" ; "--help" ] ;
[%expect {| [%expect {|
NAME NAME
ligo-run-function - Subcommand: run a function with the given ligo-run-function - Subcommand: Run a function with the given
parameter. parameter.
SYNOPSIS SYNOPSIS
@ -408,12 +408,11 @@ let%expect_test _ =
PARAMETER_EXPRESSION is the expression that will be compiled. PARAMETER_EXPRESSION is the expression that will be compiled.
SOURCE_FILE (required) SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the SOURCE_FILE is the path to the smart contract file.
contract.
OPTIONS OPTIONS
--amount=AMOUNT (absent=0) --amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use. AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT --format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable) (absent=human-readable)
@ -429,21 +428,22 @@ let%expect_test _ =
`plain' whenever the TERM env var is `dumb' or undefined. `plain' whenever the TERM env var is `dumb' or undefined.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP --predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
one minute) the michelson interpreter will use (e.g. minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z') '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto) -s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
guessed from the extension (.ligo and .mligo, respectively). the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER --sender=SENDER
SENDER is the sender the michelson interpreter transaction will SENDER is the sender the Michelson interpreter transaction will
use. use.
--source=SOURCE --source=SOURCE
SOURCE is the source the michelson interpreter transaction will SOURCE is the source the Michelson interpreter transaction will
use. use.
--version --version
@ -452,7 +452,7 @@ let%expect_test _ =
run_ligo_good [ "evaluate-value" ; "--help" ] ; run_ligo_good [ "evaluate-value" ; "--help" ] ;
[%expect {| [%expect {|
NAME NAME
ligo-evaluate-value - Subcommand: evaluate a given definition. ligo-evaluate-value - Subcommand: Evaluate a given definition.
SYNOPSIS SYNOPSIS
ligo evaluate-value [OPTION]... SOURCE_FILE ENTRY_POINT ligo evaluate-value [OPTION]... SOURCE_FILE ENTRY_POINT
@ -462,12 +462,11 @@ let%expect_test _ =
ENTRY_POINT is entry-point that will be compiled. ENTRY_POINT is entry-point that will be compiled.
SOURCE_FILE (required) SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the SOURCE_FILE is the path to the smart contract file.
contract.
OPTIONS OPTIONS
--amount=AMOUNT (absent=0) --amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use. AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT --format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable) (absent=human-readable)
@ -483,21 +482,22 @@ let%expect_test _ =
`plain' whenever the TERM env var is `dumb' or undefined. `plain' whenever the TERM env var is `dumb' or undefined.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP --predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
one minute) the michelson interpreter will use (e.g. minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z') '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto) -s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
guessed from the extension (.ligo and .mligo, respectively). the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER --sender=SENDER
SENDER is the sender the michelson interpreter transaction will SENDER is the sender the Michelson interpreter transaction will
use. use.
--source=SOURCE --source=SOURCE
SOURCE is the source the michelson interpreter transaction will SOURCE is the source the Michelson interpreter transaction will
use. use.
--version --version
@ -506,7 +506,7 @@ let%expect_test _ =
run_ligo_good [ "compile-expression" ; "--help" ] ; run_ligo_good [ "compile-expression" ; "--help" ] ;
[%expect {| [%expect {|
NAME NAME
ligo-compile-expression - Subcommand: compile to a michelson value. ligo-compile-expression - Subcommand: Compile to a michelson value.
SYNOPSIS SYNOPSIS
ligo compile-expression [OPTION]... SYNTAX _EXPRESSION ligo compile-expression [OPTION]... SYNTAX _EXPRESSION
@ -517,8 +517,9 @@ let%expect_test _ =
SYNTAX (required) SYNTAX (required)
SYNTAX is the syntax that will be used. Currently supported SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
guessed from the extension (.ligo and .mligo, respectively). the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
OPTIONS OPTIONS
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT --format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT

View File

@ -3,9 +3,10 @@ open Cli_expect
let%expect_test _ = let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: The string starting here is interrupted by a line break. ligo: : Lexical error in file "broken_string.ligo", line 1, characters 18-19:
The string starting here is interrupted by a line break.
Hint: Remove the break, close the string before or insert a backslash. Hint: Remove the break, close the string before or insert a backslash.
{"parser_loc":"in file \"broken_string.ligo\", line 1, characters 18-19"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -19,9 +20,10 @@ ligo: lexer error: The string starting here is interrupted by a line break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: The string starting here is interrupted by a line break. ligo: : Lexical error in file "broken_string.mligo", line 1, characters 8-9:
The string starting here is interrupted by a line break.
Hint: Remove the break, close the string before or insert a backslash. Hint: Remove the break, close the string before or insert a backslash.
{"parser_loc":"in file \"broken_string.mligo\", line 1, characters 8-9"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -35,9 +37,10 @@ ligo: lexer error: The string starting here is interrupted by a line break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.religo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.religo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: The string starting here is interrupted by a line break. ligo: : Lexical error in file "broken_string.religo", line 1, characters 8-9:
The string starting here is interrupted by a line break.
Hint: Remove the break, close the string before or insert a backslash. Hint: Remove the break, close the string before or insert a backslash.
{"parser_loc":"in file \"broken_string.religo\", line 1, characters 8-9"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -51,9 +54,10 @@ ligo: lexer error: The string starting here is interrupted by a line break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Negative byte sequence. ligo: : Lexical error in file "negative_byte_sequence.ligo", line 1, characters 18-23:
Negative byte sequence.
Hint: Remove the leading minus sign. Hint: Remove the leading minus sign.
{"parser_loc":"in file \"negative_byte_sequence.ligo\", line 1, characters 18-23"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -67,9 +71,10 @@ ligo: lexer error: Negative byte sequence.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Negative byte sequence. ligo: : Lexical error in file "negative_byte_sequence.mligo", line 1, characters 8-13:
Negative byte sequence.
Hint: Remove the leading minus sign. Hint: Remove the leading minus sign.
{"parser_loc":"in file \"negative_byte_sequence.mligo\", line 1, characters 8-13"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -83,9 +88,10 @@ ligo: lexer error: Negative byte sequence.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.religo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.religo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Negative byte sequence. ligo: : Lexical error in file "negative_byte_sequence.religo", line 1, characters 8-13:
Negative byte sequence.
Hint: Remove the leading minus sign. Hint: Remove the leading minus sign.
{"parser_loc":"in file \"negative_byte_sequence.religo\", line 1, characters 8-13"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -99,9 +105,10 @@ ligo: lexer error: Negative byte sequence.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Reserved name: arguments. ligo: : Lexical error in file "reserved_name.ligo", line 1, characters 4-13:
Reserved name: arguments.
Hint: Change the name. Hint: Change the name.
{"parser_loc":"in file \"reserved_name.ligo\", line 1, characters 4-13"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -115,9 +122,10 @@ ligo: lexer error: Reserved name: arguments.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.religo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.religo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Reserved name: end. ligo: : Lexical error in file "reserved_name.religo", line 1, characters 4-7:
Reserved name: end.
Hint: Change the name. Hint: Change the name.
{"parser_loc":"in file \"reserved_name.religo\", line 1, characters 4-7"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -131,9 +139,10 @@ ligo: lexer error: Reserved name: end.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Reserved name: object. ligo: : Lexical error in file "reserved_name.mligo", line 1, characters 4-10:
Reserved name: object.
Hint: Change the name. Hint: Change the name.
{"parser_loc":"in file \"reserved_name.mligo\", line 1, characters 4-10"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -147,8 +156,9 @@ ligo: lexer error: Reserved name: object.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Unexpected character '\239'. ligo: : Lexical error in file "unexpected_character.ligo", line 1, characters 18-19:
{"parser_loc":"in file \"unexpected_character.ligo\", line 1, characters 18-19"} Unexpected character '\239'.
{}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -162,8 +172,9 @@ ligo: lexer error: Unexpected character '\239'.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Unexpected character '\239'. ligo: : Lexical error in file "unexpected_character.mligo", line 1, characters 8-9:
{"parser_loc":"in file \"unexpected_character.mligo\", line 1, characters 8-9"} Unexpected character '\239'.
{}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -177,8 +188,9 @@ ligo: lexer error: Unexpected character '\239'.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.religo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.religo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Unexpected character '\239'. ligo: : Lexical error in file "unexpected_character.religo", line 1, characters 8-9:
{"parser_loc":"in file \"unexpected_character.religo\", line 1, characters 8-9"} Unexpected character '\239'.
{}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -192,9 +204,10 @@ ligo: lexer error: Unexpected character '\239'.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unterminated_comment.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/unterminated_comment.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Unterminated comment. ligo: : Lexical error in file "unterminated_comment.mligo", line 1, characters 0-2:
Unterminated comment.
Hint: Close with "*)". Hint: Close with "*)".
{"parser_loc":"in file \"unterminated_comment.mligo\", line 1, characters 0-2"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -208,9 +221,10 @@ ligo: lexer error: Unterminated comment.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Invalid symbol. ligo: : Lexical error in file "invalid_symbol.ligo", line 1, characters 17-20:
Invalid symbol.
Hint: Check the LIGO syntax you use. Hint: Check the LIGO syntax you use.
{"parser_loc":"in file \"invalid_symbol.ligo\", line 1, characters 17-20"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -224,9 +238,10 @@ ligo: lexer error: Invalid symbol.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Invalid symbol. ligo: : Lexical error in file "invalid_symbol.mligo", line 1, characters 10-13:
Invalid symbol.
Hint: Check the LIGO syntax you use. Hint: Check the LIGO syntax you use.
{"parser_loc":"in file \"invalid_symbol.mligo\", line 1, characters 10-13"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -240,9 +255,10 @@ ligo: lexer error: Invalid symbol.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.religo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.religo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Invalid symbol. ligo: : Lexical error in file "invalid_symbol.religo", line 1, characters 10-11:
Invalid symbol.
Hint: Check the LIGO syntax you use. Hint: Check the LIGO syntax you use.
{"parser_loc":"in file \"invalid_symbol.religo\", line 1, characters 10-11"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -256,9 +272,10 @@ ligo: lexer error: Invalid symbol.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Missing break. ligo: : Lexical error in file "missing_break.ligo", line 1, characters 18-18:
Missing break.
Hint: Insert some space. Hint: Insert some space.
{"parser_loc":"in file \"missing_break.ligo\", line 1, characters 18-18"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -272,9 +289,10 @@ ligo: lexer error: Missing break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Missing break. ligo: : Lexical error in file "missing_break.mligo", line 1, characters 11-11:
Missing break.
Hint: Insert some space. Hint: Insert some space.
{"parser_loc":"in file \"missing_break.mligo\", line 1, characters 11-11"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -288,9 +306,10 @@ ligo: lexer error: Missing break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.religo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.religo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Missing break. ligo: : Lexical error in file "missing_break.religo", line 1, characters 11-11:
Missing break.
Hint: Insert some space. Hint: Insert some space.
{"parser_loc":"in file \"missing_break.religo\", line 1, characters 11-11"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -304,9 +323,10 @@ ligo: lexer error: Missing break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Invalid character in string. ligo: : Lexical error in file "invalid_character_in_string.ligo", line 1, characters 19-20:
Invalid character in string.
Hint: Remove or replace the character. Hint: Remove or replace the character.
{"parser_loc":"in file \"invalid_character_in_string.ligo\", line 1, characters 19-20"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -320,9 +340,10 @@ ligo: lexer error: Invalid character in string.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.mligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.mligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Invalid character in string. ligo: : Lexical error in file "invalid_character_in_string.mligo", line 1, characters 9-10:
Invalid character in string.
Hint: Remove or replace the character. Hint: Remove or replace the character.
{"parser_loc":"in file \"invalid_character_in_string.mligo\", line 1, characters 9-10"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can
@ -336,9 +357,10 @@ ligo: lexer error: Invalid character in string.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.religo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.religo" ; "main" ] ;
[%expect {| [%expect {|
ligo: lexer error: Invalid character in string. ligo: : Lexical error in file "invalid_character_in_string.religo", line 1, characters 9-10:
Invalid character in string.
Hint: Remove or replace the character. Hint: Remove or replace the character.
{"parser_loc":"in file \"invalid_character_in_string.religo\", line 1, characters 9-10"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can

View File

@ -3,8 +3,8 @@ open Cli_expect
let%expect_test _ = let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_syntax.ligo" ; "main" ] ; run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_syntax.ligo" ; "main" ] ;
[%expect {| [%expect {|
ligo: parser error: Parse error at "-" from (1, 16) to (1, 17). In file "|../../test/contracts/negative/error_syntax.ligo" ligo: : Parse error in file "error_syntax.ligo", line 1, characters 16-17, after "bar" and before "-".
{"parser_loc":"in file \"\", line 1, characters 16-17"} {}
If you're not sure how to fix this error, you can If you're not sure how to fix this error, you can

View File

@ -1,173 +1,171 @@
open Trace open Trace
type s_syntax = Syntax_name of string type s_syntax = Syntax_name of string
type v_syntax = Pascaligo | Cameligo | ReasonLIGO type v_syntax = PascaLIGO | CameLIGO | ReasonLIGO
let syntax_to_variant : s_syntax -> string option -> v_syntax result = let syntax_to_variant (Syntax_name syntax) source =
fun syntax source_filename -> match syntax, source with
let subr s n = "auto", Some sf ->
String.sub s (String.length s - n) n in (match Filename.extension sf with
let endswith s suffix = ".ligo" | ".pligo" -> ok PascaLIGO
let suffixlen = String.length suffix in | ".mligo" -> ok CameLIGO
( String.length s >= suffixlen | ".religo" -> ok ReasonLIGO
&& String.equal (subr s suffixlen) suffix) | _ -> simple_fail "Cannot auto-detect the syntax.\n\
in Hint: Use -s <name of syntax>\n")
let (Syntax_name syntax) = syntax in | ("pascaligo" | "PascaLIGO"), _ -> ok PascaLIGO
match (syntax , source_filename) with | ("cameligo" | "CameLIGO"), _ -> ok CameLIGO
| "auto" , Some sf when endswith sf ".ligo" -> ok Pascaligo | ("reasonligo" | "ReasonLIGO"), _ -> ok ReasonLIGO
| "auto" , Some sf when endswith sf ".mligo" -> ok Cameligo | _ -> simple_fail "Invalid syntax name.\n\
| "auto" , Some sf when endswith sf ".religo" -> ok ReasonLIGO Hint: Use \"pascaligo\", \"cameligo\" \
| "auto" , _ -> simple_fail "cannot auto-detect syntax, pleas use -s name_of_syntax" or \"reasonligo\".\n"
| "pascaligo" , _ -> ok Pascaligo
| "cameligo" , _ -> ok Cameligo
| "reasonligo", _ -> ok ReasonLIGO
| _ -> simple_fail "unrecognized parser"
let parsify_pascaligo = fun source -> let parsify_pascaligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing") @@ trace (simple_error "parsing") @@
Parser.Pascaligo.parse_file source in Parser.Pascaligo.parse_file source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Pascaligo.simpl_program raw in Simplify.Pascaligo.simpl_program raw
ok simplified in ok simplified
let parsify_expression_pascaligo = fun source -> let parsify_expression_pascaligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing expression") @@ trace (simple_error "parsing expression") @@
Parser.Pascaligo.parse_expression source in Parser.Pascaligo.parse_expression source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying expression") @@ trace (simple_error "simplifying expression") @@
Simplify.Pascaligo.simpl_expression raw in Simplify.Pascaligo.simpl_expression raw
ok simplified in ok simplified
let parsify_cameligo = fun source -> let parsify_cameligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing") @@ trace (simple_error "parsing") @@
Parser.Cameligo.parse_file source in Parser.Cameligo.parse_file source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in Simplify.Cameligo.simpl_program raw
ok simplified in ok simplified
let parsify_expression_cameligo = fun source -> let parsify_expression_cameligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing expression") @@ trace (simple_error "parsing expression") @@
Parser.Cameligo.parse_expression source in Parser.Cameligo.parse_expression source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying expression") @@ trace (simple_error "simplifying expression") @@
Simplify.Cameligo.simpl_expression raw in Simplify.Cameligo.simpl_expression raw
ok simplified in ok simplified
let parsify_reasonligo = fun source -> let parsify_reasonligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing") @@ trace (simple_error "parsing") @@
Parser.Reasonligo.parse_file source in Parser.Reasonligo.parse_file source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in Simplify.Cameligo.simpl_program raw
ok simplified in ok simplified
let parsify_expression_reasonligo = fun source -> let parsify_expression_reasonligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing expression") @@ trace (simple_error "parsing expression") @@
Parser.Reasonligo.parse_expression source in Parser.Reasonligo.parse_expression source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying expression") @@ trace (simple_error "simplifying expression") @@
Simplify.Cameligo.simpl_expression raw in Simplify.Cameligo.simpl_expression raw
ok simplified in ok simplified
let parsify = fun (syntax : v_syntax) source_filename -> let parsify syntax source =
let%bind parsify = match syntax with let%bind parsify =
| Pascaligo -> ok parsify_pascaligo match syntax with
| Cameligo -> ok parsify_cameligo PascaLIGO -> ok parsify_pascaligo
| ReasonLIGO -> ok parsify_reasonligo | CameLIGO -> ok parsify_cameligo
in | ReasonLIGO -> ok parsify_reasonligo in
let%bind parsified = parsify source_filename in
let%bind applied = Self_ast_simplified.all_program parsified in
ok applied
let parsify_expression = fun syntax source ->
let%bind parsify = match syntax with
| Pascaligo -> ok parsify_expression_pascaligo
| Cameligo -> ok parsify_expression_cameligo
| ReasonLIGO -> ok parsify_expression_reasonligo
in
let%bind parsified = parsify source in let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_expression parsified in let%bind applied = Self_ast_simplified.all_program parsified
ok applied in ok applied
let parsify_string_reasonligo = fun source -> let parsify_expression syntax source =
let%bind parsify = match syntax with
PascaLIGO -> ok parsify_expression_pascaligo
| CameLIGO -> ok parsify_expression_cameligo
| ReasonLIGO -> ok parsify_expression_reasonligo in
let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_expression parsified
in ok applied
let parsify_string_reasonligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing") @@ trace (simple_error "parsing") @@
Parser.Reasonligo.parse_string source in Parser.Reasonligo.parse_string source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in Simplify.Cameligo.simpl_program raw
ok simplified in ok simplified
let parsify_string_pascaligo = fun source -> let parsify_string_pascaligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing") @@ trace (simple_error "parsing") @@
Parser.Pascaligo.parse_string source in Parser.Pascaligo.parse_string source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Pascaligo.simpl_program raw in Simplify.Pascaligo.simpl_program raw
ok simplified in ok simplified
let parsify_string_cameligo = fun source -> let parsify_string_cameligo source =
let%bind raw = let%bind raw =
trace (simple_error "parsing") @@ trace (simple_error "parsing") @@
Parser.Cameligo.parse_string source in Parser.Cameligo.parse_string source in
let%bind simplified = let%bind simplified =
trace (simple_error "simplifying") @@ trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in Simplify.Cameligo.simpl_program raw
ok simplified in ok simplified
let parsify_string = fun (syntax : v_syntax) source_filename -> let parsify_string syntax source =
let%bind parsify = match syntax with let%bind parsify =
| Pascaligo -> ok parsify_string_pascaligo match syntax with
| Cameligo -> ok parsify_string_cameligo PascaLIGO -> ok parsify_string_pascaligo
| ReasonLIGO -> ok parsify_string_reasonligo | CameLIGO -> ok parsify_string_cameligo
in | ReasonLIGO -> ok parsify_string_reasonligo in
let%bind parsified = parsify source_filename in let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_program parsified in let%bind applied = Self_ast_simplified.all_program parsified
ok applied in ok applied
let pretty_print_pascaligo = fun source -> let pretty_print_pascaligo source =
let%bind ast = Parser.Pascaligo.parse_file source in let%bind ast = Parser.Pascaligo.parse_file source in
let buffer = Buffer.create 59 in let buffer = Buffer.create 59 in
let state = Parser_pascaligo.ParserLog.mk_state let state =
~offsets:true Parser_pascaligo.ParserLog.mk_state
~mode:`Byte ~offsets:true
~buffer in ~mode:`Byte
~buffer in
Parser_pascaligo.ParserLog.pp_ast state ast; Parser_pascaligo.ParserLog.pp_ast state ast;
ok buffer ok buffer
let pretty_print_cameligo = fun source -> let pretty_print_cameligo source =
let%bind ast = Parser.Cameligo.parse_file source in let%bind ast = Parser.Cameligo.parse_file source in
let buffer = Buffer.create 59 in let buffer = Buffer.create 59 in
let state = Parser_cameligo.ParserLog.mk_state let state = (* TODO: Should flow from the CLI *)
~offsets:true Parser_cameligo.ParserLog.mk_state
~mode:`Byte ~offsets:true
~buffer in ~mode:`Point
~buffer in
Parser.Cameligo.ParserLog.pp_ast state ast; Parser.Cameligo.ParserLog.pp_ast state ast;
ok buffer ok buffer
let pretty_print_reasonligo = fun source -> let pretty_print_reasonligo source =
let%bind ast = Parser.Reasonligo.parse_file source in let%bind ast = Parser.Reasonligo.parse_file source in
let buffer = Buffer.create 59 in let buffer = Buffer.create 59 in
let state = Parser.Reasonligo.ParserLog.mk_state let state = (* TODO: Should flow from the CLI *)
~offsets:true Parser.Reasonligo.ParserLog.mk_state
~mode:`Byte ~offsets:true
~buffer in ~mode:`Point
~buffer in
Parser.Reasonligo.ParserLog.pp_ast state ast; Parser.Reasonligo.ParserLog.pp_ast state ast;
ok buffer ok buffer
let pretty_print = fun syntax source_filename -> let pretty_print syntax source =
let%bind v_syntax = syntax_to_variant syntax (Some source_filename) in let%bind v_syntax =
(match v_syntax with syntax_to_variant syntax (Some source) in
| Pascaligo -> pretty_print_pascaligo match v_syntax with
| Cameligo -> pretty_print_cameligo PascaLIGO -> pretty_print_pascaligo source
| ReasonLIGO -> pretty_print_reasonligo) | CameLIGO -> pretty_print_cameligo source
source_filename | ReasonLIGO -> pretty_print_reasonligo source

View File

@ -1,129 +1,180 @@
open Trace module AST = Parser_cameligo.AST
module Parser = Parser_cameligo.Parser
module AST = Parser_cameligo.AST
module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_cameligo.LexToken module LexToken = Parser_cameligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_cameligo.ParErr
module SSet = Utils.String.Set
module Errors = struct (* Mock IOs TODO: Fill them with CLI options *)
let lexer_error (e: Lexer.error AST.reg) = module type IO =
let title () = "lexer error" in sig
let message () = Lexer.error_to_string e.value in val ext : string
let data = [ val options : EvalOpt.options
("parser_loc", end
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region
)
] in
error ~data title message
let parser_error source (start: Lexing.position) (stop: Lexing.position) lexbuf = module PreIO =
let title () = "parser error" in struct
let file = if source = "" then let ext = ".ligo"
"" let pre_options =
else EvalOpt.make ~libs:[]
Format.sprintf "In file \"%s|%s\"" start.pos_fname source ~verbose:SSet.empty
in ~offsets:true
let str = Format.sprintf ~mode:`Point
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n" ~cmd:EvalOpt.Quiet
(Lexing.lexeme lexbuf) ~mono:true
start.pos_lnum (start.pos_cnum - start.pos_bol) end
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file
in
let message () = str in
let loc = if start.pos_cnum = -1 then
Region.make
~start:(Pos.min ~file:source)
~stop:(Pos.from_byte stop)
else
Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop)
in
let data =
[
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
]
in
error ~data title message
let unrecognized_error source (start: Lexing.position) (stop: Lexing.position) lexbuf = module Parser =
let title () = "unrecognized error" in struct
let file = if source = "" then type ast = AST.t
"" type expr = AST.expr
else include Parser_cameligo.Parser
Format.sprintf "In file \"%s|%s\"" start.pos_fname source end
in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop)
in
let data = [
("unrecognized_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
] in
error ~data title message
end module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_cameligo.ParserLog
end
open Errors module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a module Errors =
struct
(* let data =
[("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let parse (parser: 'a parser) source lexbuf = let generic message =
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in let title () = ""
let result = and message () = message.Region.value
try in Trace.error ~data:[] title message
ok (parser read lexbuf) end
with
| Parser.Error ->
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ (parser_error source start stop lexbuf)
| Lexer.Error e ->
fail @@ (lexer_error e)
| _ ->
let _ = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ (unrecognized_error source start stop lexbuf)
in
close ();
result
let parse_file (source: string) : AST.t result = let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
(* Lexing and parsing errors *)
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
| exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region
in local_fail
("Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n", None, token)
| exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid))
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input = let pp_input =
let prefix = Filename.(source |> basename |> remove_extension) if SSet.mem "cpp" IO.options#verbose
and suffix = ".pp.mligo" then prefix ^ suffix
in prefix ^ suffix in else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s" in close_out pp_out; pp_input in
source pp_input in let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let%bind channel = let parse_string (s: string) =
generic_try (simple_error "error opening file") @@ let module IO =
(fun () -> open_in pp_input) in struct
let lexbuf = Lexing.from_channel channel in let ext = PreIO.ext
parse (Parser.contract) source lexbuf let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s:string) : AST.t result = let parse_expression (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse Parser.contract "" lexbuf struct
let ext = PreIO.ext
let parse_expression (s:string) : AST.expr result = let options = PreIO.pre_options ~input:None ~expr:true
let lexbuf = Lexing.from_string s in end in
parse Parser.interactive_expr "" lexbuf let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -90,7 +90,7 @@ tuple(item):
(* Possibly empty semicolon-separated values between brackets *) (* Possibly empty semicolon-separated values between brackets *)
list(item): list__(item):
"[" sep_or_term_list(item,";")? "]" { "[" sep_or_term_list(item,";")? "]" {
let compound = Brackets ($1,$3) let compound = Brackets ($1,$3)
and region = cover $1 $3 in and region = cover $1 $3 in
@ -182,7 +182,7 @@ sum_type:
variant: variant:
"<constr>" { {$1 with value={constr=$1; arg=None}} } "<constr>" { {$1 with value={constr=$1; arg=None}} }
| "<constr>" "of" cartesian { | "<constr>" "of" fun_type {
let region = cover $1.region (type_expr_to_region $3) let region = cover $1.region (type_expr_to_region $3)
and value = {constr=$1; arg = Some ($2,$3)} and value = {constr=$1; arg = Some ($2,$3)}
in {region; value} } in {region; value} }
@ -217,6 +217,7 @@ let_declaration:
let_binding: let_binding:
"<ident>" nseq(sub_irrefutable) type_annotation? "=" expr { "<ident>" nseq(sub_irrefutable) type_annotation? "=" expr {
Scoping.check_reserved_name $1;
let binders = Utils.nseq_cons (PVar $1) $2 in let binders = Utils.nseq_cons (PVar $1) $2 in
Utils.nseq_iter Scoping.check_pattern binders; Utils.nseq_iter Scoping.check_pattern binders;
{binders; lhs_type=$3; eq=$4; let_rhs=$5} {binders; lhs_type=$3; eq=$4; let_rhs=$5}
@ -293,7 +294,7 @@ core_pattern:
| "false" { PFalse $1 } | "false" { PFalse $1 }
| "true" { PTrue $1 } | "true" { PTrue $1 }
| par(ptuple) { PPar $1 } | par(ptuple) { PPar $1 }
| list(tail) { PList (PListComp $1) } | list__(tail) { PList (PListComp $1) }
| constr_pattern { PConstr $1 } | constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 } | record_pattern { PRecord $1 }
@ -584,7 +585,7 @@ core_expr:
| unit { EUnit $1 } | unit { EUnit $1 }
| "false" { ELogic (BoolExpr (False $1)) } | "false" { ELogic (BoolExpr (False $1)) }
| "true" { ELogic (BoolExpr (True $1)) } | "true" { ELogic (BoolExpr (True $1)) }
| list(expr) { EList (EListComp $1) } | list__(expr) { EList (EListComp $1) }
| sequence { ESeq $1 } | sequence { ESeq $1 }
| record_expr { ERecord $1 } | record_expr { ERecord $1 }
| update_record { EUpdate $1 } | update_record { EUpdate $1 }

View File

@ -27,12 +27,11 @@ module Unit =
(* Main *) (* Main *)
let issue_error point = let issue_error error : ('a, string Region.reg) Stdlib.result =
let error = Unit.format_error ~offsets:IO.options#offsets Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point IO.options#mode error)
in Stdlib.Error error
let parse parser : ('a,string) Stdlib.result = let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with try parser () with
(* Scoping errors *) (* Scoping errors *)
@ -81,11 +80,61 @@ let parse parser : ('a,string) Stdlib.result =
None, invalid None, invalid
in issue_error point) in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () = let () =
if IO.options#expr if Sys.command cpp_cmd <> 0 then
then match parse (fun () -> Unit.parse Unit.parse_expr) with Printf.eprintf "External error: \"%s\" failed." cpp_cmd
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg (* Instantiating the lexer and calling the parser *)
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> () let lexer_inst =
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -31,19 +31,30 @@ module VarSet = Set.Make (Ord)
let reserved = let reserved =
let open SSet in let open SSet in
empty empty
|> add "abs"
|> add "address"
|> add "amount"
|> add "assert" |> add "assert"
|> add "balance" |> add "balance"
|> add "time" |> add "black2b"
|> add "amount" |> add "check"
|> add "gas"
|> add "sender"
|> add "source"
|> add "failwith"
|> add "continue" |> add "continue"
|> add "stop" |> add "failwith"
|> add "gas"
|> add "hash"
|> add "hash_key"
|> add "implicit_account"
|> add "int" |> add "int"
|> add "abs" |> add "pack"
|> add "self_address"
|> add "sender"
|> add "sha256"
|> add "sha512"
|> add "source"
|> add "stop"
|> add "time"
|> add "unit" |> add "unit"
|> add "unpack"
let check_reserved_names vars = let check_reserved_names vars =
let is_reserved elt = SSet.mem elt.value reserved in let is_reserved elt = SSet.mem elt.value reserved in

View File

@ -15,17 +15,16 @@
(name parser_cameligo) (name parser_cameligo)
(public_name ligo.parser.cameligo) (public_name ligo.parser.cameligo)
(modules (modules
Scoping AST cameligo Parser ParserLog LexToken) Scoping AST cameligo Parser ParserLog LexToken ParErr)
(libraries (libraries
menhirLib menhirLib
parser_shared parser_shared
str str
simple-utils simple-utils
tezos-utils tezos-utils)
getopt)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared))) (flags (:standard -open Parser_shared -open Simple_utils)))
;; Build of the unlexer (for covering the ;; Build of the unlexer (for covering the
;; error states of the LR automaton) ;; error states of the LR automaton)
@ -52,8 +51,7 @@
(executable (executable
(name ParserMain) (name ParserMain)
(libraries parser_cameligo) (libraries parser_cameligo)
(modules (modules ParserMain)
ParErr ParserMain)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
@ -70,4 +68,4 @@
(rule (rule
(targets all.mligo) (targets all.mligo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe) (deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=mligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly ))) (action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=mligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly )))

View File

@ -1,154 +1,191 @@
open Trace module AST = Parser_pascaligo.AST
module AST = Parser_pascaligo.AST
module LexToken = Parser_pascaligo.LexToken module LexToken = Parser_pascaligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_pascaligo.Scoping module Scoping = Parser_pascaligo.Scoping
module Parser = Parser_pascaligo.Parser module Region = Simple_utils.Region
module ParErr = Parser_pascaligo.ParErr
module SSet = Utils.String.Set
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true
end
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.Parser
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Errors = module Errors =
struct struct
let reserved_name Region.{value; region} = (* let data =
let title () = Printf.sprintf "reserved name \"%s\"" value in [("location",
let message () = "" in fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let non_linear_pattern Region.{value; region} = let generic message =
let title () = let title () = ""
Printf.sprintf "repeated variable \"%s\" in this pattern" value in and message () = message.Region.value
let message () = "" in in Trace.error ~data:[] title message
let data = [ end
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let duplicate_parameter Region.{value; region} = let parse (module IO : IO) parser =
let title () = let module Unit = PreUnit (IO) in
Printf.sprintf "duplicate parameter \"%s\"" value in let local_fail error =
let message () = "" in Trace.fail
let data = [ @@ Errors.generic
("location", @@ Unit.format_error ~offsets:IO.options#offsets
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)] IO.options#mode error in
in error ~data title message match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
let duplicate_variant Region.{value; region} = (* Lexing and parsing errors *)
let title () =
Printf.sprintf "duplicate variant \"%s\" in this\
type declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let unrecognized_error source (start: Lexing.position) | Stdlib.Error error -> Trace.fail @@ Errors.generic error
(stop: Lexing.position) lexbuf = (* Scoping errors *)
let title () = "unrecognized error" in
let file =
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let message () =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file in
let loc = Region.make ~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop) in
let data = [
("unrecognized_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
let parser_error source (start: Lexing.position) | exception Scoping.Error (Scoping.Reserved_name name) ->
(stop: Lexing.position) lexbuf = let token =
let title () = "parser error" in Lexer.Token.mk_ident name.Region.value name.Region.region in
let file = (match token with
if source = "" then "" Stdlib.Error LexToken.Reserved_name ->
else Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in | Ok invalid ->
let message () = local_fail
Format.sprintf ("Reserved name.\nHint: Change the name.\n", None, invalid))
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file in
let loc =
if start.pos_cnum = -1 then
Region.make
~start:(Pos.min ~file:source) ~stop:(Pos.from_byte stop)
else
Region.make ~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop) in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] in
error ~data title message
let lexer_error (e: Lexer.error AST.reg) = | exception Scoping.Error (Scoping.Duplicate_parameter name) ->
let title () = "lexer error" in let token =
let message () = Lexer.error_to_string e.value in Lexer.Token.mk_ident name.Region.value name.Region.region in
let data = [ (match token with
("parser_loc", Stdlib.Error LexToken.Reserved_name ->
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)] Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
in error ~data title message | Ok invalid ->
end local_fail
("Duplicate parameter.\nHint: Change the name.\n",
None, invalid))
open Errors | exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region
in local_fail
("Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n", None, token)
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a | exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
let parse (parser: 'a parser) source lexbuf = | exception Scoping.Error (Scoping.Duplicate_field name) ->
let Lexer.{read; close; _} = Lexer.open_token_stream None in let token =
let result = Lexer.Token.mk_ident name.Region.value name.Region.region in
try ok (parser read lexbuf) with (match token with
Lexer.Error e -> Stdlib.Error LexToken.Reserved_name ->
fail @@ lexer_error e Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Parser.Error -> | Ok invalid ->
let start = Lexing.lexeme_start_p lexbuf in local_fail
let stop = Lexing.lexeme_end_p lexbuf in ("Duplicate field name in this record declaration.\n\
fail @@ parser_error source start stop lexbuf Hint: Change the name.\n",
| Scoping.Error (Scoping.Non_linear_pattern var) -> None, invalid))
fail @@ non_linear_pattern var
| Scoping.Error (Duplicate_parameter name) ->
fail @@ duplicate_parameter name
| Scoping.Error (Duplicate_variant name) ->
fail @@ duplicate_variant name
| Scoping.Error (Reserved_name name) ->
fail @@ reserved_name name
| _ ->
let () = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ unrecognized_error source start stop lexbuf
in close (); result
let parse_file (source: string) : AST.t result = let parse_file source =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let module Unit = PreUnit (IO) in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input = let pp_input =
let prefix = Filename.(source |> basename |> remove_extension) if SSet.mem "cpp" IO.options#verbose
and suffix = ".pp.ligo" then prefix ^ suffix
in prefix ^ suffix in else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s" in close_out pp_out; pp_input in
source pp_input in let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in let%bind () = sys_command cpp_cmd in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let%bind channel = let parse_string (s: string) =
generic_try (simple_error "error opening file") @@ let module IO =
(fun () -> open_in pp_input) in struct
let lexbuf = Lexing.from_channel channel in let ext = PreIO.ext
parse (Parser.contract) source lexbuf let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s:string) : AST.t result = let parse_expression (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse (Parser.contract) "" lexbuf struct
let ext = PreIO.ext
let parse_expression (s:string) : AST.expr result = let options = PreIO.pre_options ~input:None ~expr:true
let lexbuf = Lexing.from_string s in end in
parse (Parser.interactive_expr) "" lexbuf let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -17,6 +17,7 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Utils.ml ../shared/Utils.ml
../shared/ParserAPI.mli ../shared/ParserAPI.mli
../shared/ParserAPI.ml ../shared/ParserAPI.ml
../shared/LexerUnit.mli
../shared/LexerUnit.ml ../shared/LexerUnit.ml
../shared/ParserUnit.mli ../shared/ParserUnit.mli
../shared/ParserUnit.ml ../shared/ParserUnit.ml

View File

@ -1,4 +1,6 @@
(** Driver for the PascaLIGO lexer *) (* Driver for the PascaLIGO lexer *)
module Region = Simple_utils.Region
module IO = module IO =
struct struct

View File

@ -141,23 +141,23 @@ type_decl:
in {region; value} } in {region; value} }
type_expr: type_expr:
sum_type | record_type | cartesian { $1 } fun_type | sum_type | record_type { $1 }
cartesian: fun_type:
function_type { $1 } cartesian { $1 }
| function_type "*" nsepseq(function_type,"*") { | cartesian "->" fun_type {
let value = Utils.nsepseq_cons $1 $2 $3 in
let region = nsepseq_to_region type_expr_to_region value
in TProd {region; value} }
function_type:
core_type { $1 }
| core_type "->" function_type {
let start = type_expr_to_region $1 let start = type_expr_to_region $1
and stop = type_expr_to_region $3 in and stop = type_expr_to_region $3 in
let region = cover start stop in let region = cover start stop in
TFun {region; value = $1,$2,$3} } TFun {region; value = $1,$2,$3} }
cartesian:
core_type { $1 }
| core_type "*" nsepseq(core_type,"*") {
let value = Utils.nsepseq_cons $1 $2 $3 in
let region = nsepseq_to_region type_expr_to_region value
in TProd {region; value} }
core_type: core_type:
type_name { TVar $1 } type_name { TVar $1 }
| par(type_expr) { TPar $1 } | par(type_expr) { TPar $1 }
@ -201,7 +201,7 @@ sum_type:
variant: variant:
"<constr>" { {$1 with value = {constr=$1; arg=None}} } "<constr>" { {$1 with value = {constr=$1; arg=None}} }
| "<constr>" "of" cartesian { | "<constr>" "of" fun_type {
let region = cover $1.region (type_expr_to_region $3) let region = cover $1.region (type_expr_to_region $3)
and value = {constr=$1; arg = Some ($2,$3)} and value = {constr=$1; arg = Some ($2,$3)}
in {region; value} } in {region; value} }
@ -315,7 +315,7 @@ param_decl:
in ParamConst {region; value} } in ParamConst {region; value} }
param_type: param_type:
cartesian { $1 } fun_type { $1 }
block: block:
"begin" sep_or_term_list(statement,";") "end" { "begin" sep_or_term_list(statement,";") "end" {

View File

@ -1,4 +1,4 @@
(** Driver for the PascaLIGO parser *) (* Driver for the PascaLIGO parser *)
module IO = module IO =
struct struct
@ -27,12 +27,11 @@ module Unit =
(* Main *) (* Main *)
let issue_error point = let issue_error error : ('a, string Region.reg) Stdlib.result =
let error = Unit.format_error ~offsets:IO.options#offsets Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point IO.options#mode error)
in Stdlib.Error error
let parse parser : ('a,string) Stdlib.result = let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with try parser () with
(* Scoping errors *) (* Scoping errors *)
@ -87,16 +86,67 @@ let parse parser : ('a,string) Stdlib.result =
reserved name for the lexer. *) reserved name for the lexer. *)
Stdlib.Error _ -> assert false Stdlib.Error _ -> assert false
| Ok invalid -> | Ok invalid ->
let point = "Duplicate field name in this record declaration.\n\ let point =
Hint: Change the name.\n", "Duplicate field name in this record declaration.\n\
None, invalid Hint: Change the name.\n",
in issue_error point) None, invalid
in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () = let () =
if IO.options#expr if Sys.command cpp_cmd <> 0 then
then match parse (fun () -> Unit.parse Unit.parse_expr) with Printf.eprintf "External error: \"%s\" failed." cpp_cmd
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg (* Instantiating the lexer and calling the parser *)
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> () let lexer_inst =
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -7,7 +7,7 @@
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --table --strict --external-tokens LexToken)) (flags -la 1 --table --strict --explain --external-tokens LexToken))
;; Build of the parser as a library ;; Build of the parser as a library
@ -20,8 +20,7 @@
menhirLib menhirLib
parser_shared parser_shared
hex hex
simple-utils simple-utils)
tezos-utils)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Simple_utils))) (flags (:standard -open Parser_shared -open Simple_utils)))
@ -52,8 +51,7 @@
(executable (executable
(name ParserMain) (name ParserMain)
(libraries parser_pascaligo) (libraries parser_pascaligo)
(modules (modules ParserMain)
ParserMain)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo))) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))

View File

@ -1,131 +1,191 @@
open Trace open Trace
module Parser = Parser_reasonligo.Parser module AST = Parser_cameligo.AST
module AST = Parser_cameligo.AST module LexToken = Parser_reasonligo.LexToken
module ParserLog = Parser_cameligo.ParserLog module Lexer = Lexer.Make(LexToken)
module LexToken = Parser_reasonligo.LexToken module Scoping = Parser_cameligo.Scoping
module Lexer = Lexer.Make(LexToken) module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError module SyntaxError = Parser_reasonligo.SyntaxError
module Scoping = Parser_cameligo.Scoping module SSet = Utils.String.Set
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true
end
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_reasonligo.Parser
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_cameligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Errors = module Errors =
struct struct
let lexer_error (e: Lexer.error AST.reg) = let generic message =
let title () = "lexer error" in let title () = ""
let message () = Lexer.error_to_string e.value in and message () = message.Region.value
let data = [ in Trace.error ~data:[] title message
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)]
in error ~data title message
let wrong_function_arguments expr = let wrong_function_arguments (expr: AST.expr) =
let title () = "wrong function arguments" in let title () = "" in
let message () = "" in let message () = "Wrong function arguments.\n" in
let expression_loc = AST.expr_to_region expr in let expression_loc = AST.expr_to_region expr in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)] fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)]
in error ~data title message in error ~data title message
end
let parser_error source (start: Lexing.position) let parse (module IO : IO) parser =
(end_: Lexing.position) lexbuf = let module Unit = PreUnit (IO) in
let title () = "parser error" in let local_fail error =
let file = Trace.fail
if source = "" then "" @@ Errors.generic
else @@ Unit.format_error ~offsets:IO.options#offsets
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in IO.options#mode error in
let str = match parser () with
Format.sprintf Stdlib.Ok semantic_value -> Trace.ok semantic_value
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file in
let message () = str in
let loc =
if start.pos_cnum = -1
then Region.make
~start:(Pos.min ~file:source)
~stop:(Pos.from_byte end_)
else Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_) in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
let unrecognized_error source (start: Lexing.position) (end_: Lexing.position) lexbuf = (* Lexing and parsing errors *)
let title () = "unrecognized error" in
let file =
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let str =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_) in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
end | Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
open Errors | exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a | exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region
in local_fail
("Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n", None, token)
let parse (parser: 'a parser) source lexbuf = | exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in let token =
let result = Lexer.Token.mk_ident var.Region.value var.Region.region in
try (match token with
ok (parser read lexbuf) Stdlib.Error LexToken.Reserved_name ->
with Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| SyntaxError.Error (WrongFunctionArguments e) -> | Ok invalid ->
fail @@ (wrong_function_arguments e) local_fail ("Repeated variable in this pattern.\n\
| Parser.Error -> Hint: Change the name.\n",
let start = Lexing.lexeme_start_p lexbuf in None, invalid))
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (parser_error source start end_ lexbuf)
| Lexer.Error e ->
fail @@ (lexer_error e)
| _ ->
let _ = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (unrecognized_error source start end_ lexbuf)
in
close ();
result
let parse_file (source: string) : AST.t result = | exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid))
| exception SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
Trace.fail @@ Errors.wrong_function_arguments expr
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input = let pp_input =
let prefix = Filename.(source |> basename |> remove_extension) if SSet.mem "cpp" IO.options#verbose
and suffix = ".pp.religo" then prefix ^ suffix
in prefix ^ suffix in else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s" in close_out pp_out; pp_input in
source pp_input in let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let%bind channel = let parse_string (s: string) =
generic_try (simple_error "error opening file") @@ let module IO =
(fun () -> open_in pp_input) in struct
let lexbuf = Lexing.from_channel channel in let ext = PreIO.ext
parse (Parser.contract) source lexbuf let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s:string) : AST.t result = let parse_expression (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse (Parser.contract) "" lexbuf struct
let ext = PreIO.ext
let parse_expression (s:string) : AST.expr result = let options = PreIO.pre_options ~input:None ~expr:true
let lexbuf = Lexing.from_string s in end in
parse (Parser.interactive_expr) "" lexbuf let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -24,6 +24,24 @@ type 'a sequence_or_record =
let (<@) f g x = f (g x) let (<@) f g x = f (g x)
(**
Covert nsepseq to a chain of TFun's.
Necessary to handle cases like:
`type foo = (int, int) => int;`
*)
let rec nsepseq_to_curry hd rest =
match hd, rest with
| hd, (sep, item) :: rest ->
let start = type_expr_to_region hd in
let stop = nsepseq_to_region type_expr_to_region (hd, rest) in
let region = cover start stop in
TFun {
value = hd, sep, (nsepseq_to_curry item rest);
region
}
| hd, [] -> hd
(* END HEADER *) (* END HEADER *)
%} %}
@ -119,7 +137,7 @@ tuple(item):
(* Possibly empty semicolon-separated values between brackets *) (* Possibly empty semicolon-separated values between brackets *)
list(item): list__(item):
"[" sep_or_term_list(item,";")? "]" { "[" sep_or_term_list(item,";")? "]" {
let compound = Brackets ($1,$3) let compound = Brackets ($1,$3)
and region = cover $1 $3 in and region = cover $1 $3 in
@ -159,24 +177,40 @@ type_decl:
type_expr: type_expr:
cartesian | sum_type | record_type { $1 } cartesian | sum_type | record_type { $1 }
cartesian: type_expr_func:
fun_type { $1 } "=>" cartesian {
| fun_type "," nsepseq(fun_type,",") { $1, $2
let value = Utils.nsepseq_cons $1 $2 $3 in }
let region = nsepseq_to_region type_expr_to_region value
in TProd {region; value} }
fun_type: cartesian:
core_type { $1 } core_type { $1 }
| core_type "=>" fun_type { | type_name type_expr_func {
let start = type_expr_to_region $1 let (arrow, c) = $2 in
and stop = type_expr_to_region $3 in let value = TVar $1, arrow, c in
let region = cover start stop in let region = cover $1.region (type_expr_to_region c) in
TFun {region; value=$1,$2,$3} } TFun { region; value }
}
| "(" cartesian ")" type_expr_func {
let (arrow, c) = $4 in
let value = $2, arrow, c in
let region = cover $1 (type_expr_to_region c) in
TFun { region; value }
}
| "(" cartesian "," nsepseq(cartesian,",") ")" type_expr_func? {
match $6 with
| Some (arrow, c) ->
let (hd, rest) = Utils.nsepseq_cons $2 $3 $4 in
let rest = rest @ [(arrow, c)] in
nsepseq_to_curry hd rest
| None ->
let value = Utils.nsepseq_cons $2 $3 $4 in
let region = cover $1 $5 in
TProd {region; value}
}
core_type: core_type:
type_name { TVar $1 } type_name { TVar $1 }
| par(type_expr) { TPar $1 } | par(cartesian) { TPar $1 }
| module_name "." type_name { | module_name "." type_name {
let module_name = $1.value in let module_name = $1.value in
let type_name = $3.value in let type_name = $3.value in
@ -230,13 +264,13 @@ field_decl:
(* Top-level non-recursive definitions *) (* Top-level non-recursive definitions *)
let_declaration: let_declaration:
seq(Attr) "let" let_binding { seq(Attr) "let" let_binding {
let attributes = $1 in let attributes = $1 in
let kwd_let = $2 in let kwd_let = $2 in
let binding = $3 in let binding = $3 in
let value = kwd_let, binding, attributes in let value = kwd_let, binding, attributes in
let stop = expr_to_region binding.let_rhs in let stop = expr_to_region binding.let_rhs in
let region = cover $2 stop let region = cover $2 stop
in {region; value} } in {region; value} }
es6_func: es6_func:
@ -335,7 +369,7 @@ core_pattern:
| "false" { PFalse $1 } | "false" { PFalse $1 }
| "<string>" { PString $1 } | "<string>" { PString $1 }
| par(ptuple) { PPar $1 } | par(ptuple) { PPar $1 }
| list(sub_pattern) { PList (PListComp $1) } | list__(sub_pattern) { PList (PListComp $1) }
| constr_pattern { PConstr $1 } | constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 } | record_pattern { PRecord $1 }
@ -439,23 +473,21 @@ fun_expr:
{p.value with inside = arg_to_pattern p.value.inside} {p.value with inside = arg_to_pattern p.value.inside}
in PPar {p with value} in PPar {p with value}
| EUnit u -> PUnit u | EUnit u -> PUnit u
| ETuple { value; region } -> | ETuple { value; region } ->
PTuple { value = Utils.nsepseq_map arg_to_pattern value; region} PTuple { value = Utils.nsepseq_map arg_to_pattern value; region}
| EAnnot {region; value = {inside = t, colon, typ; _}} -> | EAnnot {region; value = {inside = t, colon, typ; _}} ->
let value = { pattern = arg_to_pattern t; colon; type_expr = typ} in let value = { pattern = arg_to_pattern t; colon; type_expr = typ} in
PPar { PPar {
value = { value = {
lpar = Region.ghost; lpar = Region.ghost;
rpar = Region.ghost; rpar = Region.ghost;
inside = PTyped {region; value} inside = PTyped {region; value}
}; };
region region
} }
| e -> ( | e ->
let open! SyntaxError in let open! SyntaxError in
raise (Error (WrongFunctionArguments e)) raise (Error (WrongFunctionArguments e)) in
)
in
let fun_args_to_pattern = function let fun_args_to_pattern = function
EAnnot { EAnnot {
value = { value = {
@ -473,17 +505,55 @@ fun_expr:
_} -> _} ->
(* ((foo:x, bar) : type) *) (* ((foo:x, bar) : type) *)
(arg_to_pattern fun_arg, []) (arg_to_pattern fun_arg, [])
| EPar {value = {inside = fun_arg; _ }; _} -> | EPar {value = {inside = EFun {
value = {
binders = PTyped { value = { pattern; colon; type_expr }; region = fun_region }, [];
arrow;
body;
_
};
_
}; _ }; region} ->
let expr_to_type = function
| EVar v -> TVar v
| e -> let open! SyntaxError
in raise (Error (WrongFunctionArguments e))
in
let type_expr = (
match type_expr with
| TProd {value; _} ->
let (hd, rest) = value in
let rest = rest @ [(arrow, expr_to_type body)] in
nsepseq_to_curry hd rest
| e ->
TFun {
value = e, arrow, expr_to_type body;
region = fun_region
}
)
in
PTyped {
value = {
pattern;
colon;
type_expr
};
region;
}, []
| EPar {value = {inside = fun_arg; _ }; _} ->
arg_to_pattern fun_arg, [] arg_to_pattern fun_arg, []
| EAnnot e -> | EAnnot _ as e ->
arg_to_pattern (EAnnot e), [] arg_to_pattern e, []
| ETuple {value = fun_args; _} -> | ETuple {value = fun_args; _} ->
let bindings = let bindings =
List.map (arg_to_pattern <@ snd) (snd fun_args) in List.map (arg_to_pattern <@ snd) (snd fun_args) in
List.iter Scoping.check_pattern bindings; List.iter Scoping.check_pattern bindings;
arg_to_pattern (fst fun_args), bindings arg_to_pattern (fst fun_args), bindings
| EUnit e -> | EUnit _ as e ->
arg_to_pattern (EUnit e), [] arg_to_pattern e, []
| EVar _ as e ->
arg_to_pattern e, []
| e -> let open! SyntaxError | e -> let open! SyntaxError
in raise (Error (WrongFunctionArguments e)) in raise (Error (WrongFunctionArguments e))
in in
@ -576,8 +646,8 @@ case_clause(right_expr):
let_expr(right_expr): let_expr(right_expr):
seq(Attr) "let" let_binding ";" right_expr { seq(Attr) "let" let_binding ";" right_expr {
let attributes = $1 in let attributes = $1 in
let kwd_let = $2 in let kwd_let = $2 in
let binding = $3 in let binding = $3 in
let kwd_in = $4 in let kwd_in = $4 in
let body = $5 in let body = $5 in
@ -727,8 +797,8 @@ common_expr:
| "true" { ELogic (BoolExpr (True $1)) } | "true" { ELogic (BoolExpr (True $1)) }
core_expr_2: core_expr_2:
common_expr { $1 } common_expr { $1 }
| list(expr) { EList (EListComp $1) } | list__(expr) { EList (EListComp $1) }
list_or_spread: list_or_spread:
"[" expr "," sep_or_term_list(expr, ",") "]" { "[" expr "," sep_or_term_list(expr, ",") "]" {
@ -807,11 +877,11 @@ projection:
field_path = snd $4} field_path = snd $4}
in {region; value} } in {region; value} }
path : path:
"<ident>" {Name $1} "<ident>" { Name $1 }
| projection { Path $1} | projection { Path $1 }
update_record : update_record:
"{""..."path "," sep_or_term_list(field_path_assignment,",") "}" { "{""..."path "," sep_or_term_list(field_path_assignment,",") "}" {
let region = cover $1 $6 in let region = cover $1 $6 in
let ne_elements, terminator = $5 in let ne_elements, terminator = $5 in

View File

@ -27,12 +27,11 @@ module Unit =
(* Main *) (* Main *)
let issue_error point = let issue_error error : ('a, string Region.reg) Stdlib.result =
let error = Unit.format_error ~offsets:IO.options#offsets Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point IO.options#mode error)
in Stdlib.Error error
let parse parser : ('a,string) Stdlib.result = let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with try parser () with
(* Ad hoc errors from the parser *) (* Ad hoc errors from the parser *)
@ -43,10 +42,10 @@ let parse parser : ('a,string) Stdlib.result =
Examples of valid functions:\n\ Examples of valid functions:\n\
let x = (a: string, b: int) : int => 3;\n\ let x = (a: string, b: int) : int => 3;\n\
let x = (a: string) : string => \"Hello, \" ++ a;\n" let x = (a: string) : string => \"Hello, \" ++ a;\n"
and reg = AST.expr_to_region expr in and region = AST.expr_to_region expr in
let error = Unit.short_error ~offsets:IO.options#offsets let error = Unit.short_error ~offsets:IO.options#offsets
IO.options#mode msg reg IO.options#mode msg region
in Stdlib.Error error in Stdlib.Error Region.{value=error; region}
(* Scoping errors *) (* Scoping errors *)
@ -96,11 +95,61 @@ let parse parser : ('a,string) Stdlib.result =
None, invalid None, invalid
in issue_error point) in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () = let () =
if IO.options#expr if Sys.command cpp_cmd <> 0 then
then match parse (fun () -> Unit.parse Unit.parse_expr) with Printf.eprintf "External error: \"%s\" failed." cpp_cmd
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg (* Instantiating the lexer and calling the parser *)
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> () let lexer_inst =
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -7,7 +7,7 @@
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --table --explain --strict --external-tokens LexToken)) (flags -la 1 --table --strict --explain --external-tokens LexToken))
;; Build of the parser as a library ;; Build of the parser as a library
@ -15,18 +15,16 @@
(name parser_reasonligo) (name parser_reasonligo)
(public_name ligo.parser.reasonligo) (public_name ligo.parser.reasonligo)
(modules (modules
SyntaxError reasonligo LexToken Parser) SyntaxError reasonligo LexToken ParErr Parser)
(libraries (libraries
menhirLib menhirLib
parser_shared parser_shared
parser_cameligo parser_cameligo
str str
simple-utils simple-utils)
tezos-utils
getopt)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))) (flags (:standard -open Parser_shared -open Simple_utils -open Parser_cameligo)))
;; Build of the unlexer (for covering the ;; Build of the unlexer (for covering the
;; error states of the LR automaton) ;; error states of the LR automaton)
@ -55,8 +53,7 @@
(libraries (libraries
parser_reasonligo parser_reasonligo
parser_cameligo) parser_cameligo)
(modules (modules ParserMain)
ParErr ParserMain)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo))) (flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo)))

View File

@ -145,7 +145,16 @@ module type S =
close : unit -> unit close : unit -> unit
} }
val open_token_stream : file_path option -> instance type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
val open_token_stream : input -> (instance, open_err) Stdlib.result
(* Error reporting *) (* Error reporting *)
@ -157,7 +166,7 @@ module type S =
val format_error : val format_error :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> string error Region.reg -> file:bool -> string Region.reg
end end

View File

@ -165,9 +165,18 @@ module type S =
get_last : unit -> Region.t; get_last : unit -> Region.t;
get_file : unit -> file_path; get_file : unit -> file_path;
close : unit -> unit close : unit -> unit
} }
val open_token_stream : file_path option -> instance type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
val open_token_stream : input -> (instance, open_err) Stdlib.result
(* Error reporting *) (* Error reporting *)
@ -179,7 +188,7 @@ module type S =
val format_error : val format_error :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> string error Region.reg -> file:bool -> string Region.reg
end end
(* The functorised interface (* The functorised interface
@ -444,8 +453,9 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let format_error ?(offsets=true) mode Region.{region; value} ~file = let format_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value let msg = error_to_string value
and reg = region#to_string ~file ~offsets mode and reg = region#to_string ~file ~offsets mode in
in sprintf "Lexical error %s:\n%s" reg msg let value = sprintf "Lexical error %s:\n%s" reg msg
in Region.{value; region}
let fail region value = raise (Error Region.{region; value}) let fail region value = raise (Error Region.{region; value})
@ -516,15 +526,12 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let region, lexeme, state = sync state buffer in let region, lexeme, state = sync state buffer in
let lexeme = Str.string_before lexeme (String.index lexeme 't') in let lexeme = Str.string_before lexeme (String.index lexeme 't') in
match format_tz lexeme with match format_tz lexeme with
| Some tz -> ( None -> assert false
match Token.mk_mutez (Z.to_string tz ^ "mutez") region with | Some tz ->
Ok token -> match Token.mk_mutez (Z.to_string tz ^ "mutez") region with
token, state Ok token -> token, state
| Error Token.Non_canonical_zero -> | Error Token.Non_canonical_zero ->
fail region Non_canonical_zero fail region Non_canonical_zero
)
| None -> assert false
let mk_ident state buffer = let mk_ident state buffer =
let region, lexeme, state = sync state buffer in let region, lexeme, state = sync state buffer in
@ -554,7 +561,6 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let region, _, state = sync state buffer let region, _, state = sync state buffer
in Token.eof region, state in Token.eof region, state
(* END HEADER *) (* END HEADER *)
} }
@ -580,8 +586,9 @@ let byte_seq = byte | byte (byte | '_')* byte
let bytes = "0x" (byte_seq? as seq) let bytes = "0x" (byte_seq? as seq)
let esc = "\\n" | "\\\"" | "\\\\" | "\\b" let esc = "\\n" | "\\\"" | "\\\\" | "\\b"
| "\\r" | "\\t" | "\\x" byte | "\\r" | "\\t" | "\\x" byte
let pascaligo_sym = "=/=" | '#' | ":="
let cameligo_sym = "<>" | "::" | "||" | "&&" let pascaligo_sym = "=/=" | '#' | ":="
let cameligo_sym = "<>" | "::" | "||" | "&&"
let reasonligo_sym = '!' | "=>" | "!=" | "==" | "++" | "..." | "||" | "&&" let reasonligo_sym = '!' | "=>" | "!=" | "==" | "++" | "..." | "||" | "&&"
let symbol = let symbol =
@ -680,7 +687,7 @@ and scan state = parse
Some special errors are recognised in the semantic actions of the Some special errors are recognised in the semantic actions of the
following regular expressions. The first error is a minus sign following regular expressions. The first error is a minus sign
separated from the integer it applies by some markup (space or separated from the integer it applies to by some markup (space or
tabs). The second is a minus sign immediately followed by tabs). The second is a minus sign immediately followed by
anything else than a natural number (matched above) or markup and anything else than a natural number (matched above) or markup and
a number (previous error). The third is the strange occurrence of a number (previous error). The third is the strange occurrence of
@ -865,10 +872,20 @@ type instance = {
close : unit -> unit close : unit -> unit
} }
let open_token_stream file_path_opt = type input =
let file_path = match file_path_opt with File of file_path (* "-" means stdin *)
None | Some "-" -> "" | Stdin
| Some file_path -> file_path in | String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
let open_token_stream input =
let file_path = match input with
File file_path ->
if file_path = "-" then "" else file_path
| _ -> "" in
let pos = Pos.min ~file:file_path in let pos = Pos.min ~file:file_path in
let buf_reg = ref (pos#byte, pos#byte) let buf_reg = ref (pos#byte, pos#byte)
and first_call = ref true and first_call = ref true
@ -939,7 +956,7 @@ let open_token_stream file_path_opt =
match FQueue.deq !state.units with match FQueue.deq !state.units with
None -> None ->
scan buffer; scan buffer;
read_token ~log buffer read ~log buffer
| Some (units, (left_mark, token)) -> | Some (units, (left_mark, token)) ->
log left_mark token; log left_mark token;
state := {!state with units; state := {!state with units;
@ -949,15 +966,33 @@ let open_token_stream file_path_opt =
patch_buffer (Token.to_region token)#byte_pos buffer; patch_buffer (Token.to_region token)#byte_pos buffer;
token in token in
let cin = match file_path_opt with let buf_close_res =
None | Some "-" -> stdin match input with
| Some file_path -> open_in file_path in File "" | File "-" | Stdin ->
let buffer = Lexing.from_channel cin in Ok (Lexing.from_channel stdin, fun () -> close_in stdin)
let () = match file_path_opt with | File path ->
None | Some "-" -> () (try
| Some file_path -> reset ~file:file_path buffer let chan = open_in path in
and close () = close_in cin in let close () = close_in chan in
{read = read_token; buffer; get_win; get_pos; get_last; get_file; close} Ok (Lexing.from_channel chan, close)
with
Sys_error msg -> Stdlib.Error (File_opening msg))
| String s ->
Ok (Lexing.from_string s, fun () -> ())
| Channel chan ->
let close () = close_in chan in
Ok (Lexing.from_channel chan, close)
| Buffer b -> Ok (b, fun () -> ()) in
match buf_close_res with
Ok (buffer, close) ->
let () =
match input with
File path when path <> "" -> reset ~file:path buffer
| _ -> () in
let instance = {
read; buffer; get_win; get_pos; get_last; get_file; close}
in Ok instance
| Error _ as e -> e
end (* of functor [Make] in HEADER *) end (* of functor [Make] in HEADER *)
(* END TRAILER *) (* END TRAILER *)

View File

@ -1,4 +1,6 @@
(** Embedding the LIGO lexer in a debug module *) (* Embedding the LIGO lexer in a debug module *)
module Region = Simple_utils.Region
module type S = module type S =
sig sig
@ -14,7 +16,7 @@ module type S =
val trace : val trace :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
file_path option -> EvalOpt.command -> file_path option -> EvalOpt.command ->
(unit, string) Stdlib.result (unit, string Region.reg) Stdlib.result
end end
module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) = module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
@ -48,28 +50,31 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
type file_path = string type file_path = string
let trace ?(offsets=true) mode file_path_opt command : let trace ?(offsets=true) mode file_path_opt command :
(unit, string) Stdlib.result = (unit, string Region.reg) Stdlib.result =
try let input =
let Lexer.{read; buffer; close; _} = match file_path_opt with
Lexer.open_token_stream file_path_opt in Some file_path -> Lexer.File file_path
let log = output_token ~offsets mode command stdout | None -> Lexer.Stdin in
and close_all () = close (); close_out stdout in match Lexer.open_token_stream input with
let rec iter () = Ok Lexer.{read; buffer; close; _} ->
match read ~log buffer with let log = output_token ~offsets mode command stdout
token -> and close_all () = close (); close_out stdout in
if Token.is_eof token let rec iter () =
then Stdlib.Ok () match read ~log buffer with
else iter () token ->
| exception Lexer.Error error -> if Token.is_eof token
let file = then Stdlib.Ok ()
match file_path_opt with else iter ()
None | Some "-" -> false | exception Lexer.Error error ->
| Some _ -> true in let file =
let msg = match file_path_opt with
Lexer.format_error ~offsets mode ~file error None | Some "-" -> false
in Stdlib.Error msg in | Some _ -> true in
let result = iter () let msg =
in (close_all (); result) Lexer.format_error ~offsets mode ~file error
with Sys_error msg -> Stdlib.Error msg in Stdlib.Error msg in
let result = iter ()
in close_all (); result
| Stdlib.Error (Lexer.File_opening msg) ->
close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
end end

View File

@ -1,3 +1,5 @@
module Region = Simple_utils.Region
module type S = module type S =
sig sig
module Lexer : Lexer.S module Lexer : Lexer.S
@ -12,7 +14,7 @@ module type S =
val trace : val trace :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
file_path option -> EvalOpt.command -> file_path option -> EvalOpt.command ->
(unit, string) Stdlib.result (unit, string Region.reg) Stdlib.result
end end
module Make (Lexer: Lexer.S) : S with module Lexer = Lexer module Make (Lexer: Lexer.S) : S with module Lexer = Lexer

View File

@ -1,5 +1,7 @@
(* Functor to build a standalone LIGO lexer *) (* Functor to build a standalone LIGO lexer *)
module Region = Simple_utils.Region
module type IO = module type IO =
sig sig
val ext : string (* LIGO file extension *) val ext : string (* LIGO file extension *)
@ -49,7 +51,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
(* Running the lexer on the input file *) (* Running the lexer on the input file *)
let scan () : (Lexer.token list, string) Stdlib.result = let scan () : (Lexer.token list, string Region.reg) Stdlib.result =
(* Preprocessing the input *) (* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose if SSet.mem "cpp" IO.options#verbose
@ -59,36 +61,36 @@ module Make (IO: IO) (Lexer: Lexer.S) =
if Sys.command cpp_cmd <> 0 then if Sys.command cpp_cmd <> 0 then
let msg = let msg =
sprintf "External error: the command \"%s\" failed." cpp_cmd sprintf "External error: the command \"%s\" failed." cpp_cmd
in Stdlib.Error msg in Stdlib.Error (Region.wrap_ghost msg)
else else
try match Lexer.open_token_stream (Lexer.File pp_input) with
let Lexer.{read; buffer; close; _} = Ok Lexer.{read; buffer; close; _} ->
Lexer.open_token_stream (Some pp_input) in let close_all () = close (); close_out stdout in
let close_all () = close (); close_out stdout in let rec read_tokens tokens =
let rec read_tokens tokens = match read ~log:(fun _ _ -> ()) buffer with
match read ~log:(fun _ _ -> ()) buffer with token ->
token -> if Lexer.Token.is_eof token
if Lexer.Token.is_eof token then Stdlib.Ok (List.rev tokens)
then Stdlib.Ok (List.rev tokens) else read_tokens (token::tokens)
else read_tokens (token::tokens) | exception Lexer.Error error ->
| exception Lexer.Error error -> let file =
let file = match IO.options#input with
match IO.options#input with None | Some "-" -> false
None | Some "-" -> false | Some _ -> true in
| Some _ -> true in let msg =
let msg = Lexer.format_error ~offsets:IO.options#offsets
Lexer.format_error ~offsets:IO.options#offsets IO.options#mode ~file error
IO.options#mode ~file error in Stdlib.Error msg in
in Stdlib.Error msg in let result = read_tokens []
let result = read_tokens [] in close_all (); result
in close_all (); result | Stdlib.Error (Lexer.File_opening msg) ->
with Sys_error msg -> close_out stdout; Stdlib.Error msg close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
(* Tracing the lexing (effectful) *) (* Tracing the lexing (effectful) *)
module Log = LexerLog.Make (Lexer) module Log = LexerLog.Make (Lexer)
let trace () : (unit, string) Stdlib.result = let trace () : (unit, string Region.reg) Stdlib.result =
(* Preprocessing the input *) (* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose if SSet.mem "cpp" IO.options#verbose
@ -98,7 +100,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
if Sys.command cpp_cmd <> 0 then if Sys.command cpp_cmd <> 0 then
let msg = let msg =
sprintf "External error: the command \"%s\" failed." cpp_cmd sprintf "External error: the command \"%s\" failed." cpp_cmd
in Stdlib.Error msg in Stdlib.Error (Region.wrap_ghost msg)
else else
Log.trace ~offsets:IO.options#offsets Log.trace ~offsets:IO.options#offsets
IO.options#mode IO.options#mode

View File

@ -1,5 +1,7 @@
(* Functor to build a standalone LIGO lexer *) (* Functor to build a standalone LIGO lexer *)
module Region = Simple_utils.Region
module type IO = module type IO =
sig sig
val ext : string (* LIGO file extension *) val ext : string (* LIGO file extension *)
@ -8,6 +10,6 @@ module type IO =
module Make (IO: IO) (Lexer: Lexer.S) : module Make (IO: IO) (Lexer: Lexer.S) :
sig sig
val scan : unit -> (Lexer.token list, string) Stdlib.result val scan : unit -> (Lexer.token list, string Region.reg) Stdlib.result
val trace : unit -> (unit, string) Stdlib.result val trace : unit -> (unit, string Region.reg) Stdlib.result
end end

View File

@ -24,6 +24,7 @@ module type PARSER =
val interactive_expr : val interactive_expr :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr (Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr
val contract : val contract :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast (Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast
@ -41,6 +42,7 @@ module type PARSER =
sig sig
val interactive_expr : val interactive_expr :
Lexing.position -> expr MenhirInterpreter.checkpoint Lexing.position -> expr MenhirInterpreter.checkpoint
val contract : val contract :
Lexing.position -> ast MenhirInterpreter.checkpoint Lexing.position -> ast MenhirInterpreter.checkpoint
end end
@ -102,7 +104,9 @@ module Make (IO : IO)
let invalid_lexeme = Lexer.Token.to_lexeme invalid in let invalid_lexeme = Lexer.Token.to_lexeme invalid in
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
let header = header ^ trailer in let header = header ^ trailer in
header ^ (if msg = "" then ".\n" else ":\n" ^ msg) let msg =
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
in Region.{value=msg; region=invalid_region}
let failure get_win checkpoint = let failure get_win checkpoint =
let message = ParErr.message (state checkpoint) in let message = ParErr.message (state checkpoint) in

View File

@ -54,6 +54,8 @@ module Make (IO: IO)
(Parser: PARSER with type token = Lexer.Token.token) (Parser: PARSER with type token = Lexer.Token.token)
(ParErr: sig val message : int -> string end) : (ParErr: sig val message : int -> string end) :
sig sig
(* WARNING: The following parsers may all raise [Lexer.Error] *)
(* The monolithic API of Menhir *) (* The monolithic API of Menhir *)
val mono_contract : val mono_contract :
@ -74,5 +76,6 @@ module Make (IO: IO)
val incr_contract : Lexer.instance -> Parser.ast val incr_contract : Lexer.instance -> Parser.ast
val incr_expr : Lexer.instance -> Parser.expr val incr_expr : Lexer.instance -> Parser.expr
val format_error : ?offsets:bool -> [`Point | `Byte] -> error -> string val format_error :
?offsets:bool -> [`Point | `Byte] -> error -> string Region.reg
end end

View File

@ -37,42 +37,13 @@ module Make (Lexer: Lexer.S)
open Printf open Printf
module SSet = Utils.String.Set module SSet = Utils.String.Set
(* Error printing and exception tracing *) (* Log of the lexer *)
let () = Printexc.record_backtrace true module Log = LexerLog.Make (Lexer)
(* Preprocessing the input source and opening the input channels *) let log =
Log.output_token ~offsets:IO.options#offsets
(* Path for CPP inclusions (#include) *) IO.options#mode IO.options#cmd stdout
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
(* Error handling (reexported from [ParserAPI]) *) (* Error handling (reexported from [ParserAPI]) *)
@ -81,8 +52,6 @@ module Make (Lexer: Lexer.S)
type invalid = Parser.token type invalid = Parser.token
type error = message * valid option * invalid type error = message * valid option * invalid
exception Point of error
(* Instantiating the parser *) (* Instantiating the parser *)
module Front = ParserAPI.Make (IO)(Lexer)(Parser)(ParErr) module Front = ParserAPI.Make (IO)(Lexer)(Parser)(ParErr)
@ -94,18 +63,23 @@ module Make (Lexer: Lexer.S)
(* Parsing an expression *) (* Parsing an expression *)
let parse_expr lexer_inst tokeniser output state : let parse_expr lexer_inst :
(AST.expr, string) Stdlib.result = (AST.expr, message Region.reg) Stdlib.result =
let close_all () = let output = Buffer.create 131 in
lexer_inst.Lexer.close (); close_out stdout in let state =
let lexbuf = lexer_inst.Lexer.buffer in ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
~buffer:output in
let close () = lexer_inst.Lexer.close () in
let expr = let expr =
try try
if IO.options#mono then if IO.options#mono then
Front.mono_expr tokeniser lexbuf let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_expr tokeniser lexbuf
else else
Front.incr_expr lexer_inst Front.incr_expr lexer_inst
with exn -> close_all (); raise exn in with exn -> close (); raise exn in
let () = let () =
if SSet.mem "ast-tokens" IO.options#verbose then if SSet.mem "ast-tokens" IO.options#verbose then
begin begin
@ -120,22 +94,27 @@ module Make (Lexer: Lexer.S)
ParserLog.pp_expr state expr; ParserLog.pp_expr state expr;
Buffer.output_buffer stdout output Buffer.output_buffer stdout output
end end
in close_all (); Ok expr in close (); Ok expr
(* Parsing a contract *) (* Parsing a contract *)
let parse_contract lexer_inst tokeniser output state let parse_contract lexer_inst :
: (AST.t, string) Stdlib.result = (AST.t, message Region.reg) Stdlib.result =
let close_all () = let output = Buffer.create 131 in
lexer_inst.Lexer.close (); close_out stdout in let state =
let lexbuf = lexer_inst.Lexer.buffer in ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
~buffer:output in
let close () = lexer_inst.Lexer.close () in
let ast = let ast =
try try
if IO.options#mono then if IO.options#mono then
Front.mono_contract tokeniser lexbuf let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_contract tokeniser lexbuf
else else
Front.incr_contract lexer_inst Front.incr_contract lexer_inst
with exn -> close_all (); raise exn in with exn -> close (); raise exn in
let () = let () =
if SSet.mem "ast-tokens" IO.options#verbose then if SSet.mem "ast-tokens" IO.options#verbose then
begin begin
@ -150,74 +129,45 @@ module Make (Lexer: Lexer.S)
ParserLog.pp_ast state ast; ParserLog.pp_ast state ast;
Buffer.output_buffer stdout output Buffer.output_buffer stdout output
end end
in close_all (); Ok ast in close (); Ok ast
(* Wrapper for the parsers above *) (* Wrapper for the parsers above *)
let parse parser = type 'a parser = Lexer.instance -> ('a, message Region.reg) result
(* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose let apply lexer_inst parser =
then eprintf "%s\n%!" cpp_cmd (* Calling the parser and filtering errors *)
else ();
if Sys.command cpp_cmd <> 0 then match parser lexer_inst with
let msg = Stdlib.Error _ as error -> error
sprintf "External error: \"%s\" failed." cpp_cmd | Stdlib.Ok _ as node -> node
in Stdlib.Error msg
else
(* Instantiating the lexer *)
let lexer_inst = Lexer.open_token_stream (Some pp_input) in (* Lexing errors *)
(* Making the tokeniser *) | exception Lexer.Error err ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let error =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in Stdlib.Error error
let module Log = LexerLog.Make (Lexer) in (* Incremental API of Menhir *)
let log = | exception Front.Point point ->
Log.output_token ~offsets:IO.options#offsets let error =
IO.options#mode IO.options#cmd stdout in Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
let tokeniser = lexer_inst.Lexer.read ~log in (* Monolithic API of Menhir *)
let output = Buffer.create 131 in | exception Parser.Error ->
let state = ParserLog.mk_state let invalid, valid_opt =
~offsets:IO.options#offsets match lexer_inst.Lexer.get_win () with
~mode:IO.options#mode Lexer.Nil ->
~buffer:output in
(* Calling the specific parser (that is, the parameter) *)
match parser lexer_inst tokeniser output state with
Stdlib.Error _ as error -> error
| Stdlib.Ok _ as node -> node
(* Lexing errors *)
| exception Lexer.Error err ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let error =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in Stdlib.Error error
(* Incremental API of Menhir *)
| exception Front.Point point ->
let error =
Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
(* Monolithic API of Menhir *)
| exception Parser.Error ->
let invalid, valid_opt =
match lexer_inst.Lexer.get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *) assert false (* Safe: There is always at least EOF. *)
| Lexer.One invalid -> invalid, None | Lexer.One invalid -> invalid, None
| Lexer.Two (invalid, valid) -> invalid, Some valid in | Lexer.Two (invalid, valid) -> invalid, Some valid in
@ -227,8 +177,8 @@ module Make (Lexer: Lexer.S)
IO.options#mode point IO.options#mode point
in Stdlib.Error error in Stdlib.Error error
(* I/O errors *) (* I/O errors *)
| exception Sys_error error -> Stdlib.Error error
| exception Sys_error error ->
Stdlib.Error (Region.wrap_ghost error)
end end

View File

@ -23,49 +23,37 @@ module type Pretty =
val print_expr : state -> expr -> unit val print_expr : state -> expr -> unit
end end
module Make (Lexer: Lexer.S) module Make (Lexer : Lexer.S)
(AST: sig type t type expr end) (AST : sig type t type expr end)
(Parser: ParserAPI.PARSER (Parser : ParserAPI.PARSER
with type ast = AST.t with type ast = AST.t
and type expr = AST.expr and type expr = AST.expr
and type token = Lexer.token) and type token = Lexer.token)
(ParErr: sig val message : int -> string end) (ParErr : sig val message : int -> string end)
(ParserLog: Pretty with type ast = AST.t (ParserLog : Pretty with type ast = AST.t
and type expr = AST.expr) and type expr = AST.expr)
(IO: IO) : (IO: IO) :
sig sig
(* Error handling (reexported from [ParserAPI]) *) (* Error handling reexported from [ParserAPI] without the
exception [Point] *)
type message = string type message = string
type valid = Parser.token type valid = Parser.token
type invalid = Parser.token type invalid = Parser.token
type error = message * valid option * invalid type error = message * valid option * invalid
exception Point of error
val format_error : val format_error :
?offsets:bool -> [`Byte | `Point] -> error -> string ?offsets:bool -> [`Byte | `Point] -> error -> string Region.reg
val short_error : val short_error :
?offsets:bool -> [`Point | `Byte] -> string -> Region.t -> string ?offsets:bool -> [`Point | `Byte] -> message -> Region.t -> string
(* Parsers *) (* Parsers *)
val parse : type 'a parser = Lexer.instance -> ('a, message Region.reg) result
(Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state -> ('a, string) result) ->
('a, string) result
val parse_contract : val apply : Lexer.instance -> 'a parser -> ('a, message Region.reg) result
Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state ->
(AST.t, string) Stdlib.result
val parse_expr :
Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state -> (AST.expr, string) Stdlib.result
val parse_contract : AST.t parser
val parse_expr : AST.expr parser
end end

View File

@ -32,46 +32,48 @@ module Errors = struct
in in
let data = [ let data = [
("expected", fun () -> expected_name); ("expected", fun () -> expected_name);
("location" , fun () -> Format.asprintf "%a" Location.pp_lift @@ Raw.pattern_to_region actual) ("location",
] in fun () -> Format.asprintf "%a" Location.pp_lift @@
error ~data title message Raw.pattern_to_region actual)]
in error ~data title message
let unsupported_let_in_function (patterns : Raw.pattern list) = let unsupported_let_in_function (patterns : Raw.pattern list) =
let title () = "unsupported 'let ... in' function" in let title () = "" in
let message () = "defining functions via 'let ... in' is not supported yet" in let message () = "\nDefining functions with \"let ... in\" \
is not supported yet.\n" in
let patterns_loc = let patterns_loc =
List.fold_left (fun a p -> Region.cover a (Raw.pattern_to_region p)) List.fold_left (fun a p -> Region.cover a (Raw.pattern_to_region p))
Region.ghost patterns in Region.ghost patterns in
let data = [ let data = [
("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc) ("location",
] in fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc)]
error ~data title message in error ~data title message
let unknown_predefined_type name = let unknown_predefined_type name =
let title () = "type constants" in let title () = "Type constants" in
let message () = let message () =
Format.asprintf "unknown predefined type \"%s\"" name.Region.value in Format.asprintf "Unknown predefined type \"%s\".\n"
name.Region.value in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region) fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region)]
] in in error ~data title message
error ~data title message
let untyped_fun_param var = let untyped_fun_param var =
let title () = "function parameter" in let title () = "" in
let message () = let message () =
Format.asprintf "untyped function parameters are not supported yet" in Format.asprintf "\nUntyped function parameters \
are not supported yet.\n" in
let param_loc = var.Region.region in let param_loc = var.Region.region in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ param_loc) fun () -> Format.asprintf "%a" Location.pp_lift @@ param_loc)]
] in in error ~data title message
error ~data title message
let unsupported_tuple_pattern p = let unsupported_tuple_pattern p =
let title () = "tuple pattern" in let title () = "" in
let message () = let message () =
Format.asprintf "tuple patterns are not supported yet" in Format.asprintf "\nTuple patterns are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
@ -80,21 +82,20 @@ module Errors = struct
error ~data title message error ~data title message
let unsupported_cst_constr p = let unsupported_cst_constr p =
let title () = "constant constructor" in let title () = "" in
let message () = let message () =
Format.asprintf "constant constructors are not supported yet" in Format.asprintf "\nConstant constructors are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)]
] in in error ~data title message
error ~data title message
let unsupported_non_var_pattern p = let unsupported_non_var_pattern p =
let title () = "pattern is not a variable" in let title () = "" in
let message () = let message () =
Format.asprintf "non-variable patterns in constructors \ Format.asprintf "\nNon-variable patterns in constructors \
are not supported yet" in are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
@ -103,20 +104,20 @@ module Errors = struct
error ~data title message error ~data title message
let simplifying_expr t = let simplifying_expr t =
let title () = "simplifying expression" in let title () = "Simplifying expression" in
let message () = "" in let message () = "" in
let data = [ let data = [
("expression" , ("expression" ,
(** TODO: The labelled arguments should be flowing from the CLI. *) (** TODO: The labelled arguments should be flowing from the CLI. *)
thunk @@ Parser.Cameligo.ParserLog.expr_to_string thunk @@ Parser.Cameligo.ParserLog.expr_to_string
~offsets:true ~mode:`Point t) ~offsets:true ~mode:`Point t)]
] in in error ~data title message
error ~data title message
let only_constructors p = let only_constructors p =
let title () = "constructors in patterns" in let title () = "" in
let message () = let message () =
Format.asprintf "currently, only constructors are supported in patterns" in Format.asprintf "\nCurrently, only constructors are \
supported in patterns.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
@ -125,18 +126,18 @@ module Errors = struct
error ~data title message error ~data title message
let unsupported_sugared_lists region = let unsupported_sugared_lists region =
let title () = "lists in patterns" in let title () = "" in
let message () = let message () =
Format.asprintf "currently, only empty lists and constructors (::) \ Format.asprintf "\nCurrently, only empty lists and \
are supported in patterns" in constructors (::) \
are supported in patterns.\n" in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region) fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
] in in error ~data title message
error ~data title message
let corner_case description = let corner_case description =
let title () = "corner case" in let title () = "Corner case" in
let message () = description in let message () = description in
error title message error title message
@ -286,9 +287,9 @@ let rec simpl_expression :
let simpl_update = fun (u:Raw.update Region.reg) -> let simpl_update = fun (u:Raw.update Region.reg) ->
let (u, loc) = r_split u in let (u, loc) = r_split u in
let (name, path) = simpl_path u.record in let (name, path) = simpl_path u.record in
let record = match path with let record = match path with
| [] -> e_variable (Var.of_name name) | [] -> e_variable (Var.of_name name)
| _ -> e_accessor (e_variable (Var.of_name name)) path in | _ -> e_accessor (e_variable (Var.of_name name)) path in
let updates = u.updates.value.ne_elements in let updates = u.updates.value.ne_elements in
let%bind updates' = let%bind updates' =
let aux (f:Raw.field_path_assign Raw.reg) = let aux (f:Raw.field_path_assign Raw.reg) =
@ -296,7 +297,7 @@ let rec simpl_expression :
let%bind expr = simpl_expression f.field_expr in let%bind expr = simpl_expression f.field_expr in
ok ( List.map (fun (x: _ Raw.reg) -> x.value) (npseq_to_list f.field_path), expr) ok ( List.map (fun (x: _ Raw.reg) -> x.value) (npseq_to_list f.field_path), expr)
in in
bind_map_list aux @@ npseq_to_list updates bind_map_list aux @@ npseq_to_list updates
in in
let aux ur (path, expr) = let aux ur (path, expr) =
let rec aux record = function let rec aux record = function
@ -356,7 +357,7 @@ let rec simpl_expression :
| hd :: tl -> | hd :: tl ->
e_let_in hd e_let_in hd
inline inline
(e_accessor rhs_b_expr [Access_tuple ((List.length prep_vars) - (List.length tl) - 1)]) (e_accessor rhs_b_expr [Access_tuple ((List.length prep_vars) - (List.length tl) - 1)])
(chain_let_in tl body) (chain_let_in tl body)
| [] -> body (* Precluded by corner case assertion above *) | [] -> body (* Precluded by corner case assertion above *)
in in
@ -733,7 +734,7 @@ and simpl_declaration : Raw.declaration -> declaration Location.wrap list result
match v_type with match v_type with
| Some v_type -> ok (to_option (simpl_type_expression v_type)) | Some v_type -> ok (to_option (simpl_type_expression v_type))
| None -> ok None | None -> ok None
in in
let%bind simpl_rhs_expr = simpl_expression rhs_expr in let%bind simpl_rhs_expr = simpl_expression rhs_expr in
ok @@ loc x @@ Declaration_constant (Var.of_name v.value, v_type_expression, inline, simpl_rhs_expr) ) ok @@ loc x @@ Declaration_constant (Var.of_name v.value, v_type_expression, inline, simpl_rhs_expr) )
in let%bind variables = ok @@ npseq_to_list pt.value in let%bind variables = ok @@ npseq_to_list pt.value
@ -834,9 +835,9 @@ and simpl_cases : type a . (Raw.pattern * a) list -> (a, unit) matching result =
| PConstr v -> | PConstr v ->
let const, pat_opt = let const, pat_opt =
match v with match v with
PConstrApp {value; _} -> PConstrApp {value; _} ->
(match value with (match value with
| constr, None -> | constr, None ->
constr, Some (PVar {value = "unit"; region = Region.ghost}) constr, Some (PVar {value = "unit"; region = Region.ghost})
| _ -> value) | _ -> value)
| PSomeApp {value=region,pat; _} -> | PSomeApp {value=region,pat; _} ->

View File

@ -68,9 +68,9 @@ let detect_free_variables (for_body : expression) (local_decl_names : expression
module Errors = struct module Errors = struct
let unsupported_cst_constr p = let unsupported_cst_constr p =
let title () = "constant constructor" in let title () = "" in
let message () = let message () =
Format.asprintf "constant constructors are not supported yet" in Format.asprintf "\nConstant constructors are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
@ -79,11 +79,11 @@ module Errors = struct
error ~data title message error ~data title message
let corner_case ~loc message = let corner_case ~loc message =
let title () = "corner case" in let title () = "\nCorner case" in
let content () = "We don't have a good error message for this case. \ let content () = "We do not have a good error message for this case. \
We are striving find ways to better report them and \ We are striving find ways to better report them and \
find the use-cases that generate them. \ find the use-cases that generate them. \
Please report this to the developers." in Please report this to the developers.\n" in
let data = [ let data = [
("location" , fun () -> loc) ; ("location" , fun () -> loc) ;
("message" , fun () -> message) ; ("message" , fun () -> message) ;
@ -91,9 +91,9 @@ module Errors = struct
error ~data title content error ~data title content
let unknown_predefined_type name = let unknown_predefined_type name =
let title () = "type constants" in let title () = "\nType constants" in
let message () = let message () =
Format.asprintf "unknown predefined type \"%s\"" name.Region.value in Format.asprintf "Unknown predefined type \"%s\".\n" name.Region.value in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region) fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region)
@ -101,10 +101,10 @@ module Errors = struct
error ~data title message error ~data title message
let unsupported_non_var_pattern p = let unsupported_non_var_pattern p =
let title () = "pattern is not a variable" in let title () = "" in
let message () = let message () =
Format.asprintf "non-variable patterns in constructors \ Format.asprintf "\nNon-variable patterns in constructors \
are not supported yet" in are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
@ -113,9 +113,10 @@ module Errors = struct
error ~data title message error ~data title message
let only_constructors p = let only_constructors p =
let title () = "constructors in patterns" in let title () = "" in
let message () = let message () =
Format.asprintf "currently, only constructors are supported in patterns" in Format.asprintf "\nCurrently, only constructors \
are supported in patterns.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
@ -124,9 +125,9 @@ module Errors = struct
error ~data title message error ~data title message
let unsupported_tuple_pattern p = let unsupported_tuple_pattern p =
let title () = "tuple pattern" in let title () = "" in
let message () = let message () =
Format.asprintf "tuple patterns are not supported yet" in Format.asprintf "\nTuple patterns are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in let pattern_loc = Raw.pattern_to_region p in
let data = [ let data = [
("location", ("location",
@ -139,10 +140,10 @@ module Errors = struct
error ~data title message error ~data title message
let unsupported_deep_Some_patterns pattern = let unsupported_deep_Some_patterns pattern =
let title () = "option patterns" in let title () = "" in
let message () = let message () =
Format.asprintf "currently, only variables in Some constructors \ Format.asprintf "\nCurrently, only variables in constructors \
in patterns are supported" in \"Some\" in patterns are supported.\n" in
let pattern_loc = Raw.pattern_to_region pattern in let pattern_loc = Raw.pattern_to_region pattern in
let data = [ let data = [
("location", ("location",
@ -151,10 +152,10 @@ module Errors = struct
error ~data title message error ~data title message
let unsupported_deep_list_patterns cons = let unsupported_deep_list_patterns cons =
let title () = "lists in patterns" in let title () = "" in
let message () = let message () =
Format.asprintf "currently, only empty lists and x::y \ Format.asprintf "\nCurrently, only empty lists and x::y \
are supported in patterns" in are supported in patterns.\n" in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ cons.Region.region) fun () -> Format.asprintf "%a" Location.pp_lift @@ cons.Region.region)
@ -164,7 +165,7 @@ module Errors = struct
(* Logging *) (* Logging *)
let simplifying_instruction t = let simplifying_instruction t =
let title () = "simplifiying instruction" in let title () = "\nSimplifiying instruction" in
let message () = "" in let message () = "" in
(** TODO: The labelled arguments should be flowing from the CLI. *) (** TODO: The labelled arguments should be flowing from the CLI. *)
let data = [ let data = [
@ -1185,7 +1186,7 @@ and simpl_for_int : Raw.for_int -> (_ -> expression result) result = fun fi ->
- references to the iterated value ==> variable `#COMPILER#elt_X` - references to the iterated value ==> variable `#COMPILER#elt_X`
Note: In the case of an inner loop capturing variable from an outer loop Note: In the case of an inner loop capturing variable from an outer loop
the free variable name can be `#COMPILER#acc.Y` and because we do not the free variable name can be `#COMPILER#acc.Y` and because we do not
capture the accumulator record in the inner loop, we don't want to capture the accumulator record in the inner loop, we do not want to
generate `#COMPILER#acc.#COMPILER#acc.Y` but `#COMPILER#acc.Y` generate `#COMPILER#acc.#COMPILER#acc.Y` but `#COMPILER#acc.Y`
5) Append the return value to the body 5) Append the return value to the body
@ -1321,12 +1322,9 @@ and simpl_for_collect : Raw.for_collect -> (_ -> expression result) result = fun
| None -> e_skip () | None -> e_skip ()
| Some seq -> e_let_in (Var.of_name "#COMPILER#folded_record", None) false fold seq in (* TODO fresh *) | Some seq -> e_let_in (Var.of_name "#COMPILER#folded_record", None) false fold seq in (* TODO fresh *)
return_statement @@ final_sequence return_statement @@ final_sequence
(*
and simpl_declaration : Raw.declaration -> declaration Location.wrap result =
*)
and simpl_declaration_list declarations : and simpl_declaration_list declarations :
Ast_simplified.declaration Location.wrap list result = Ast_simplified.declaration Location.wrap list result =
let open Raw in let open Raw in
let rec hook acc = function let rec hook acc = function
[] -> acc [] -> acc
@ -1387,8 +1385,7 @@ and simpl_declaration_list declarations :
Declaration_constant (name, ty_opt, inline, expr) in Declaration_constant (name, ty_opt, inline, expr) in
let res = Location.wrap ~loc new_decl in let res = Location.wrap ~loc new_decl in
hook (bind_list_cons res acc) declarations hook (bind_list_cons res acc) declarations
in in hook (ok @@ []) (List.rev declarations)
hook (ok @@ []) (List.rev declarations)
let simpl_program : Raw.ast -> program result = let simpl_program : Raw.ast -> program result =
fun t -> simpl_declaration_list @@ nseq_to_list t.decl fun t -> simpl_declaration_list @@ nseq_to_list t.decl

View File

@ -11,8 +11,13 @@ and type_expression = {
} }
and declaration = and declaration =
| Declaration_type of (type_variable * type_expression) | Declaration_type of (type_variable * type_expression)
(* A Declaration_constant is described by
* a name
* an optional type annotation
* a boolean indicating whether it should be inlined
* an expression *)
| Declaration_constant of (expression_variable * type_expression option * inline * expression) | Declaration_constant of (expression_variable * type_expression option * inline * expression)
(* | Macro_declaration of macro_declaration *)
and expr = expression and expr = expression

View File

@ -8,8 +8,12 @@ type program = declaration Location.wrap list
and inline = bool and inline = bool
and declaration = and declaration =
(* A Declaration_constant is described by
* a name + a type-annotated expression
* a boolean indicating whether it should be inlined
* the environment before the declaration (the original environment)
* the environment after the declaration (i.e. with that new declaration added to the original environment). *)
| Declaration_constant of (named_expression * inline * (full_environment * full_environment)) | Declaration_constant of (named_expression * inline * (full_environment * full_environment))
(* | Macro_declaration of macro_declaration *)
and environment_element_definition = and environment_element_definition =
| ED_binder | ED_binder

View File

@ -1,52 +1,45 @@
// Test a PascaLIGO function which takes another PascaLIGO function as an argument // Test a PascaLIGO function which takes another PascaLIGO function as an argument
function foobar (const i : int) : int is function foobar (const i : int) : int is
block { begin
function foo (const i : int) : int is function foo (const i : int) : int is i;
i ; function bar (const f : int -> int) : int is f (i);
function bar (const f : int -> int) : int is end with bar (foo);
f ( i ) ;
} with bar (foo) ;
// higher order function with more than one argument // higher order function with more than one argument
function higher2(const i: int; const f: int -> int): int is function higher2(const i : int; const f : int -> int): int is
block { begin
const ii: int = f(i) const ii: int = f (i)
} with ii end with ii
function foobar2 (const i : int) : int is function foobar2 (const i : int) : int is
block { begin
function foo2 (const i : int) : int is function foo2 (const i : int) : int is i
i; end with higher2 (i,foo2)
} with higher2(i,foo2)
const a : int = 0; const a : int = 0;
function foobar3 (const i : int) : int is function foobar3 (const i : int) : int is
block { begin
function foo2 (const i : int) : int is function foo2 (const i : int) : int is a+i
(a+i); end with higher2 (i,foo2)
} with higher2(i,foo2)
function f (const i : int) : int is function f (const i : int) : int is i
i
function g (const i : int) : int is function g (const i : int) : int is f (i)
f(i)
function foobar4 (const i : int) : int is function foobar4 (const i : int) : int is g (g (i))
g(g(i))
function higher3(const i: int; const f: int -> int; const g: int -> int): int is function higher3(const i : int; const f : int -> int; const g : int -> int)
block { : int is
const ii: int = f(g(i)); begin
} with ii const ii : int = f(g(i))
end with ii
function foobar5 (const i : int) : int is function foobar5 (const i : int) : int is
block { begin
const a : int = 0; const a : int = 0;
function foo (const i : int) : int is function foo (const i : int) : int is a+i;
(a+i); function goo (const i : int) : int is foo (i)
function goo (const i : int) : int is end with higher3(i,foo,goo)
foo(i);
} with higher3(i,foo,goo)
function foobar6 (const i : int) : (int->int) is f function foobar6 (const i : int) : int -> int is f

View File

@ -0,0 +1,14 @@
let g (b: int) = b + 3
let f (b: int * int) : int -> int = g
let a (b: int * int -> int -> int) : int = (b (5,3)) 5
let test1 (_: int) =
a f
let n (a, b: int * int): int = a + b
let o (p: int * int -> int): int = p((3, 9))
let test2 (ignore: int) = o(n)

View File

@ -0,0 +1,49 @@
/*
The difference between tuples and arguments is subtle in ReasonLIGO.
`f(a, b);`
f is called with two arguments
`f((a, b));`
f is called with a tuple.
*/
type fun_type = (int, int) => int;
let arguments = (b: int, c: int) => {
b + c;
};
let arguments_type_def = (b: fun_type) => b(5, 3);
let arguments_test = (ignore: int) => arguments_type_def(arguments);
type tuple_type = ((int, int)) => int;
let tuple = ((a, b): (int, int)) => {
a + b;
};
let tuple_type_def = (b: tuple_type) => b((5, 3));
let tuple_test = (ignore: int) => tuple_type_def(tuple);
/* inline */
let arguments_inline = (b: int, c: int) => {
b + c;
};
let arguments_type_def_inline = (b: (int, int) => int) => b(5, 3);
let arguments_test_inline = (ignore: int) => arguments_type_def_inline(arguments_inline);
let tuple_inline = ((a, b): (int, int)) => {
a + b;
};
let tuple_type_def_inline = (b: ((int, int)) => int) => b((5, 3));
let tuple_test_inline = (ignore: int) => tuple_type_def_inline(tuple_inline);

View File

@ -2098,6 +2098,44 @@ let empty_case_religo () : unit result =
in in
ok () ok ()
let tuple_type_mligo () : unit result =
let%bind program = mtype_file "./contracts/tuple_type.mligo" in
let%bind () =
let input _ = e_int 0 in
let expected _ = e_int 8 in
expect_eq_n program "test1" input expected
in
let%bind () =
let input _ = e_int 0 in
let expected _ = e_int 12 in
expect_eq_n program "test2" input expected
in
ok ()
let tuple_type_religo () : unit result =
let%bind program = retype_file "./contracts/tuple_type.religo" in
let%bind () =
let input _ = e_int 0 in
let expected _ = e_int 8 in
expect_eq_n program "arguments_test" input expected
in
let%bind () =
let input _ = e_int 0 in
let expected _ = e_int 8 in
expect_eq_n program "tuple_test" input expected
in
let%bind () =
let input _ = e_int 0 in
let expected _ = e_int 8 in
expect_eq_n program "arguments_test_inline" input expected
in
let%bind () =
let input _ = e_int 0 in
let expected _ = e_int 8 in
expect_eq_n program "tuple_test_inline" input expected
in
ok ()
let main = test_suite "Integration (End to End)" [ let main = test_suite "Integration (End to End)" [
test "bytes unpack" bytes_unpack ; test "bytes unpack" bytes_unpack ;
test "bytes unpack (mligo)" bytes_unpack_mligo ; test "bytes unpack (mligo)" bytes_unpack_mligo ;
@ -2258,4 +2296,6 @@ let main = test_suite "Integration (End to End)" [
test "empty case" empty_case ; test "empty case" empty_case ;
test "empty case (mligo)" empty_case_mligo ; test "empty case (mligo)" empty_case_mligo ;
test "empty case (religo)" empty_case_religo ; test "empty case (religo)" empty_case_religo ;
test "tuple type (mligo)" tuple_type_mligo ;
test "tuple type (religo)" tuple_type_religo ;
] ]

0
vendors/Preproc/.EMain.tag vendored Normal file
View File

0
vendors/Preproc/.Eparser.mly.tag vendored Normal file
View File

0
vendors/Preproc/.ProcMain.tag vendored Normal file
View File

1
vendors/Preproc/.links vendored Normal file
View File

@ -0,0 +1 @@
$HOME/git/OCaml-build/Makefile

33
vendors/Preproc/EMain.ml vendored Normal file
View File

@ -0,0 +1,33 @@
(* This module is only used for testing modules [Escan] and [Eparser]
as units *)
module Lexer = struct
let run () =
match Array.length Sys.argv with
2 -> Escan.trace Sys.argv.(1)
| _ -> prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")
end
module Parser = struct
let run () =
if Array.length Sys.argv = 2
then
match open_in Sys.argv.(1) with
exception Sys_error msg -> prerr_endline msg
| cin ->
let buffer = Lexing.from_channel cin in
let open Error in
let () =
try
let tree = Eparser.pp_expression Escan.token buffer in
let value = Preproc.(eval Env.empty tree)
in (print_string (string_of_bool value);
print_newline ())
with Lexer diag -> print "Lexical" diag
| Parser diag -> print "Syntactical" diag
| Eparser.Error -> print "" ("Parse", mk_seg buffer, 1)
in close_in cin
else prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")
end
let _ = Parser.run()

50
vendors/Preproc/Eparser.mly vendored Normal file
View File

@ -0,0 +1,50 @@
%{
(* Grammar for boolean expressions in preprocessing directives of C# *)
%}
%token True False
%token <string> Ident
%token OR AND EQ NEQ NOT EOL LPAR RPAR
(* Entries *)
%start pp_expression
%type <Etree.t> pp_expression
%%
(* Grammar *)
pp_expression:
e=pp_or_expression EOL { e }
pp_or_expression:
e=pp_and_expression { e }
| e1=pp_or_expression OR e2=pp_and_expression {
Etree.Or (e1,e2)
}
pp_and_expression:
e=pp_equality_expression { e }
| e1=pp_and_expression AND e2=pp_unary_expression {
Etree.And (e1,e2)
}
pp_equality_expression:
e=pp_unary_expression { e }
| e1=pp_equality_expression EQ e2=pp_unary_expression {
Etree.Eq (e1,e2)
}
| e1=pp_equality_expression NEQ e2=pp_unary_expression {
Etree.Neq (e1,e2)
}
pp_unary_expression:
e=pp_primary_expression { e }
| NOT e=pp_unary_expression { Etree.Not e }
pp_primary_expression:
True { Etree.True }
| False { Etree.False }
| id=Ident { Etree.Ident id }
| LPAR e=pp_or_expression RPAR { e }

31
vendors/Preproc/Error.ml vendored Normal file
View File

@ -0,0 +1,31 @@
(* This module provides support for managing and printing errors when
preprocessing C# source files. *)
type message = string
type start = Lexing.position
type stop = Lexing.position
type seg = start * stop
let mk_seg buffer =
Lexing.(lexeme_start_p buffer, lexeme_end_p buffer)
type vline = int
exception Lexer of (message * seg * vline)
exception Parser of (message * seg * vline)
let print (kind: string) (msg, (start, stop), vend) =
let open Lexing in
let delta = vend - stop.pos_lnum in
let vstart = start.pos_lnum + delta
in assert (msg <> "");
prerr_endline
((if kind = "" then msg else kind) ^ " error at line "
^ string_of_int vstart ^ ", char "
^ string_of_int (start.pos_cnum - start.pos_bol)
^ (if stop.pos_lnum = start.pos_lnum
then "--" ^ string_of_int (stop.pos_cnum - stop.pos_bol)
else " to line " ^ string_of_int vend
^ ", char "
^ string_of_int (stop.pos_cnum - stop.pos_bol))
^ (if kind = "" then "." else ":\n" ^ msg))

95
vendors/Preproc/Escan.mll vendored Normal file
View File

@ -0,0 +1,95 @@
{
(* Auxiliary scanner for boolean expressions of the C# preprocessor *)
(* Concrete syntax of tokens. See module [Eparser]. *)
let string_of_token =
let open Eparser
in function True -> "true"
| False -> "false"
| Ident id -> id
| OR -> "||"
| AND -> "&&"
| EQ -> "=="
| NEQ -> "!="
| NOT -> "!"
| LPAR -> "("
| RPAR -> ")"
| EOL -> "EOL"
}
(* Regular expressions for literals *)
(* White space *)
let newline = '\n' | '\r' | "\r\n"
let blank = ' ' | '\t'
(* Unicode escape sequences *)
let digit = ['0'-'9']
let hexdigit = digit | ['A'-'F' 'a'-'f']
let four_hex = hexdigit hexdigit hexdigit hexdigit
let uni_esc = "\\u" four_hex | "\\U" four_hex four_hex
(* Identifiers *)
let lowercase = ['a'-'z']
let uppercase = ['A'-'Z']
let letter = lowercase | uppercase | uni_esc
let start = '_' | letter
let alphanum = letter | digit | '_'
let ident = start alphanum*
(* Rules *)
rule token = parse
blank+ { token lexbuf }
| newline { Lexing.new_line lexbuf; Eparser.EOL }
| eof { Eparser.EOL }
| "true" { Eparser.True }
| "false" { Eparser.False }
| ident as id { Eparser.Ident id }
| '(' { Eparser.LPAR }
| ')' { Eparser.RPAR }
| "||" { Eparser.OR }
| "&&" { Eparser.AND }
| "==" { Eparser.EQ }
| "!=" { Eparser.NEQ }
| "!" { Eparser.NOT }
| "//" { inline_com lexbuf }
| _ as c { let code = Char.code c in
let msg = "Invalid character " ^ String.make 1 c
^ " (" ^ string_of_int code ^ ")."
in raise Error.(Lexer (msg, mk_seg lexbuf, 1))
}
and inline_com = parse
newline { Lexing.new_line lexbuf; Eparser.EOL }
| eof { Eparser.EOL }
| _ { inline_com lexbuf }
{
(* Standalone lexer for debugging purposes. See module [Topexp]. *)
type filename = string
let trace (name: filename) =
match open_in name with
cin ->
let buffer = Lexing.from_channel cin
and cout = stdout in
let rec iter () =
match token buffer with
Eparser.EOL -> close_in cin; close_out cout
| t -> begin
output_string cout (string_of_token t);
output_string cout "\n";
flush cout;
iter ()
end
| exception Error.Lexer diag -> Error.print "Lexical" diag
in iter ()
| exception Sys_error msg -> prerr_endline msg
}

28
vendors/Preproc/Etree.ml vendored Normal file
View File

@ -0,0 +1,28 @@
(* This module defines and exports the type [t] of conditional
expressions of C# directives.
To avoid over-engineering, we moved the definition of the function
[eval] below into the module [Preproc] itself.
*)
type t =
Or of t * t
| And of t * t
| Eq of t * t
| Neq of t * t
| Not of t
| True
| False
| Ident of string
(*
let rec eval env = function
Or (e1,e2) -> eval env e1 || eval env e2
| And (e1,e2) -> eval env e1 && eval env e2
| Eq (e1,e2) -> eval env e1 = eval env e2
| Neq (e1,e2) -> eval env e1 != eval env e2
| Not e -> not (eval env e)
| True -> true
| False -> false
| Ident id -> Preproc.Env.mem id env
*)

21
vendors/Preproc/LICENSE vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Christian Rinderknecht
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

4
vendors/Preproc/Makefile.cfg vendored Normal file
View File

@ -0,0 +1,4 @@
SHELL := dash
BFLAGS := -strict-sequence -w +A-48-4
#OCAMLC := ocamlcp
#OCAMLOPT := ocamloptp

585
vendors/Preproc/Preproc.mll vendored Normal file
View File

@ -0,0 +1,585 @@
(* Preprocessor for C#, to be processed by [ocamllex]. *)
{
(* STRING PROCESSING *)
(* The value of [mk_str len p] ("make string") is a string of length
[len] containing the [len] characters in the list [p], in reverse
order. For instance, [mk_str 3 ['c';'b';'a'] = "abc"]. *)
let mk_str (len: int) (p: char list) : string =
let () = assert (len = List.length p) in
let bytes = Bytes.make len ' ' in
let rec fill i = function
[] -> bytes
| char::l -> Bytes.set bytes i char; fill (i-1) l
in fill (len-1) p |> Bytes.to_string
(* The call [explode s a] is the list made by pushing the characters
in the string [s] on top of [a], in reverse order. For example,
[explode "ba" ['c';'d'] = ['a'; 'b'; 'c'; 'd']]. *)
let explode s acc =
let rec push = function
0 -> acc
| i -> s.[i-1] :: push (i-1)
in push (String.length s)
(* ERROR HANDLING *)
let stop msg seg = raise (Error.Lexer (msg, seg,1))
let fail msg buffer = stop msg (Error.mk_seg buffer)
exception Local_err of Error.message
let handle_err scan buffer =
try scan buffer with Local_err msg -> fail msg buffer
(* LEXING ENGINE *)
(* Copying the current lexeme to [stdout] *)
let copy buffer = print_string (Lexing.lexeme buffer)
(* End of lines *)
let handle_nl buffer = Lexing.new_line buffer; copy buffer
(* C# PREPROCESSOR DIRECTIVES *)
(* The type [mode] defines the two scanning modes of the preprocessor:
either we copy the current characters or we skip them. *)
type mode = Copy | Skip
(* Trace of directives
We keep track of directives #if, #elif, #else, #region and #endregion.
*)
type cond = If of mode | Elif of mode | Else | Region
type trace = cond list
(* The function [reduce_cond] is called when a #endif directive is
found, and the trace (see type [trace] above) needs updating. *)
let rec reduce_cond seg = function
[] -> stop "Dangling #endif." seg
| If mode::trace -> trace, mode
| Region::_ -> stop "Invalid scoping of #region" seg
| _::trace -> reduce_cond seg trace
(* The function [reduce_reg] is called when a #endregion directive is
read, and the trace needs updating. *)
let reduce_reg seg = function
[] -> stop "Dangling #endregion." seg
| Region::trace -> trace
| _ -> stop "Invalid scoping of #endregion" seg
(* The function [extend] is called when encountering conditional
directives #if, #else and #elif. As its name suggests, it extends
the current trace with the current conditional directive, whilst
performing some validity checks. *)
let extend seg cond trace =
match cond, trace with
If _, Elif _::_ ->
stop "Directive #if cannot follow #elif." seg
| Else, Else::_ ->
stop "Directive #else cannot follow #else." seg
| Else, [] ->
stop "Dangling #else." seg
| Elif _, Else::_ ->
stop "Directive #elif cannot follow #else." seg
| Elif _, [] ->
stop "Dangling #elif." seg
| _ -> cond::trace
(* The function [last_mode] seeks the last mode as recorded in the
trace (see type [trace] above). *)
let rec last_mode = function
[] -> assert false
| (If mode | Elif mode)::_ -> mode
| _::trace -> last_mode trace
(* Line offsets
The value [Inline] of type [offset] means that the current location
cannot be reached from the start of the line with only white
space. The same holds for the special value [Prefix 0]. Values of
the form [Prefix n] mean that the current location can be reached
from the start of the line with [n] white spaces (padding). These
distinctions are needed because preprocessor directives cannot
occur inside lines.
*)
type offset = Prefix of int | Inline
let expand = function
Prefix 0 | Inline -> ()
| Prefix n -> print_string (String.make n ' ')
(* Directives *)
let directives = [
"if"; "else"; "elif"; "endif"; "define"; "undef";
"error"; "warning"; "line"; "region"; "endregion";
"include"]
(* Environments and preprocessor expressions
The evaluation of conditional directives may involve symbols whose
value may be defined using #define directives, or undefined by
means of #undef. Therefore, we need to evaluate conditional
expressions in an environment made of a set of defined symbols.
Note that we rely on an external lexer and parser for the
conditional expressions. See modules [Escan] and [Eparser].
*)
module Env = Set.Make(String)
let rec eval env =
let open Etree
in function
Or (e1,e2) -> eval env e1 || eval env e2
| And (e1,e2) -> eval env e1 && eval env e2
| Eq (e1,e2) -> eval env e1 = eval env e2
| Neq (e1,e2) -> eval env e1 != eval env e2
| Not e -> not (eval env e)
| True -> true
| False -> false
| Ident id -> Env.mem id env
let expr env buffer =
let tree = Eparser.pp_expression Escan.token buffer
in if eval env tree then Copy else Skip
(* END OF HEADER *)
}
(* REGULAR EXPRESSIONS *)
(* White space *)
let nl = '\n' | '\r' | "\r\n"
let blank = ' ' | '\t'
(* Integers *)
let int_suf = 'U' | 'u' | 'L' | 'l' | "UL" | "Ul" | "uL"
| "ul" | "LU" | "Lu" | "lU" | "lu"
let digit = ['0'-'9']
let dec = digit+ int_suf?
let hexdigit = digit | ['A'-'F' 'a'-'f']
let hex_pre = "0x" | "0X"
let hexa = hex_pre hexdigit+ int_suf?
let integer = dec | hexa
(* Unicode escape sequences *)
let four_hex = hexdigit hexdigit hexdigit hexdigit
let uni_esc = "\\u" four_hex | "\\U" four_hex four_hex
(* Identifiers *)
let lowercase = ['a'-'z']
let uppercase = ['A'-'Z']
let letter = lowercase | uppercase | uni_esc
let start = '_' | letter
let alphanum = letter | digit | '_'
let ident = start alphanum*
(* Real *)
let decimal = digit+
let exponent = ['e' 'E'] ['+' '-']? decimal
let real_suf = ['F' 'f' 'D' 'd' 'M' 'm']
let real = (decimal? '.')? decimal exponent? real_suf?
(* Characters *)
let single = [^ '\n' '\r']
let esc = "\\'" | "\\\"" | "\\\\" | "\\0" | "\\a" | "\\b" | "\\f"
| "\\n" | "\\r" | "\\t" | "\\v"
let hex_esc = "\\x" hexdigit hexdigit? hexdigit? hexdigit?
let character = single | esc | hex_esc | uni_esc
let char = "'" character "'"
(* Directives *)
let directive = '#' (blank* as space) (ident as id)
(* Rules *)
(* The rule [scan] scans the input buffer for directives, strings,
comments, blanks, new lines and end of file characters. As a
result, either the matched input is copied to [stdout] or not,
depending on the compilation directives. If not copied, new line
characters are output.
Scanning is triggered by the function call [scan env mode offset
trace lexbuf], where [env] is the set of defined symbols
(introduced by `#define'), [mode] specifies whether we are copying
or skipping the input, [offset] informs about the location in the
line (either there is a prefix of blanks, or at least a non-blank
character has been read), and [trace] is the stack of conditional
directives read so far.
The first call is [scan Env.empty Copy (Prefix 0) []], meaning that
we start with an empty environment, that copying the input is
enabled by default, and that we are at the start of a line and no
previous conditional directives have been read yet.
When an "#if" is matched, the trace is extended by the call [extend
lexbuf (If mode) trace], during the evaluation of which the
syntactic validity of having encountered an "#if" is checked (for
example, it would be invalid had an "#elif" been last read). Note
that the current mode is stored in the trace with the current
directive -- that mode may be later restored (see below for some
examples). Moreover, the directive would be deemed invalid if its
current position in the line (that is, its offset) were not
preceeded by blanks or nothing, otherwise the rule [expr] is called
to scan the boolean expression associated with the "#if": if it
evaluates to [true], the result is [Copy], meaning that we may copy
what follows, otherwise skip it -- the actual decision depending on
the current mode. That new mode is used if we were in copy mode,
and the offset is reset to the start of a new line (as we read a
new line in [expr]); otherwise we were in skipping mode and the
value of the conditional expression must be ignored (but not its
syntax), and we continue skipping the input.
When an "#else" is matched, the trace is extended with [Else],
then, if the directive is not at a wrong offset, the rest of the
line is scanned with [pp_newline]. If we were in copy mode, the new
mode toggles to skipping mode; otherwise, the trace is searched for
the last encountered "#if" of "#elif" and the associated mode is
restored.
The case "#elif" is the result of the fusion (in the technical
sense) of the code for dealing with an "#else" followed by an
"#if".
When an "#endif" is matched, the trace is reduced, that is, all
conditional directives are popped until an [If mode'] is found and
[mode'] is restored as the current mode.
Consider the following four cases, where the modes (Copy/Skip) are
located between the lines:
Copy ----+ Copy ----+
#if true | #if true |
Copy | Copy |
#else | #else |
+-- Skip --+ | +-- Skip --+ |
#if true | | | #if false | | |
| Skip | | | Skip | |
#else | | | #else | | |
+-> Skip | | +-> Skip | |
#endif | | #endif | |
Skip <-+ | Skip <-+ |
#endif | #endif |
Copy <---+ Copy <---+
+-- Copy ----+ Copy --+-+
#if false | | #if false | |
| Skip | Skip | |
#else | | #else | |
+-> Copy --+ | +-+-- Copy <-+ |
#if true | | #if false | | |
Copy | | | | Skip |
#else | | #else | | |
Skip | | | +-> Copy |
#endif | | #endif | |
Copy <-+ | +---> Copy |
#endif | #endif |
Copy <---+ Copy <---+
The following four cases feature #elif. Note that we put between
brackets the mode saved for the #elif, which is sometimes restored
later.
Copy --+ Copy --+
#if true | #if true |
Copy | Copy |
#elif true +--[Skip] | #elif false +--[Skip] |
| Skip | | Skip |
#else | | #else | |
+-> Skip | +-> Skip |
#endif | #endif |
Copy <-+ Copy <-+
+-- Copy --+-+ +-- Copy ----+
#if false | | | #if false | |
| Skip | | | Skip |
#elif true +->[Copy] | | #elif false +->[Copy]--+ |
Copy <-+ | Skip | |
#else | #else | |
Skip | Copy <-+ |
#endif | #endif |
Copy <---+ Copy <---+
Note how "#elif" indeed behaves like an "#else" followed by an
"#if", and the mode stored with the data constructor [Elif]
corresponds to the mode before the virtual "#if".
Important note: Comments and strings are recognised as such only in
copy mode, which is a different behaviour from the preprocessor of
GNU GCC, which always does.
*)
rule scan env mode offset trace = parse
nl { handle_nl lexbuf;
scan env mode (Prefix 0) trace lexbuf }
| blank { match offset with
Prefix n -> scan env mode (Prefix (n+1)) trace lexbuf
| Inline -> copy lexbuf;
scan env mode Inline trace lexbuf }
| directive {
if not (List.mem id directives)
then fail "Invalid preprocessing directive." lexbuf
else if offset = Inline
then fail "Directive invalid inside line." lexbuf
else let seg = Error.mk_seg lexbuf in
match id with
"include" ->
let curr_line = Lexing.(lexbuf.lex_curr_p.pos_lnum)
and curr_file = Lexing.(lexbuf.lex_curr_p.pos_fname)
|> Filename.basename
and incl_file = scan_inclusion lexbuf in
let incl_buffer =
open_in incl_file |> Lexing.from_channel in
Printf.printf "# 1 \"%s\" 1\n" incl_file;
cat incl_buffer;
Printf.printf "# %i \"%s\" 2\n" (curr_line+1) curr_file;
scan env mode offset trace lexbuf
| "if" ->
let mode' = expr env lexbuf in
let new_mode = if mode = Copy then mode' else Skip in
let trace' = extend seg (If mode) trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "else" ->
let () = pp_newline lexbuf in
let new_mode =
if mode = Copy then Skip else last_mode trace in
let trace' = extend seg Else trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "elif" ->
let mode' = expr env lexbuf in
let trace', new_mode =
match mode with
Copy -> extend seg (Elif Skip) trace, Skip
| Skip -> let old_mode = last_mode trace
in extend seg (Elif old_mode) trace,
if old_mode = Copy then mode' else Skip
in scan env new_mode (Prefix 0) trace' lexbuf
| "endif" ->
let () = pp_newline lexbuf in
let trace', new_mode = reduce_cond seg trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "define" ->
let id, seg = ident env lexbuf
in if id="true" || id="false"
then let msg = "Symbol \"" ^ id ^ "\" cannot be defined."
in stop msg seg
else if Env.mem id env
then let msg = "Symbol \"" ^ id
^ "\" was already defined."
in stop msg seg
else scan (Env.add id env) mode (Prefix 0) trace lexbuf
| "undef" ->
let id, _ = ident env lexbuf
in scan (Env.remove id env) mode (Prefix 0) trace lexbuf
| "error" ->
stop (message [] lexbuf) seg
| "warning" ->
let start_p, end_p = seg in
let msg = message [] lexbuf in
let open Lexing
in prerr_endline
("Warning at line " ^ string_of_int start_p.pos_lnum
^ ", char "
^ string_of_int (start_p.pos_cnum - start_p.pos_bol)
^ "--" ^ string_of_int (end_p.pos_cnum - end_p.pos_bol)
^ ":\n" ^ msg);
scan env mode (Prefix 0) trace lexbuf
| "region" ->
let msg = message [] lexbuf
in expand offset;
print_endline ("#" ^ space ^ "region" ^ msg);
scan env mode (Prefix 0) (Region::trace) lexbuf
| "endregion" ->
let msg = message [] lexbuf
in expand offset;
print_endline ("#" ^ space ^ "endregion" ^ msg);
scan env mode (Prefix 0) (reduce_reg seg trace) lexbuf
| "line" ->
expand offset;
print_string ("#" ^ space ^ "line");
line_ind lexbuf;
scan env mode (Prefix 0) trace lexbuf
| _ -> assert false
}
| eof { match trace with
[] -> expand offset; flush stdout; (env, trace)
| _ -> fail "Missing #endif." lexbuf }
| '"' { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_norm_str lexbuf
end;
scan env mode Inline trace lexbuf }
| "@\"" { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_verb_str lexbuf
end;
scan env mode Inline trace lexbuf }
| "//" { if mode = Copy then begin
expand offset; copy lexbuf;
in_line_com mode lexbuf
end;
scan env mode Inline trace lexbuf }
| "/*" { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_block_com lexbuf
end;
scan env mode Inline trace lexbuf }
| _ { if mode = Copy then (expand offset; copy lexbuf);
scan env mode Inline trace lexbuf }
(* Support for #define and #undef *)
(* [ident] skips leading blanks, reads one identifier via [__ident],
   then checks with [pp_newline] that nothing but blanks or a
   single-line comment follows on the directive line. *)
and ident env = parse
  blank* { let r = __ident env lexbuf
           in pp_newline lexbuf; r }

(* [__ident] reads the identifier proper and pairs it with its source
   segment, used for locating errors (e.g. redefinition in #define). *)
and __ident env = parse
  ident as id { id, Error.mk_seg lexbuf }
(* Line indicator (#line) *)
(* [line_ind] handles the payload of a #line directive: the blanks are
   echoed verbatim, then the indicator itself is parsed. *)
and line_ind = parse
  blank* as space { print_string space; line_indicator lexbuf }

(* A line indicator is either a decimal line number (optionally
   followed by a file name and a comment, see [end_indicator]) or one
   of the keywords "default"/"hidden". Anything else is an error. *)
and line_indicator = parse
  decimal as ind {
    print_string ind;
    end_indicator lexbuf
  }
| ident as id {
    match id with
      "default" | "hidden" ->
        print_endline (id ^ message [] lexbuf)
    | _ -> fail "Invalid line indicator." lexbuf
  }
| nl | eof { fail "Line indicator expected." lexbuf }

(* After the line number: end of line, end of file, a line comment, or
   a quoted file name (scanned by [in_norm_str]) possibly followed by
   a trailing comment. *)
and end_indicator = parse
  blank* nl { copy lexbuf; handle_nl lexbuf }
| blank* eof { copy lexbuf }
| blank* "//" { copy lexbuf; print_endline (message [] lexbuf) }
| blank+ '"' { copy lexbuf;
               handle_err in_norm_str lexbuf;
               opt_line_com lexbuf }
| _ { fail "Line comment or blank expected." lexbuf }

(* Optional trailing single-line comment after a #line file name. *)
and opt_line_com = parse
  nl { handle_nl lexbuf }
| eof { copy lexbuf }
| blank+ { copy lexbuf; opt_line_com lexbuf }
| "//" { print_endline ("//" ^ message [] lexbuf) }
(* New lines and verbatim sequence of characters *)
(* [pp_newline] enforces that only blanks or a single-line comment may
   follow a directive before the end of the line. *)
and pp_newline = parse
  nl { handle_nl lexbuf }
| blank+ { pp_newline lexbuf }
| "//" { in_line_com Skip lexbuf }
| _ { fail "Only a single-line comment allowed." lexbuf }

(* [message acc] accumulates the characters up to the end of the line
   (or of the file) and returns them as a string, via [mk_str].
   [acc] holds the characters read so far, in reverse order. *)
and message acc = parse
  nl { Lexing.new_line lexbuf;
       mk_str (List.length acc) acc }
| eof { mk_str (List.length acc) acc }
| _ as c { message (c::acc) lexbuf }
(* Comments *)
(* [in_line_com mode] scans to the end of a single-line comment; the
   characters are echoed only when [mode] is [Copy]. *)
and in_line_com mode = parse
  nl { handle_nl lexbuf }
| eof { flush stdout }
| _ { if mode = Copy then copy lexbuf; in_line_com mode lexbuf }

(* [in_block_com] copies a block comment verbatim up to the closing
   "*/". Reaching end of file first is a lexing error. *)
and in_block_com = parse
  nl { handle_nl lexbuf; in_block_com lexbuf }
| "*/" { copy lexbuf }
| eof { raise (Local_err "Unterminated comment.") }
| _ { copy lexbuf; in_block_com lexbuf }
(* Include a file *)
(* [cat] copies the whole remaining input to standard output; it is
   used to splice in the contents of a file named by #include. *)
and cat = parse
  eof { () }
| _ { copy lexbuf; cat lexbuf }
(* Included filename *)
(* [scan_inclusion] skips the blanks after #include and expects a
   double-quoted file name, delegated to [in_inclusion]. *)
and scan_inclusion = parse
  blank+ { scan_inclusion lexbuf }
| '"' { handle_err (in_inclusion [] 0) lexbuf }

(* [in_inclusion acc len] accumulates the file name up to the closing
   quote. [acc] is the characters in reverse order, [len] their count
   (passed to [mk_str]). Newlines and end of file are errors. *)
and in_inclusion acc len = parse
  '"' { mk_str len acc }
| nl { fail "Newline invalid in string." lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ as c { in_inclusion (c::acc) (len+1) lexbuf }
(* Strings *)
(* [in_norm_str] copies an ordinary string literal verbatim up to the
   closing quote; escaped quotes (\") do not terminate it. Newlines
   are invalid and end of file is a lexing error. *)
and in_norm_str = parse
  "\\\"" { copy lexbuf; in_norm_str lexbuf }
| '"' { copy lexbuf }
| nl { fail "Newline invalid in string." lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ { copy lexbuf; in_norm_str lexbuf }

(* [in_verb_str] copies a verbatim (@"...") string: quotes are escaped
   by doubling ("") and newlines are allowed, unlike [in_norm_str]. *)
and in_verb_str = parse
  "\"\"" { copy lexbuf; in_verb_str lexbuf }
| '"' { copy lexbuf }
| nl { handle_nl lexbuf; in_verb_str lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ { copy lexbuf; in_verb_str lexbuf }
{
(* The function [lex] is a wrapper of [scan], which also checks that
the trace is empty at the end. Note that we discard the
environment at the end. *)
(* Run the preprocessor over [buffer], starting in copy mode with an
   empty symbol environment, and assert that every #if/#region opened
   during the scan was properly closed (i.e. the trace is empty). The
   final environment is discarded. *)
let lex buffer =
  let final_trace = snd (scan Env.empty Copy (Prefix 0) [] buffer) in
  assert (final_trace = [])
(* Exported definitions *)
(* Alias for file paths given on the command line. *)
type filename = string

(* [trace name] opens the file [name], runs the preprocessor on its
   contents and emits the result on standard output. Lexical,
   syntactical and parse errors are reported via [Error.print]; a
   failure to open the file is reported on standard error. *)
let trace (name: filename) : unit =
  match open_in name with
    cin ->
      let open Lexing in
      let buffer = from_channel cin in
      (* Record only the base name so diagnostics stay short. *)
      let pos_fname = Filename.basename name in
      let () = buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
      let open Error
      in (try lex buffer with
            Lexer diag -> print "Lexical" diag
          | Parser diag -> print "Syntactical" diag
          | Eparser.Error -> print "" ("Parse", mk_seg buffer, 1));
         close_in cin; flush stdout
  | exception Sys_error msg -> prerr_endline msg
}

5
vendors/Preproc/ProcMain.ml vendored Normal file
View File

@ -0,0 +1,5 @@
(* This is the entry point of the C# preprocessor. See [Makefile.cfg].
   Exactly one argument (the input file) is expected; otherwise a
   usage message is printed on standard error. *)
let () =
  if Array.length Sys.argv = 2
  then Preproc.trace Sys.argv.(1)
  else prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")

1
vendors/Preproc/README.md vendored Normal file
View File

@ -0,0 +1 @@
# A C# preprocessor in OCaml

23
vendors/Preproc/build.sh vendored Executable file
View File

@ -0,0 +1,23 @@
#!/bin/sh
# Standalone build script for the preprocessor (outside dune).
# Echo each command as it runs.
set -x
# Generate the lexers and the parser skeleton.
ocamllex.opt Escan.mll
ocamllex.opt Preproc.mll
menhir -la 1 Eparser.mly
# NOTE(review): several modules below are compiled twice (Etree, Error,
# Escan, Preproc, EMain, ProcMain). Presumably the second pass picks up
# interfaces produced in between (e.g. Eparser regenerated by
# `menhir --infer`) -- confirm before deduplicating.
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Etree.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Error.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Etree.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Error.ml
ocamlfind ocamlc -strict-sequence -w +A-48-4 -c Eparser.mli
# Regenerate the parser with types inferred by the bytecode compiler.
camlcmd="ocamlfind ocamlc -I _i686 -strict-sequence -w +A-48-4 "
menhir --infer --ocamlc="$camlcmd" Eparser.mly
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Escan.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Eparser.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Preproc.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Escan.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Preproc.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c EMain.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c EMain.ml
# Link the expression-scanner driver.
ocamlfind ocamlopt -o EMain.opt Etree.cmx Eparser.cmx Error.cmx Escan.cmx Preproc.cmx EMain.cmx
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c ProcMain.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c ProcMain.ml
# Link the full preprocessor driver.
ocamlfind ocamlopt -o ProcMain.opt Etree.cmx Eparser.cmx Error.cmx Escan.cmx Preproc.cmx ProcMain.cmx

3
vendors/Preproc/clean.sh vendored Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
# Remove all OCaml build artifacts; the leading backslash bypasses any
# interactive alias a user may have set on rm.
\rm -f *.cm* *.o *.byte *.opt

20
vendors/Preproc/dune vendored Normal file
View File

@ -0,0 +1,20 @@
; Generate lexers from the two ocamllex specifications.
(ocamllex Escan Preproc)

; Generate the parser for the boolean expressions of #if/#elif.
(menhir
  (modules Eparser))

(library
  (name PreProc)
; (public_name ligo.preproc)
  (wrapped false)
  (modules Eparser Error Escan Etree Preproc))

; Driver running the full preprocessor on a file (see ProcMain.ml).
(test
  (modules ProcMain)
  (libraries PreProc)
  (name ProcMain))

; Driver exercising the conditional-expression scanner/parser alone.
(test
  (modules EMain)
  (libraries PreProc)
  (name EMain))

View File

@ -539,8 +539,8 @@ let bind_smap (s:_ X_map.String.t) =
let aux k v prev = let aux k v prev =
prev >>? fun prev' -> prev >>? fun prev' ->
v >>? fun v' -> v >>? fun v' ->
ok @@ add k v' prev' in ok @@ add k v' prev'
fold aux s (ok empty) in fold aux s (ok empty)
let bind_fold_smap f init (smap : _ X_map.String.t) = let bind_fold_smap f init (smap : _ X_map.String.t) =
let aux k v prev = let aux k v prev =
@ -558,11 +558,11 @@ let bind_map_list f lst = bind_list (List.map f lst)
let rec bind_map_list_seq f lst = match lst with let rec bind_map_list_seq f lst = match lst with
| [] -> ok [] | [] -> ok []
| hd :: tl -> ( | hd :: tl ->
let%bind hd' = f hd in let%bind hd' = f hd in
let%bind tl' = bind_map_list_seq f tl in let%bind tl' = bind_map_list_seq f tl in
ok (hd' :: tl') ok (hd' :: tl')
)
let bind_map_ne_list : _ -> 'a X_list.Ne.t -> 'b X_list.Ne.t result = let bind_map_ne_list : _ -> 'a X_list.Ne.t -> 'b X_list.Ne.t result =
fun f lst -> bind_ne_list (X_list.Ne.map f lst) fun f lst -> bind_ne_list (X_list.Ne.map f lst)
let bind_iter_list : (_ -> unit result) -> _ list -> unit result = let bind_iter_list : (_ -> unit result) -> _ list -> unit result =
@ -575,11 +575,8 @@ let bind_location (x:_ Location.wrap) =
let bind_map_location f x = bind_location (Location.map f x) let bind_map_location f x = bind_location (Location.map f x)
let bind_fold_list f init lst = let bind_fold_list f init lst =
let aux x y = let aux x y = x >>? fun x -> f x y
x >>? fun x -> in List.fold_left aux (ok init) lst
f x y
in
List.fold_left aux (ok init) lst
module TMap(X : Map.OrderedType) = struct module TMap(X : Map.OrderedType) = struct
module MX = Map.Make(X) module MX = Map.Make(X)
@ -587,8 +584,7 @@ module TMap(X : Map.OrderedType) = struct
let aux k v x = let aux k v x =
x >>? fun x -> x >>? fun x ->
f ~x ~k ~v f ~x ~k ~v
in in MX.fold aux map (ok init)
MX.fold aux map (ok init)
let bind_map_Map f map = let bind_map_Map f map =
let aux k v map' = let aux k v map' =
@ -596,33 +592,26 @@ module TMap(X : Map.OrderedType) = struct
f ~k ~v >>? fun v' -> f ~k ~v >>? fun v' ->
ok @@ MX.update k (function ok @@ MX.update k (function
| None -> Some v' | None -> Some v'
| Some _ -> failwith "key collision, shouldn't happen in bind_map_Map") | Some _ ->
failwith "Key collision: Should not happen in bind_map_Map")
map' map'
in in MX.fold aux map (ok MX.empty)
MX.fold aux map (ok MX.empty)
end end
let bind_fold_pair f init (a,b) = let bind_fold_pair f init (a,b) =
let aux x y = let aux x y = x >>? fun x -> f x y
x >>? fun x -> in List.fold_left aux (ok init) [a;b]
f x y
in
List.fold_left aux (ok init) [a;b]
let bind_fold_triple f init (a,b,c) = let bind_fold_triple f init (a,b,c) =
let aux x y = let aux x y = x >>? fun x -> f x y
x >>? fun x -> in List.fold_left aux (ok init) [a;b;c]
f x y
in
List.fold_left aux (ok init) [a;b;c]
let bind_fold_map_list = fun f acc lst -> let bind_fold_map_list f acc lst =
let rec aux (acc , prev) f = function let rec aux (acc, prev) f = function
| [] -> ok (acc , prev) | [] -> ok (acc, prev)
| hd :: tl -> | hd :: tl ->
f acc hd >>? fun (acc' , hd') -> f acc hd >>? fun (acc' , hd') ->
aux (acc' , hd' :: prev) f tl aux (acc', hd'::prev) f tl in
in
aux (acc , []) f lst >>? fun (acc' , lst') -> aux (acc , []) f lst >>? fun (acc' , lst') ->
ok @@ (acc' , List.rev lst') ok @@ (acc' , List.rev lst')
@ -637,23 +626,18 @@ let bind_fold_map_right_list = fun f acc lst ->
ok lst' ok lst'
let bind_fold_right_list f init lst = let bind_fold_right_list f init lst =
let aux x y = let aux x y = x >>? fun x -> f x y
x >>? fun x -> in X_list.fold_right' aux (ok init) lst
f x y
in
X_list.fold_right' aux (ok init) lst
let bind_find_map_list error f lst = let bind_find_map_list error f lst =
let rec aux lst = let rec aux lst =
match lst with match lst with
| [] -> fail error | [] -> fail error
| hd :: tl -> ( | hd :: tl ->
match f hd with match f hd with
| Error _ -> aux tl | Error _ -> aux tl
| o -> o | o -> o
) in aux lst
in
aux lst
let bind_list_iter f lst = let bind_list_iter f lst =
let aux () y = f y in let aux () y = f y in
@ -663,28 +647,29 @@ let bind_or (a, b) =
match a with match a with
| Ok _ as o -> o | Ok _ as o -> o
| _ -> b | _ -> b
let bind_map_or (fa , fb) c =
bind_or (fa c , fb c)
let bind_lr (type a b) ((a : a result), (b:b result)) : [`Left of a | `Right of b] result = let bind_map_or (fa, fb) c = bind_or (fa c, fb c)
let bind_lr (type a b) ((a : a result), (b:b result))
: [`Left of a | `Right of b] result =
match (a, b) with match (a, b) with
| (Ok _ as o), _ -> map (fun x -> `Left x) o | (Ok _ as o), _ -> map (fun x -> `Left x) o
| _, (Ok _ as o) -> map (fun x -> `Right x) o | _, (Ok _ as o) -> map (fun x -> `Right x) o
| _, Error b -> Error b | _, Error b -> Error b
let bind_lr_lazy (type a b) ((a : a result), (b:unit -> b result)) : [`Left of a | `Right of b] result = let bind_lr_lazy (type a b) ((a : a result), (b:unit -> b result))
: [`Left of a | `Right of b] result =
match a with match a with
| Ok _ as o -> map (fun x -> `Left x) o | Ok _ as o -> map (fun x -> `Left x) o
| _ -> ( | _ -> match b() with
match b() with | Ok _ as o -> map (fun x -> `Right x) o
| Ok _ as o -> map (fun x -> `Right x) o | Error b -> Error b
| Error b -> Error b
)
let bind_and (a, b) = let bind_and (a, b) =
a >>? fun a -> a >>? fun a ->
b >>? fun b -> b >>? fun b ->
ok (a, b) ok (a, b)
let bind_and3 (a, b, c) = let bind_and3 (a, b, c) =
a >>? fun a -> a >>? fun a ->
b >>? fun b -> b >>? fun b ->
@ -692,18 +677,18 @@ let bind_and3 (a, b, c) =
ok (a, b, c) ok (a, b, c)
let bind_pair = bind_and let bind_pair = bind_and
let bind_map_pair f (a, b) = let bind_map_pair f (a, b) =
bind_pair (f a, f b) bind_pair (f a, f b)
let bind_fold_map_pair f acc (a, b) =
f acc a >>? fun (acc' , a') ->
f acc' b >>? fun (acc'' , b') ->
ok (acc'' , (a' , b'))
let bind_map_triple f (a, b, c) =
bind_and3 (f a, f b, f c)
let bind_list_cons v lst = let bind_fold_map_pair f acc (a, b) =
lst >>? fun lst -> f acc a >>? fun (acc', a') ->
ok (v::lst) f acc' b >>? fun (acc'', b') ->
ok (acc'', (a', b'))
let bind_map_triple f (a, b, c) = bind_and3 (f a, f b, f c)
let bind_list_cons v lst = lst >>? fun lst -> ok (v::lst)
let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x -> let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x ->
match fs with match fs with
@ -716,29 +701,23 @@ let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x ->
(** (**
Wraps a call that might trigger an exception in a result. Wraps a call that might trigger an exception in a result.
*) *)
let generic_try err f = let generic_try err f = try ok @@ f () with _ -> fail err
try (
ok @@ f ()
) with _ -> fail err
(** (**
Same, but with a handler that generates an error based on the exception, Same, but with a handler that generates an error based on the exception,
rather than a fixed error. rather than a fixed error.
*) *)
let specific_try handler f = let specific_try handler f =
try ( try ok @@ f () with exn -> fail (handler exn)
ok @@ f ()
) with exn -> fail (handler exn)
(** (**
Same, but tailored to `Sys_error`s, found in `Sys` from `Pervasives`. Same, but tailored to `Sys_error`s, found in `Sys` from `Pervasives`.
*) *)
let sys_try f = let sys_try f =
let handler = function let handler = function
| Sys_error str -> error (thunk "Sys_error") (fun () -> str) Sys_error str -> error (thunk "Sys_error") (fun () -> str)
| exn -> raise exn | exn -> raise exn
in in specific_try handler f
specific_try handler f
(** (**
Same, but for a given command. Same, but for a given command.
@ -746,53 +725,60 @@ let sys_try f =
let sys_command command = let sys_command command =
sys_try (fun () -> Sys.command command) >>? function sys_try (fun () -> Sys.command command) >>? function
| 0 -> ok () | 0 -> ok ()
| n -> fail (fun () -> error (thunk "Nonzero return code") (fun () -> (string_of_int n)) ()) | n -> fail (fun () -> error (thunk "Nonzero return code.")
(fun () -> (string_of_int n)) ())
(** (**
Assertion module. Assertion module.
Would make sense to move it outside Trace. Would make sense to move it outside Trace.
*) *)
module Assert = struct module Assert = struct
let assert_fail ?(msg="didn't fail") = function let assert_fail ?(msg="Did not fail.") = function
| Ok _ -> simple_fail msg Ok _ -> simple_fail msg
| _ -> ok () | _ -> ok ()
let assert_true ?(msg="not true") = function let assert_true ?(msg="Not true.") = function
| true -> ok () true -> ok ()
| false -> simple_fail msg | false -> simple_fail msg
let assert_equal ?msg expected actual = let assert_equal ?msg expected actual =
assert_true ?msg (expected = actual) assert_true ?msg (expected = actual)
let assert_equal_string ?msg expected actual = let assert_equal_string ?msg expected actual =
let msg = let msg =
let default = Format.asprintf "Not equal string : expected \"%s\", got \"%s\"" expected actual in let default =
X_option.unopt ~default msg in Format.asprintf "Not equal string: Expected \"%s\", got \"%s\""
assert_equal ~msg expected actual expected actual
in X_option.unopt ~default msg
in assert_equal ~msg expected actual
let assert_equal_int ?msg expected actual = let assert_equal_int ?msg expected actual =
let msg = let msg =
let default = Format.asprintf "Not equal int : expected %d, got %d" expected actual in let default =
X_option.unopt ~default msg in Format.asprintf "Not equal int : expected %d, got %d"
assert_equal ~msg expected actual expected actual
in X_option.unopt ~default msg
in assert_equal ~msg expected actual
let assert_equal_bool ?msg expected actual = let assert_equal_bool ?msg expected actual =
let msg = let msg =
let default = Format.asprintf "Not equal bool : expected %b, got %b" expected actual in let default =
Format.asprintf "Not equal bool: expected %b, got %b"
expected actual in
X_option.unopt ~default msg in X_option.unopt ~default msg in
assert_equal ~msg expected actual assert_equal ~msg expected actual
let assert_none ?(msg="not a none") opt = match opt with let assert_none ?(msg="Not a None value.") opt = match opt with
| None -> ok () | None -> ok ()
| _ -> simple_fail msg | _ -> simple_fail msg
let assert_list_size ?(msg="lst doesn't have the right size") lst n = let assert_list_size ?(msg="Wrong list size.") lst n =
assert_true ~msg List.(length lst = n) assert_true ~msg List.(length lst = n)
let assert_list_empty ?(msg="lst isn't empty") lst = let assert_list_empty ?(msg="Non-empty list.") lst =
assert_true ~msg List.(length lst = 0) assert_true ~msg List.(length lst = 0)
let assert_list_same_size ?(msg="lists don't have same size") a b = let assert_list_same_size ?(msg="Lists with different lengths.") a b =
assert_true ~msg List.(length a = length b) assert_true ~msg List.(length a = length b)
let assert_list_size_2 ~msg = function let assert_list_size_2 ~msg = function