Merge branch 'rinderknecht-dev' into 'dev'

Refactoring of the front-end towards integration of the local and global builds

See merge request ligolang/ligo!358
This commit is contained in:
Christian Rinderknecht 2020-01-29 16:50:42 +00:00
commit d938dd0492
52 changed files with 2373 additions and 1171 deletions

View File

@ -4,7 +4,7 @@ maintainer: "ligolang@gmail.com"
authors: [ "Galfour" ]
homepage: "https://gitlab.com/ligolang/tezos"
bug-reports: "https://gitlab.com/ligolang/tezos/issues"
synopsis: "A higher-level language which compiles to Michelson"
synopsis: "A high-level language which compiles to Michelson"
dev-repo: "git+https://gitlab.com/ligolang/tezos.git"
license: "MIT"
depends: [
@ -21,6 +21,8 @@ depends: [
"yojson"
"alcotest" { with-test }
"getopt"
"terminal_size"
"pprint"
# work around upstream in-place update
"ocaml-migrate-parsetree" { = "1.4.0" }
]

View File

@ -19,7 +19,7 @@ let source_file n =
let open Arg in
let info =
let docv = "SOURCE_FILE" in
let doc = "$(docv) is the path to the .ligo or .mligo file of the contract." in
let doc = "$(docv) is the path to the smart contract file." in
info ~docv ~doc [] in
required @@ pos n (some string) None info
@ -42,7 +42,7 @@ let syntax =
let open Arg in
let info =
let docv = "SYNTAX" in
let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\" and \"cameligo\". By default, the syntax is guessed from the extension (.ligo and .mligo, respectively)." in
let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\", \"cameligo\" and \"reasonligo\". By default, the syntax is guessed from the extension (.ligo, .mligo, .religo respectively)." in
info ~docv ~doc ["syntax" ; "s"] in
value @@ opt string "auto" info
@ -58,7 +58,7 @@ let init_file =
let open Arg in
let info =
let docv = "INIT_FILE" in
let doc = "$(docv) is the path to the .ligo or .mligo file to be used for context initialization." in
let doc = "$(docv) is the path to smart contract file to be used for context initialization." in
info ~docv ~doc ["init-file"] in
value @@ opt (some string) None info
@ -66,7 +66,7 @@ let amount =
let open Arg in
let info =
let docv = "AMOUNT" in
let doc = "$(docv) is the amount the michelson interpreter will use." in
let doc = "$(docv) is the amount the Michelson interpreter will use." in
info ~docv ~doc ["amount"] in
value @@ opt string "0" info
@ -74,7 +74,7 @@ let sender =
let open Arg in
let info =
let docv = "SENDER" in
let doc = "$(docv) is the sender the michelson interpreter transaction will use." in
let doc = "$(docv) is the sender the Michelson interpreter transaction will use." in
info ~docv ~doc ["sender"] in
value @@ opt (some string) None info
@ -82,7 +82,7 @@ let source =
let open Arg in
let info =
let docv = "SOURCE" in
let doc = "$(docv) is the source the michelson interpreter transaction will use." in
let doc = "$(docv) is the source the Michelson interpreter transaction will use." in
info ~docv ~doc ["source"] in
value @@ opt (some string) None info
@ -90,7 +90,7 @@ let predecessor_timestamp =
let open Arg in
let info =
let docv = "PREDECESSOR_TIMESTAMP" in
let doc = "$(docv) is the pedecessor_timestamp (now value minus one minute) the michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in
let doc = "$(docv) is the predecessor_timestamp (now value minus one minute) the Michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in
info ~docv ~doc ["predecessor-timestamp"] in
value @@ opt (some string) None info
@ -135,58 +135,58 @@ let compile_file =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format $ michelson_code_format) in
let cmdname = "compile-contract" in
let doc = "Subcommand: compile a contract." in
let doc = "Subcommand: Compile a contract." in
(Term.ret term , Term.info ~doc cmdname)
let print_cst =
let print_cst =
let f source_file syntax display_format = (
toplevel ~display_format @@
let%bind pp = Compile.Of_source.pretty_print source_file (Syntax_name syntax) in
let%bind pp = Compile.Of_source.pretty_print source_file (Syntax_name syntax) in
ok @@ Format.asprintf "%s \n" (Buffer.contents pp)
)
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-cst" in
let doc = "Subcommand: print the cst. Warning: intended for development of LIGO and can break at any time." in
let cmdname = "print-cst" in
let doc = "Subcommand: Print the CST.\nWarning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let print_ast =
let print_ast =
let f source_file syntax display_format = (
toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
ok @@ Format.asprintf "%a\n" Compile.Of_simplified.pretty_print simplified
ok @@ Format.asprintf "%a\n" Compile.Of_simplified.pretty_print simplified
)
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-ast" in
let doc = "Subcommand: print the ast. Warning: intended for development of LIGO and can break at any time." in
let cmdname = "print-ast" in
let doc = "Subcommand: Print the AST.\n Warning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let print_typed_ast =
let print_typed_ast =
let f source_file syntax display_format = (
toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed,_ = Compile.Of_simplified.compile simplified in
ok @@ Format.asprintf "%a\n" Compile.Of_typed.pretty_print typed
ok @@ Format.asprintf "%a\n" Compile.Of_typed.pretty_print typed
)
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-typed-ast" in
let doc = "Subcommand: print the typed ast. Warning: intended for development of LIGO and can break at any time." in
let cmdname = "print-typed-ast" in
let doc = "Subcommand: Print the typed AST.\n Warning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let print_mini_c =
let print_mini_c =
let f source_file syntax display_format = (
toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed,_ = Compile.Of_simplified.compile simplified in
let%bind mini_c = Compile.Of_typed.compile typed in
ok @@ Format.asprintf "%a\n" Compile.Of_mini_c.pretty_print mini_c
ok @@ Format.asprintf "%a\n" Compile.Of_mini_c.pretty_print mini_c
)
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-mini-c" in
let doc = "Subcommand: print mini c. Warning: intended for development of LIGO and can break at any time." in
let cmdname = "print-mini-c" in
let doc = "Subcommand: Print Mini-C. Warning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let measure_contract =
@ -203,7 +203,7 @@ let measure_contract =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format) in
let cmdname = "measure-contract" in
let doc = "Subcommand: measure a contract's compiled size in bytes." in
let doc = "Subcommand: Measure a contract's compiled size in bytes." in
(Term.ret term , Term.info ~doc cmdname)
let compile_parameter =
@ -232,7 +232,7 @@ let compile_parameter =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in
let cmdname = "compile-parameter" in
let doc = "Subcommand: compile parameters to a michelson expression. The resulting michelson expression can be passed as an argument in a transaction which calls a contract." in
let doc = "Subcommand: Compile parameters to a Michelson expression. The resulting Michelson expression can be passed as an argument in a transaction which calls a contract." in
(Term.ret term , Term.info ~doc cmdname)
let interpret =
@ -246,7 +246,7 @@ let interpret =
let env = Ast_typed.program_environment typed_prg in
ok (mini_c_prg,state,env)
| None -> ok ([],Typer.Solver.initial_state,Ast_typed.Environment.full_empty) in
let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) init_file in
let%bind simplified_exp = Compile.Of_source.compile_expression v_syntax expression in
let%bind (typed_exp,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_exp in
@ -265,7 +265,7 @@ let interpret =
let term =
Term.(const f $ expression "EXPRESSION" 0 $ init_file $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format ) in
let cmdname = "interpret" in
let doc = "Subcommand: interpret the expression in the context initialized by the provided source file." in
let doc = "Subcommand: Interpret the expression in the context initialized by the provided source file." in
(Term.ret term , Term.info ~doc cmdname)
@ -295,7 +295,7 @@ let compile_storage =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "STORAGE" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in
let cmdname = "compile-storage" in
let doc = "Subcommand: compile an initial storage in ligo syntax to a michelson expression. The resulting michelson expression can be passed as an argument in a transaction which originates a contract." in
let doc = "Subcommand: Compile an initial storage in ligo syntax to a Michelson expression. The resulting Michelson expression can be passed as an argument in a transaction which originates a contract." in
(Term.ret term , Term.info ~doc cmdname)
let dry_run =
@ -330,7 +330,7 @@ let dry_run =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ expression "STORAGE" 3 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "dry-run" in
let doc = "Subcommand: run a smart-contract with the given storage and input." in
let doc = "Subcommand: Run a smart-contract with the given storage and input." in
(Term.ret term , Term.info ~doc cmdname)
let run_function =
@ -361,7 +361,7 @@ let run_function =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "run-function" in
let doc = "Subcommand: run a function with the given parameter." in
let doc = "Subcommand: Run a function with the given parameter." in
(Term.ret term , Term.info ~doc cmdname)
let evaluate_value =
@ -380,7 +380,7 @@ let evaluate_value =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "evaluate-value" in
let doc = "Subcommand: evaluate a given definition." in
let doc = "Subcommand: Evaluate a given definition." in
(Term.ret term , Term.info ~doc cmdname)
let compile_expression =
@ -399,7 +399,7 @@ let compile_expression =
let term =
Term.(const f $ expression "" 1 $ req_syntax 0 $ display_format $ michelson_code_format) in
let cmdname = "compile-expression" in
let doc = "Subcommand: compile to a michelson value." in
let doc = "Subcommand: Compile to a michelson value." in
(Term.ret term , Term.info ~doc cmdname)
let dump_changelog =
@ -420,7 +420,7 @@ let list_declarations =
let term =
Term.(const f $ source_file 0 $ syntax ) in
let cmdname = "list-declarations" in
let doc = "Subcommand: list all the top-level decalarations." in
let doc = "Subcommand: List all the top-level declarations." in
(Term.ret term , Term.info ~doc cmdname)
let run ?argv () =

View File

@ -18,56 +18,56 @@ let%expect_test _ =
Dump the LIGO changelog to stdout.
compile-contract
Subcommand: compile a contract.
Subcommand: Compile a contract.
compile-expression
Subcommand: compile to a michelson value.
Subcommand: Compile to a michelson value.
compile-parameter
Subcommand: compile parameters to a michelson expression. The
resulting michelson expression can be passed as an argument in a
Subcommand: Compile parameters to a Michelson expression. The
resulting Michelson expression can be passed as an argument in a
transaction which calls a contract.
compile-storage
Subcommand: compile an initial storage in ligo syntax to a
michelson expression. The resulting michelson expression can be
Subcommand: Compile an initial storage in ligo syntax to a
Michelson expression. The resulting Michelson expression can be
passed as an argument in a transaction which originates a
contract.
dry-run
Subcommand: run a smart-contract with the given storage and input.
Subcommand: Run a smart-contract with the given storage and input.
evaluate-value
Subcommand: evaluate a given definition.
Subcommand: Evaluate a given definition.
interpret
Subcommand: interpret the expression in the context initialized by
Subcommand: Interpret the expression in the context initialized by
the provided source file.
list-declarations
Subcommand: list all the top-level decalarations.
Subcommand: List all the top-level declarations.
measure-contract
Subcommand: measure a contract's compiled size in bytes.
Subcommand: Measure a contract's compiled size in bytes.
print-ast
Subcommand: print the ast. Warning: intended for development of
Subcommand: Print the AST. Warning: Intended for development of
LIGO and can break at any time.
print-cst
Subcommand: print the cst. Warning: intended for development of
Subcommand: Print the CST. Warning: Intended for development of
LIGO and can break at any time.
print-mini-c
Subcommand: print mini c. Warning: intended for development of
Subcommand: Print Mini-C. Warning: Intended for development of
LIGO and can break at any time.
print-typed-ast
Subcommand: print the typed ast. Warning: intended for development
Subcommand: Print the typed AST. Warning: Intended for development
of LIGO and can break at any time.
run-function
Subcommand: run a function with the given parameter.
Subcommand: Run a function with the given parameter.
OPTIONS
--help[=FMT] (default=auto)
@ -94,56 +94,56 @@ let%expect_test _ =
Dump the LIGO changelog to stdout.
compile-contract
Subcommand: compile a contract.
Subcommand: Compile a contract.
compile-expression
Subcommand: compile to a michelson value.
Subcommand: Compile to a michelson value.
compile-parameter
Subcommand: compile parameters to a michelson expression. The
resulting michelson expression can be passed as an argument in a
Subcommand: Compile parameters to a Michelson expression. The
resulting Michelson expression can be passed as an argument in a
transaction which calls a contract.
compile-storage
Subcommand: compile an initial storage in ligo syntax to a
michelson expression. The resulting michelson expression can be
Subcommand: Compile an initial storage in ligo syntax to a
Michelson expression. The resulting Michelson expression can be
passed as an argument in a transaction which originates a
contract.
dry-run
Subcommand: run a smart-contract with the given storage and input.
Subcommand: Run a smart-contract with the given storage and input.
evaluate-value
Subcommand: evaluate a given definition.
Subcommand: Evaluate a given definition.
interpret
Subcommand: interpret the expression in the context initialized by
Subcommand: Interpret the expression in the context initialized by
the provided source file.
list-declarations
Subcommand: list all the top-level decalarations.
Subcommand: List all the top-level declarations.
measure-contract
Subcommand: measure a contract's compiled size in bytes.
Subcommand: Measure a contract's compiled size in bytes.
print-ast
Subcommand: print the ast. Warning: intended for development of
Subcommand: Print the AST. Warning: Intended for development of
LIGO and can break at any time.
print-cst
Subcommand: print the cst. Warning: intended for development of
Subcommand: Print the CST. Warning: Intended for development of
LIGO and can break at any time.
print-mini-c
Subcommand: print mini c. Warning: intended for development of
Subcommand: Print Mini-C. Warning: Intended for development of
LIGO and can break at any time.
print-typed-ast
Subcommand: print the typed ast. Warning: intended for development
Subcommand: Print the typed AST. Warning: Intended for development
of LIGO and can break at any time.
run-function
Subcommand: run a function with the given parameter.
Subcommand: Run a function with the given parameter.
OPTIONS
--help[=FMT] (default=auto)
@ -157,7 +157,7 @@ let%expect_test _ =
run_ligo_good [ "compile-contract" ; "--help" ] ;
[%expect {|
NAME
ligo-compile-contract - Subcommand: compile a contract.
ligo-compile-contract - Subcommand: Compile a contract.
SYNOPSIS
ligo compile-contract [OPTION]... SOURCE_FILE ENTRY_POINT
@ -167,8 +167,7 @@ let%expect_test _ =
ENTRY_POINT is entry-point that will be compiled.
SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the
contract.
SOURCE_FILE is the path to the smart contract file.
OPTIONS
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
@ -191,8 +190,9 @@ let%expect_test _ =
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--version
Show version information. |} ] ;
@ -200,8 +200,8 @@ let%expect_test _ =
run_ligo_good [ "compile-parameter" ; "--help" ] ;
[%expect {|
NAME
ligo-compile-parameter - Subcommand: compile parameters to a michelson
expression. The resulting michelson expression can be passed as an
ligo-compile-parameter - Subcommand: Compile parameters to a Michelson
expression. The resulting Michelson expression can be passed as an
argument in a transaction which calls a contract.
SYNOPSIS
@ -216,12 +216,11 @@ let%expect_test _ =
PARAMETER_EXPRESSION is the expression that will be compiled.
SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the
contract.
SOURCE_FILE is the path to the smart contract file.
OPTIONS
--amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use.
AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@ -242,21 +241,22 @@ let%expect_test _ =
are 'text' (default), 'json' and 'hex'.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
one minute) the michelson interpreter will use (e.g.
PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER
SENDER is the sender the michelson interpreter transaction will
SENDER is the sender the Michelson interpreter transaction will
use.
--source=SOURCE
SOURCE is the source the michelson interpreter transaction will
SOURCE is the source the Michelson interpreter transaction will
use.
--version
@ -265,8 +265,8 @@ let%expect_test _ =
run_ligo_good [ "compile-storage" ; "--help" ] ;
[%expect {|
NAME
ligo-compile-storage - Subcommand: compile an initial storage in ligo
syntax to a michelson expression. The resulting michelson expression
ligo-compile-storage - Subcommand: Compile an initial storage in ligo
syntax to a Michelson expression. The resulting Michelson expression
can be passed as an argument in a transaction which originates a
contract.
@ -279,15 +279,14 @@ let%expect_test _ =
ENTRY_POINT is entry-point that will be compiled.
SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the
contract.
SOURCE_FILE is the path to the smart contract file.
STORAGE_EXPRESSION (required)
STORAGE_EXPRESSION is the expression that will be compiled.
OPTIONS
--amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use.
AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@ -308,21 +307,22 @@ let%expect_test _ =
are 'text' (default), 'json' and 'hex'.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
one minute) the michelson interpreter will use (e.g.
PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER
SENDER is the sender the michelson interpreter transaction will
SENDER is the sender the Michelson interpreter transaction will
use.
--source=SOURCE
SOURCE is the source the michelson interpreter transaction will
SOURCE is the source the Michelson interpreter transaction will
use.
--version
@ -331,7 +331,7 @@ let%expect_test _ =
run_ligo_good [ "dry-run" ; "--help" ] ;
[%expect {|
NAME
ligo-dry-run - Subcommand: run a smart-contract with the given storage
ligo-dry-run - Subcommand: Run a smart-contract with the given storage
and input.
SYNOPSIS
@ -346,15 +346,14 @@ let%expect_test _ =
PARAMETER_EXPRESSION is the expression that will be compiled.
SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the
contract.
SOURCE_FILE is the path to the smart contract file.
STORAGE_EXPRESSION (required)
STORAGE_EXPRESSION is the expression that will be compiled.
OPTIONS
--amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use.
AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@ -370,21 +369,22 @@ let%expect_test _ =
`plain' whenever the TERM env var is `dumb' or undefined.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
one minute) the michelson interpreter will use (e.g.
PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER
SENDER is the sender the michelson interpreter transaction will
SENDER is the sender the Michelson interpreter transaction will
use.
--source=SOURCE
SOURCE is the source the michelson interpreter transaction will
SOURCE is the source the Michelson interpreter transaction will
use.
--version
@ -393,7 +393,7 @@ let%expect_test _ =
run_ligo_good [ "run-function" ; "--help" ] ;
[%expect {|
NAME
ligo-run-function - Subcommand: run a function with the given
ligo-run-function - Subcommand: Run a function with the given
parameter.
SYNOPSIS
@ -408,12 +408,11 @@ let%expect_test _ =
PARAMETER_EXPRESSION is the expression that will be compiled.
SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the
contract.
SOURCE_FILE is the path to the smart contract file.
OPTIONS
--amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use.
AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@ -429,21 +428,22 @@ let%expect_test _ =
`plain' whenever the TERM env var is `dumb' or undefined.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
one minute) the michelson interpreter will use (e.g.
PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER
SENDER is the sender the michelson interpreter transaction will
SENDER is the sender the Michelson interpreter transaction will
use.
--source=SOURCE
SOURCE is the source the michelson interpreter transaction will
SOURCE is the source the Michelson interpreter transaction will
use.
--version
@ -452,7 +452,7 @@ let%expect_test _ =
run_ligo_good [ "evaluate-value" ; "--help" ] ;
[%expect {|
NAME
ligo-evaluate-value - Subcommand: evaluate a given definition.
ligo-evaluate-value - Subcommand: Evaluate a given definition.
SYNOPSIS
ligo evaluate-value [OPTION]... SOURCE_FILE ENTRY_POINT
@ -462,12 +462,11 @@ let%expect_test _ =
ENTRY_POINT is entry-point that will be compiled.
SOURCE_FILE (required)
SOURCE_FILE is the path to the .ligo or .mligo file of the
contract.
SOURCE_FILE is the path to the smart contract file.
OPTIONS
--amount=AMOUNT (absent=0)
AMOUNT is the amount the michelson interpreter will use.
AMOUNT is the amount the Michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@ -483,21 +482,22 @@ let%expect_test _ =
`plain' whenever the TERM env var is `dumb' or undefined.
--predecessor-timestamp=PREDECESSOR_TIMESTAMP
PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus
one minute) the michelson interpreter will use (e.g.
PREDECESSOR_TIMESTAMP is the predecessor_timestamp (now value
minus one minute) the Michelson interpreter will use (e.g.
'2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
syntaxes are "pascaligo", "cameligo" and "reasonligo". By default,
the syntax is guessed from the extension (.ligo, .mligo, .religo
respectively).
--sender=SENDER
SENDER is the sender the michelson interpreter transaction will
SENDER is the sender the Michelson interpreter transaction will
use.
--source=SOURCE
SOURCE is the source the michelson interpreter transaction will
SOURCE is the source the Michelson interpreter transaction will
use.
--version
@ -506,7 +506,7 @@ let%expect_test _ =
run_ligo_good [ "compile-expression" ; "--help" ] ;
[%expect {|
NAME
ligo-compile-expression - Subcommand: compile to a michelson value.
ligo-compile-expression - Subcommand: Compile to a michelson value.
SYNOPSIS
ligo compile-expression [OPTION]... SYNTAX _EXPRESSION

View File

@ -3,9 +3,10 @@ open Cli_expect
let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.ligo" ; "main" ] ;
[%expect {|
ligo: lexer error: The string starting here is interrupted by a line break.
ligo: : Lexical error in file "broken_string.ligo", line 1, characters 18-19:
The string starting here is interrupted by a line break.
Hint: Remove the break, close the string before or insert a backslash.
{"parser_loc":"in file \"broken_string.ligo\", line 1, characters 18-19"}
{}
If you're not sure how to fix this error, you can
@ -19,9 +20,10 @@ ligo: lexer error: The string starting here is interrupted by a line break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: The string starting here is interrupted by a line break.
ligo: : Lexical error in file "broken_string.mligo", line 1, characters 8-9:
The string starting here is interrupted by a line break.
Hint: Remove the break, close the string before or insert a backslash.
{"parser_loc":"in file \"broken_string.mligo\", line 1, characters 8-9"}
{}
If you're not sure how to fix this error, you can
@ -35,9 +37,10 @@ ligo: lexer error: The string starting here is interrupted by a line break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.religo" ; "main" ] ;
[%expect {|
ligo: lexer error: The string starting here is interrupted by a line break.
ligo: : Lexical error in file "broken_string.religo", line 1, characters 8-9:
The string starting here is interrupted by a line break.
Hint: Remove the break, close the string before or insert a backslash.
{"parser_loc":"in file \"broken_string.religo\", line 1, characters 8-9"}
{}
If you're not sure how to fix this error, you can
@ -51,9 +54,10 @@ ligo: lexer error: The string starting here is interrupted by a line break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.ligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Negative byte sequence.
ligo: : Lexical error in file "negative_byte_sequence.ligo", line 1, characters 18-23:
Negative byte sequence.
Hint: Remove the leading minus sign.
{"parser_loc":"in file \"negative_byte_sequence.ligo\", line 1, characters 18-23"}
{}
If you're not sure how to fix this error, you can
@ -67,9 +71,10 @@ ligo: lexer error: Negative byte sequence.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Negative byte sequence.
ligo: : Lexical error in file "negative_byte_sequence.mligo", line 1, characters 8-13:
Negative byte sequence.
Hint: Remove the leading minus sign.
{"parser_loc":"in file \"negative_byte_sequence.mligo\", line 1, characters 8-13"}
{}
If you're not sure how to fix this error, you can
@ -83,9 +88,10 @@ ligo: lexer error: Negative byte sequence.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.religo" ; "main" ] ;
[%expect {|
ligo: lexer error: Negative byte sequence.
ligo: : Lexical error in file "negative_byte_sequence.religo", line 1, characters 8-13:
Negative byte sequence.
Hint: Remove the leading minus sign.
{"parser_loc":"in file \"negative_byte_sequence.religo\", line 1, characters 8-13"}
{}
If you're not sure how to fix this error, you can
@ -99,9 +105,10 @@ ligo: lexer error: Negative byte sequence.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.ligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Reserved name: arguments.
ligo: : Lexical error in file "reserved_name.ligo", line 1, characters 4-13:
Reserved name: arguments.
Hint: Change the name.
{"parser_loc":"in file \"reserved_name.ligo\", line 1, characters 4-13"}
{}
If you're not sure how to fix this error, you can
@ -115,9 +122,10 @@ ligo: lexer error: Reserved name: arguments.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.religo" ; "main" ] ;
[%expect {|
ligo: lexer error: Reserved name: end.
ligo: : Lexical error in file "reserved_name.religo", line 1, characters 4-7:
Reserved name: end.
Hint: Change the name.
{"parser_loc":"in file \"reserved_name.religo\", line 1, characters 4-7"}
{}
If you're not sure how to fix this error, you can
@ -131,9 +139,10 @@ ligo: lexer error: Reserved name: end.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Reserved name: object.
ligo: : Lexical error in file "reserved_name.mligo", line 1, characters 4-10:
Reserved name: object.
Hint: Change the name.
{"parser_loc":"in file \"reserved_name.mligo\", line 1, characters 4-10"}
{}
If you're not sure how to fix this error, you can
@ -147,8 +156,9 @@ ligo: lexer error: Reserved name: object.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.ligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Unexpected character '\239'.
{"parser_loc":"in file \"unexpected_character.ligo\", line 1, characters 18-19"}
ligo: : Lexical error in file "unexpected_character.ligo", line 1, characters 18-19:
Unexpected character '\239'.
{}
If you're not sure how to fix this error, you can
@ -162,8 +172,9 @@ ligo: lexer error: Unexpected character '\239'.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Unexpected character '\239'.
{"parser_loc":"in file \"unexpected_character.mligo\", line 1, characters 8-9"}
ligo: : Lexical error in file "unexpected_character.mligo", line 1, characters 8-9:
Unexpected character '\239'.
{}
If you're not sure how to fix this error, you can
@ -177,8 +188,9 @@ ligo: lexer error: Unexpected character '\239'.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.religo" ; "main" ] ;
[%expect {|
ligo: lexer error: Unexpected character '\239'.
{"parser_loc":"in file \"unexpected_character.religo\", line 1, characters 8-9"}
ligo: : Lexical error in file "unexpected_character.religo", line 1, characters 8-9:
Unexpected character '\239'.
{}
If you're not sure how to fix this error, you can
@ -192,9 +204,10 @@ ligo: lexer error: Unexpected character '\239'.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unterminated_comment.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Unterminated comment.
ligo: : Lexical error in file "unterminated_comment.mligo", line 1, characters 0-2:
Unterminated comment.
Hint: Close with "*)".
{"parser_loc":"in file \"unterminated_comment.mligo\", line 1, characters 0-2"}
{}
If you're not sure how to fix this error, you can
@ -208,9 +221,10 @@ ligo: lexer error: Unterminated comment.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.ligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Invalid symbol.
ligo: : Lexical error in file "invalid_symbol.ligo", line 1, characters 17-20:
Invalid symbol.
Hint: Check the LIGO syntax you use.
{"parser_loc":"in file \"invalid_symbol.ligo\", line 1, characters 17-20"}
{}
If you're not sure how to fix this error, you can
@ -224,9 +238,10 @@ ligo: lexer error: Invalid symbol.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Invalid symbol.
ligo: : Lexical error in file "invalid_symbol.mligo", line 1, characters 10-13:
Invalid symbol.
Hint: Check the LIGO syntax you use.
{"parser_loc":"in file \"invalid_symbol.mligo\", line 1, characters 10-13"}
{}
If you're not sure how to fix this error, you can
@ -240,9 +255,10 @@ ligo: lexer error: Invalid symbol.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.religo" ; "main" ] ;
[%expect {|
ligo: lexer error: Invalid symbol.
ligo: : Lexical error in file "invalid_symbol.religo", line 1, characters 10-11:
Invalid symbol.
Hint: Check the LIGO syntax you use.
{"parser_loc":"in file \"invalid_symbol.religo\", line 1, characters 10-11"}
{}
If you're not sure how to fix this error, you can
@ -256,9 +272,10 @@ ligo: lexer error: Invalid symbol.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.ligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Missing break.
ligo: : Lexical error in file "missing_break.ligo", line 1, characters 18-18:
Missing break.
Hint: Insert some space.
{"parser_loc":"in file \"missing_break.ligo\", line 1, characters 18-18"}
{}
If you're not sure how to fix this error, you can
@ -272,9 +289,10 @@ ligo: lexer error: Missing break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Missing break.
ligo: : Lexical error in file "missing_break.mligo", line 1, characters 11-11:
Missing break.
Hint: Insert some space.
{"parser_loc":"in file \"missing_break.mligo\", line 1, characters 11-11"}
{}
If you're not sure how to fix this error, you can
@ -288,9 +306,10 @@ ligo: lexer error: Missing break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.religo" ; "main" ] ;
[%expect {|
ligo: lexer error: Missing break.
ligo: : Lexical error in file "missing_break.religo", line 1, characters 11-11:
Missing break.
Hint: Insert some space.
{"parser_loc":"in file \"missing_break.religo\", line 1, characters 11-11"}
{}
If you're not sure how to fix this error, you can
@ -304,9 +323,10 @@ ligo: lexer error: Missing break.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.ligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Invalid character in string.
ligo: : Lexical error in file "invalid_character_in_string.ligo", line 1, characters 19-20:
Invalid character in string.
Hint: Remove or replace the character.
{"parser_loc":"in file \"invalid_character_in_string.ligo\", line 1, characters 19-20"}
{}
If you're not sure how to fix this error, you can
@ -320,9 +340,10 @@ ligo: lexer error: Invalid character in string.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.mligo" ; "main" ] ;
[%expect {|
ligo: lexer error: Invalid character in string.
ligo: : Lexical error in file "invalid_character_in_string.mligo", line 1, characters 9-10:
Invalid character in string.
Hint: Remove or replace the character.
{"parser_loc":"in file \"invalid_character_in_string.mligo\", line 1, characters 9-10"}
{}
If you're not sure how to fix this error, you can
@ -336,9 +357,10 @@ ligo: lexer error: Invalid character in string.
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.religo" ; "main" ] ;
[%expect {|
ligo: lexer error: Invalid character in string.
ligo: : Lexical error in file "invalid_character_in_string.religo", line 1, characters 9-10:
Invalid character in string.
Hint: Remove or replace the character.
{"parser_loc":"in file \"invalid_character_in_string.religo\", line 1, characters 9-10"}
{}
If you're not sure how to fix this error, you can

View File

@ -3,8 +3,8 @@ open Cli_expect
let%expect_test _ =
run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_syntax.ligo" ; "main" ] ;
[%expect {|
ligo: parser error: Parse error at "-" from (1, 16) to (1, 17). In file "|../../test/contracts/negative/error_syntax.ligo"
{"parser_loc":"in file \"\", line 1, characters 16-17"}
ligo: : Parse error in file "error_syntax.ligo", line 1, characters 16-17, after "bar" and before "-".
{}
If you're not sure how to fix this error, you can

View File

@ -1,173 +1,171 @@
open Trace
type s_syntax = Syntax_name of string
type v_syntax = Pascaligo | Cameligo | ReasonLIGO
type v_syntax = PascaLIGO | CameLIGO | ReasonLIGO
let syntax_to_variant : s_syntax -> string option -> v_syntax result =
fun syntax source_filename ->
let subr s n =
String.sub s (String.length s - n) n in
let endswith s suffix =
let suffixlen = String.length suffix in
( String.length s >= suffixlen
&& String.equal (subr s suffixlen) suffix)
in
let (Syntax_name syntax) = syntax in
match (syntax , source_filename) with
| "auto" , Some sf when endswith sf ".ligo" -> ok Pascaligo
| "auto" , Some sf when endswith sf ".mligo" -> ok Cameligo
| "auto" , Some sf when endswith sf ".religo" -> ok ReasonLIGO
| "auto" , _ -> simple_fail "cannot auto-detect syntax, pleas use -s name_of_syntax"
| "pascaligo" , _ -> ok Pascaligo
| "cameligo" , _ -> ok Cameligo
| "reasonligo", _ -> ok ReasonLIGO
| _ -> simple_fail "unrecognized parser"
let syntax_to_variant (Syntax_name syntax) source =
match syntax, source with
"auto", Some sf ->
(match Filename.extension sf with
".ligo" | ".pligo" -> ok PascaLIGO
| ".mligo" -> ok CameLIGO
| ".religo" -> ok ReasonLIGO
| _ -> simple_fail "Cannot auto-detect the syntax.\n\
Hint: Use -s <name of syntax>\n")
| ("pascaligo" | "PascaLIGO"), _ -> ok PascaLIGO
| ("cameligo" | "CameLIGO"), _ -> ok CameLIGO
| ("reasonligo" | "ReasonLIGO"), _ -> ok ReasonLIGO
| _ -> simple_fail "Invalid syntax name.\n\
Hint: Use \"pascaligo\", \"cameligo\" \
or \"reasonligo\".\n"
let parsify_pascaligo = fun source ->
let parsify_pascaligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Pascaligo.parse_file source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Pascaligo.simpl_program raw in
ok simplified
Simplify.Pascaligo.simpl_program raw
in ok simplified
let parsify_expression_pascaligo = fun source ->
let parsify_expression_pascaligo source =
let%bind raw =
trace (simple_error "parsing expression") @@
Parser.Pascaligo.parse_expression source in
let%bind simplified =
trace (simple_error "simplifying expression") @@
Simplify.Pascaligo.simpl_expression raw in
ok simplified
Simplify.Pascaligo.simpl_expression raw
in ok simplified
let parsify_cameligo = fun source ->
let parsify_cameligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Cameligo.parse_file source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_expression_cameligo = fun source ->
let parsify_expression_cameligo source =
let%bind raw =
trace (simple_error "parsing expression") @@
Parser.Cameligo.parse_expression source in
let%bind simplified =
trace (simple_error "simplifying expression") @@
Simplify.Cameligo.simpl_expression raw in
ok simplified
Simplify.Cameligo.simpl_expression raw
in ok simplified
let parsify_reasonligo = fun source ->
let parsify_reasonligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Reasonligo.parse_file source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_expression_reasonligo = fun source ->
let parsify_expression_reasonligo source =
let%bind raw =
trace (simple_error "parsing expression") @@
Parser.Reasonligo.parse_expression source in
let%bind simplified =
trace (simple_error "simplifying expression") @@
Simplify.Cameligo.simpl_expression raw in
ok simplified
Simplify.Cameligo.simpl_expression raw
in ok simplified
let parsify = fun (syntax : v_syntax) source_filename ->
let%bind parsify = match syntax with
| Pascaligo -> ok parsify_pascaligo
| Cameligo -> ok parsify_cameligo
| ReasonLIGO -> ok parsify_reasonligo
in
let%bind parsified = parsify source_filename in
let%bind applied = Self_ast_simplified.all_program parsified in
ok applied
let parsify_expression = fun syntax source ->
let%bind parsify = match syntax with
| Pascaligo -> ok parsify_expression_pascaligo
| Cameligo -> ok parsify_expression_cameligo
| ReasonLIGO -> ok parsify_expression_reasonligo
in
let parsify syntax source =
let%bind parsify =
match syntax with
PascaLIGO -> ok parsify_pascaligo
| CameLIGO -> ok parsify_cameligo
| ReasonLIGO -> ok parsify_reasonligo in
let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_expression parsified in
ok applied
let%bind applied = Self_ast_simplified.all_program parsified
in ok applied
let parsify_string_reasonligo = fun source ->
let parsify_expression syntax source =
let%bind parsify = match syntax with
PascaLIGO -> ok parsify_expression_pascaligo
| CameLIGO -> ok parsify_expression_cameligo
| ReasonLIGO -> ok parsify_expression_reasonligo in
let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_expression parsified
in ok applied
let parsify_string_reasonligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Reasonligo.parse_string source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_string_pascaligo = fun source ->
let parsify_string_pascaligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Pascaligo.parse_string source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Pascaligo.simpl_program raw in
ok simplified
Simplify.Pascaligo.simpl_program raw
in ok simplified
let parsify_string_cameligo = fun source ->
let parsify_string_cameligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Cameligo.parse_string source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_string = fun (syntax : v_syntax) source_filename ->
let%bind parsify = match syntax with
| Pascaligo -> ok parsify_string_pascaligo
| Cameligo -> ok parsify_string_cameligo
| ReasonLIGO -> ok parsify_string_reasonligo
in
let%bind parsified = parsify source_filename in
let%bind applied = Self_ast_simplified.all_program parsified in
ok applied
let parsify_string syntax source =
let%bind parsify =
match syntax with
PascaLIGO -> ok parsify_string_pascaligo
| CameLIGO -> ok parsify_string_cameligo
| ReasonLIGO -> ok parsify_string_reasonligo in
let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_program parsified
in ok applied
let pretty_print_pascaligo = fun source ->
let pretty_print_pascaligo source =
let%bind ast = Parser.Pascaligo.parse_file source in
let buffer = Buffer.create 59 in
let state = Parser_pascaligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
let state =
Parser_pascaligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
Parser_pascaligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print_cameligo = fun source ->
let pretty_print_cameligo source =
let%bind ast = Parser.Cameligo.parse_file source in
let buffer = Buffer.create 59 in
let state = Parser_cameligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
let state = (* TODO: Should flow from the CLI *)
Parser_cameligo.ParserLog.mk_state
~offsets:true
~mode:`Point
~buffer in
Parser.Cameligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print_reasonligo = fun source ->
let pretty_print_reasonligo source =
let%bind ast = Parser.Reasonligo.parse_file source in
let buffer = Buffer.create 59 in
let state = Parser.Reasonligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
let state = (* TODO: Should flow from the CLI *)
Parser.Reasonligo.ParserLog.mk_state
~offsets:true
~mode:`Point
~buffer in
Parser.Reasonligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print = fun syntax source_filename ->
let%bind v_syntax = syntax_to_variant syntax (Some source_filename) in
(match v_syntax with
| Pascaligo -> pretty_print_pascaligo
| Cameligo -> pretty_print_cameligo
| ReasonLIGO -> pretty_print_reasonligo)
source_filename
let pretty_print syntax source =
let%bind v_syntax =
syntax_to_variant syntax (Some source) in
match v_syntax with
PascaLIGO -> pretty_print_pascaligo source
| CameLIGO -> pretty_print_cameligo source
| ReasonLIGO -> pretty_print_reasonligo source

View File

@ -1,129 +1,180 @@
open Trace
module Parser = Parser_cameligo.Parser
module AST = Parser_cameligo.AST
module ParserLog = Parser_cameligo.ParserLog
module AST = Parser_cameligo.AST
module LexToken = Parser_cameligo.LexToken
module Lexer = Lexer.Make(LexToken)
module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_cameligo.ParErr
module SSet = Utils.String.Set
module Errors = struct
(* Mock IOs TODO: Fill them with CLI options *)
let lexer_error (e: Lexer.error AST.reg) =
let title () = "lexer error" in
let message () = Lexer.error_to_string e.value in
let data = [
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region
)
] in
error ~data title message
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
let parser_error source (start: Lexing.position) (stop: Lexing.position) lexbuf =
let title () = "parser error" in
let file = if source = "" then
""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source
in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file
in
let message () = str in
let loc = if start.pos_cnum = -1 then
Region.make
~start:(Pos.min ~file:source)
~stop:(Pos.from_byte stop)
else
Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop)
in
let data =
[
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
]
in
error ~data title message
module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true
end
let unrecognized_error source (start: Lexing.position) (stop: Lexing.position) lexbuf =
let title () = "unrecognized error" in
let file = if source = "" then
""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source
in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop)
in
let data = [
("unrecognized_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
] in
error ~data title message
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_cameligo.Parser
end
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_cameligo.ParserLog
end
open Errors
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
module Errors =
struct
(* let data =
[("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let parse (parser: 'a parser) source lexbuf =
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in
let result =
try
ok (parser read lexbuf)
with
| Parser.Error ->
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ (parser_error source start stop lexbuf)
| Lexer.Error e ->
fail @@ (lexer_error e)
| _ ->
let _ = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ (unrecognized_error source start stop lexbuf)
in
close ();
result
let generic message =
let title () = ""
and message () = message.Region.value
in Trace.error ~data:[] title message
end
let parse_file (source: string) : AST.t result =
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
(* Lexing and parsing errors *)
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
| exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region
in local_fail
("Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n", None, token)
| exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid))
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.mligo"
in prefix ^ suffix in
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
source pp_input in
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let%bind channel =
generic_try (simple_error "error opening file") @@
(fun () -> open_in pp_input) in
let lexbuf = Lexing.from_channel channel in
parse (Parser.contract) source lexbuf
let parse_string (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s:string) : AST.t result =
let lexbuf = Lexing.from_string s in
parse Parser.contract "" lexbuf
let parse_expression (s:string) : AST.expr result =
let lexbuf = Lexing.from_string s in
parse Parser.interactive_expr "" lexbuf
let parse_expression (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -90,7 +90,7 @@ tuple(item):
(* Possibly empty semicolon-separated values between brackets *)
list(item):
list__(item):
"[" sep_or_term_list(item,";")? "]" {
let compound = Brackets ($1,$3)
and region = cover $1 $3 in
@ -182,7 +182,7 @@ sum_type:
variant:
"<constr>" { {$1 with value={constr=$1; arg=None}} }
| "<constr>" "of" cartesian {
| "<constr>" "of" fun_type {
let region = cover $1.region (type_expr_to_region $3)
and value = {constr=$1; arg = Some ($2,$3)}
in {region; value} }
@ -217,6 +217,7 @@ let_declaration:
let_binding:
"<ident>" nseq(sub_irrefutable) type_annotation? "=" expr {
Scoping.check_reserved_name $1;
let binders = Utils.nseq_cons (PVar $1) $2 in
Utils.nseq_iter Scoping.check_pattern binders;
{binders; lhs_type=$3; eq=$4; let_rhs=$5}
@ -293,7 +294,7 @@ core_pattern:
| "false" { PFalse $1 }
| "true" { PTrue $1 }
| par(ptuple) { PPar $1 }
| list(tail) { PList (PListComp $1) }
| list__(tail) { PList (PListComp $1) }
| constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 }
@ -584,7 +585,7 @@ core_expr:
| unit { EUnit $1 }
| "false" { ELogic (BoolExpr (False $1)) }
| "true" { ELogic (BoolExpr (True $1)) }
| list(expr) { EList (EListComp $1) }
| list__(expr) { EList (EListComp $1) }
| sequence { ESeq $1 }
| record_expr { ERecord $1 }
| update_record { EUpdate $1 }

View File

@ -27,12 +27,11 @@ module Unit =
(* Main *)
let issue_error point =
let error = Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
let issue_error error : ('a, string Region.reg) Stdlib.result =
Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error)
let parse parser : ('a,string) Stdlib.result =
let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with
(* Scoping errors *)
@ -81,11 +80,61 @@ let parse parser : ('a,string) Stdlib.result =
None, invalid
in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () =
if IO.options#expr
then match parse (fun () -> Unit.parse Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg
if Sys.command cpp_cmd <> 0 then
Printf.eprintf "External error: \"%s\" failed." cpp_cmd
(* Instantiating the lexer and calling the parser *)
let lexer_inst =
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -31,19 +31,30 @@ module VarSet = Set.Make (Ord)
let reserved =
let open SSet in
empty
|> add "abs"
|> add "address"
|> add "amount"
|> add "assert"
|> add "balance"
|> add "time"
|> add "amount"
|> add "gas"
|> add "sender"
|> add "source"
|> add "failwith"
|> add "black2b"
|> add "check"
|> add "continue"
|> add "stop"
|> add "failwith"
|> add "gas"
|> add "hash"
|> add "hash_key"
|> add "implicit_account"
|> add "int"
|> add "abs"
|> add "pack"
|> add "self_address"
|> add "sender"
|> add "sha256"
|> add "sha512"
|> add "source"
|> add "stop"
|> add "time"
|> add "unit"
|> add "unpack"
let check_reserved_names vars =
let is_reserved elt = SSet.mem elt.value reserved in

View File

@ -15,17 +15,16 @@
(name parser_cameligo)
(public_name ligo.parser.cameligo)
(modules
Scoping AST cameligo Parser ParserLog LexToken)
Scoping AST cameligo Parser ParserLog LexToken ParErr)
(libraries
menhirLib
parser_shared
str
simple-utils
tezos-utils
getopt)
tezos-utils)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared)))
(flags (:standard -open Parser_shared -open Simple_utils)))
;; Build of the unlexer (for covering the
;; error states of the LR automaton)
@ -52,8 +51,7 @@
(executable
(name ParserMain)
(libraries parser_cameligo)
(modules
ParErr ParserMain)
(modules ParserMain)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
@ -70,4 +68,4 @@
(rule
(targets all.mligo)
(deps (:script_cover ../../../../vendors/ligo-utils/simple-utils/cover.sh) Parser.mly LexToken.mli ParToken.mly Parser.msg Unlexer.exe)
(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=mligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly )))
(action (run %{script_cover} --lex-tokens=LexToken.mli --par-tokens=ParToken.mly --ext=mligo --unlexer=./Unlexer.exe --messages=Parser.msg --dir=. --concatenate Parser.mly )))

View File

@ -1,154 +1,191 @@
open Trace
module AST = Parser_pascaligo.AST
module AST = Parser_pascaligo.AST
module LexToken = Parser_pascaligo.LexToken
module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_pascaligo.Scoping
module Parser = Parser_pascaligo.Parser
module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_pascaligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_pascaligo.ParErr
module SSet = Utils.String.Set
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true
end
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.Parser
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Errors =
struct
let reserved_name Region.{value; region} =
let title () = Printf.sprintf "reserved name \"%s\"" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
(* let data =
[("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let non_linear_pattern Region.{value; region} =
let title () =
Printf.sprintf "repeated variable \"%s\" in this pattern" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let generic message =
let title () = ""
and message () = message.Region.value
in Trace.error ~data:[] title message
end
let duplicate_parameter Region.{value; region} =
let title () =
Printf.sprintf "duplicate parameter \"%s\"" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
let duplicate_variant Region.{value; region} =
let title () =
Printf.sprintf "duplicate variant \"%s\" in this\
type declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
(* Lexing and parsing errors *)
let unrecognized_error source (start: Lexing.position)
(stop: Lexing.position) lexbuf =
let title () = "unrecognized error" in
let file =
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let message () =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file in
let loc = Region.make ~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop) in
let data = [
("unrecognized_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
let parser_error source (start: Lexing.position)
(stop: Lexing.position) lexbuf =
let title () = "parser error" in
let file =
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let message () =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file in
let loc =
if start.pos_cnum = -1 then
Region.make
~start:(Pos.min ~file:source) ~stop:(Pos.from_byte stop)
else
Region.make ~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop) in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] in
error ~data title message
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
let lexer_error (e: Lexer.error AST.reg) =
let title () = "lexer error" in
let message () = Lexer.error_to_string e.value in
let data = [
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)]
in error ~data title message
end
| exception Scoping.Error (Scoping.Duplicate_parameter name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate parameter.\nHint: Change the name.\n",
None, invalid))
open Errors
| exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region
in local_fail
("Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n", None, token)
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
| exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
let parse (parser: 'a parser) source lexbuf =
let Lexer.{read; close; _} = Lexer.open_token_stream None in
let result =
try ok (parser read lexbuf) with
Lexer.Error e ->
fail @@ lexer_error e
| Parser.Error ->
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ parser_error source start stop lexbuf
| Scoping.Error (Scoping.Non_linear_pattern var) ->
fail @@ non_linear_pattern var
| Scoping.Error (Duplicate_parameter name) ->
fail @@ duplicate_parameter name
| Scoping.Error (Duplicate_variant name) ->
fail @@ duplicate_variant name
| Scoping.Error (Reserved_name name) ->
fail @@ reserved_name name
| _ ->
let () = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ unrecognized_error source start stop lexbuf
in close (); result
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid))
let parse_file (source: string) : AST.t result =
let parse_file source =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let module Unit = PreUnit (IO) in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.ligo"
in prefix ^ suffix in
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
source pp_input in
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let%bind channel =
generic_try (simple_error "error opening file") @@
(fun () -> open_in pp_input) in
let lexbuf = Lexing.from_channel channel in
parse (Parser.contract) source lexbuf
let parse_string (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s:string) : AST.t result =
let lexbuf = Lexing.from_string s in
parse (Parser.contract) "" lexbuf
let parse_expression (s:string) : AST.expr result =
let lexbuf = Lexing.from_string s in
parse (Parser.interactive_expr) "" lexbuf
let parse_expression (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -17,6 +17,7 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Utils.ml
../shared/ParserAPI.mli
../shared/ParserAPI.ml
../shared/LexerUnit.mli
../shared/LexerUnit.ml
../shared/ParserUnit.mli
../shared/ParserUnit.ml

View File

@ -1,4 +1,6 @@
(** Driver for the PascaLIGO lexer *)
(* Driver for the PascaLIGO lexer *)
module Region = Simple_utils.Region
module IO =
struct
@ -11,4 +13,5 @@ module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
let () =
match M.trace () with
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value

View File

@ -141,23 +141,23 @@ type_decl:
in {region; value} }
type_expr:
sum_type | record_type | cartesian { $1 }
fun_type | sum_type | record_type { $1 }
cartesian:
function_type { $1 }
| function_type "*" nsepseq(function_type,"*") {
let value = Utils.nsepseq_cons $1 $2 $3 in
let region = nsepseq_to_region type_expr_to_region value
in TProd {region; value} }
function_type:
core_type { $1 }
| core_type "->" function_type {
fun_type:
cartesian { $1 }
| cartesian "->" fun_type {
let start = type_expr_to_region $1
and stop = type_expr_to_region $3 in
let region = cover start stop in
TFun {region; value = $1,$2,$3} }
cartesian:
core_type { $1 }
| core_type "*" nsepseq(core_type,"*") {
let value = Utils.nsepseq_cons $1 $2 $3 in
let region = nsepseq_to_region type_expr_to_region value
in TProd {region; value} }
core_type:
type_name { TVar $1 }
| par(type_expr) { TPar $1 }
@ -201,7 +201,7 @@ sum_type:
variant:
"<constr>" { {$1 with value = {constr=$1; arg=None}} }
| "<constr>" "of" cartesian {
| "<constr>" "of" fun_type {
let region = cover $1.region (type_expr_to_region $3)
and value = {constr=$1; arg = Some ($2,$3)}
in {region; value} }
@ -315,7 +315,7 @@ param_decl:
in ParamConst {region; value} }
param_type:
cartesian { $1 }
fun_type { $1 }
block:
"begin" sep_or_term_list(statement,";") "end" {

View File

@ -1,4 +1,4 @@
(** Driver for the PascaLIGO parser *)
(* Driver for the PascaLIGO parser *)
module IO =
struct
@ -27,12 +27,11 @@ module Unit =
(* Main *)
let issue_error point =
let error = Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
let issue_error error : ('a, string Region.reg) Stdlib.result =
Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error)
let parse parser : ('a,string) Stdlib.result =
let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with
(* Scoping errors *)
@ -87,16 +86,67 @@ let parse parser : ('a,string) Stdlib.result =
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point = "Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid
in issue_error point)
let point =
"Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid
in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () =
if IO.options#expr
then match parse (fun () -> Unit.parse Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg
if Sys.command cpp_cmd <> 0 then
Printf.eprintf "External error: \"%s\" failed." cpp_cmd
(* Instantiating the lexer and calling the parser *)
let lexer_inst =
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -7,7 +7,7 @@
(menhir
(merge_into Parser)
(modules ParToken Parser)
(flags -la 1 --table --strict --external-tokens LexToken))
(flags -la 1 --table --strict --explain --external-tokens LexToken))
;; Build of the parser as a library
@ -20,8 +20,7 @@
menhirLib
parser_shared
hex
simple-utils
tezos-utils)
simple-utils)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Simple_utils)))
@ -52,8 +51,7 @@
(executable
(name ParserMain)
(libraries parser_pascaligo)
(modules
ParserMain)
(modules ParserMain)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))

View File

@ -1,131 +1,191 @@
open Trace
module Parser = Parser_reasonligo.Parser
module AST = Parser_cameligo.AST
module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken)
module AST = Parser_cameligo.AST
module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError
module Scoping = Parser_cameligo.Scoping
module SSet = Utils.String.Set
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true
end
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_reasonligo.Parser
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_cameligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Errors =
struct
let lexer_error (e: Lexer.error AST.reg) =
let title () = "lexer error" in
let message () = Lexer.error_to_string e.value in
let data = [
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)]
in error ~data title message
let generic message =
let title () = ""
and message () = message.Region.value
in Trace.error ~data:[] title message
let wrong_function_arguments expr =
let title () = "wrong function arguments" in
let message () = "" in
let wrong_function_arguments (expr: AST.expr) =
let title () = "" in
let message () = "Wrong function arguments.\n" in
let expression_loc = AST.expr_to_region expr in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)]
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)]
in error ~data title message
end
let parser_error source (start: Lexing.position)
(end_: Lexing.position) lexbuf =
let title () = "parser error" in
let file =
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let str =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file in
let message () = str in
let loc =
if start.pos_cnum = -1
then Region.make
~start:(Pos.min ~file:source)
~stop:(Pos.from_byte end_)
else Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_) in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
let unrecognized_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let title () = "unrecognized error" in
let file =
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let str =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_) in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
(* Lexing and parsing errors *)
end
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
open Errors
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
| exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region
in local_fail
("Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n", None, token)
let parse (parser: 'a parser) source lexbuf =
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in
let result =
try
ok (parser read lexbuf)
with
| SyntaxError.Error (WrongFunctionArguments e) ->
fail @@ (wrong_function_arguments e)
| Parser.Error ->
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (parser_error source start end_ lexbuf)
| Lexer.Error e ->
fail @@ (lexer_error e)
| _ ->
let _ = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (unrecognized_error source start end_ lexbuf)
in
close ();
result
| exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
let parse_file (source: string) : AST.t result =
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid))
| exception SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
Trace.fail @@ Errors.wrong_function_arguments expr
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.religo"
in prefix ^ suffix in
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
source pp_input in
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let%bind channel =
generic_try (simple_error "error opening file") @@
(fun () -> open_in pp_input) in
let lexbuf = Lexing.from_channel channel in
parse (Parser.contract) source lexbuf
let parse_string (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s:string) : AST.t result =
let lexbuf = Lexing.from_string s in
parse (Parser.contract) "" lexbuf
let parse_expression (s:string) : AST.expr result =
let lexbuf = Lexing.from_string s in
parse (Parser.interactive_expr) "" lexbuf
let parse_expression (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
match Lexer.(open_token_stream @@ String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -119,7 +119,7 @@ tuple(item):
(* Possibly empty semicolon-separated values between brackets *)
list(item):
list__(item):
"[" sep_or_term_list(item,";")? "]" {
let compound = Brackets ($1,$3)
and region = cover $1 $3 in
@ -230,13 +230,13 @@ field_decl:
(* Top-level non-recursive definitions *)
let_declaration:
seq(Attr) "let" let_binding {
seq(Attr) "let" let_binding {
let attributes = $1 in
let kwd_let = $2 in
let binding = $3 in
let value = kwd_let, binding, attributes in
let stop = expr_to_region binding.let_rhs in
let region = cover $2 stop
let kwd_let = $2 in
let binding = $3 in
let value = kwd_let, binding, attributes in
let stop = expr_to_region binding.let_rhs in
let region = cover $2 stop
in {region; value} }
es6_func:
@ -335,7 +335,7 @@ core_pattern:
| "false" { PFalse $1 }
| "<string>" { PString $1 }
| par(ptuple) { PPar $1 }
| list(sub_pattern) { PList (PListComp $1) }
| list__(sub_pattern) { PList (PListComp $1) }
| constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 }
@ -439,23 +439,21 @@ fun_expr:
{p.value with inside = arg_to_pattern p.value.inside}
in PPar {p with value}
| EUnit u -> PUnit u
| ETuple { value; region } ->
| ETuple { value; region } ->
PTuple { value = Utils.nsepseq_map arg_to_pattern value; region}
| EAnnot {region; value = {inside = t, colon, typ; _}} ->
| EAnnot {region; value = {inside = t, colon, typ; _}} ->
let value = { pattern = arg_to_pattern t; colon; type_expr = typ} in
PPar {
value = {
lpar = Region.ghost;
rpar = Region.ghost;
rpar = Region.ghost;
inside = PTyped {region; value}
};
region
}
| e -> (
let open! SyntaxError in
raise (Error (WrongFunctionArguments e))
)
in
| e ->
let open! SyntaxError in
raise (Error (WrongFunctionArguments e)) in
let fun_args_to_pattern = function
EAnnot {
value = {
@ -576,8 +574,8 @@ case_clause(right_expr):
let_expr(right_expr):
seq(Attr) "let" let_binding ";" right_expr {
let attributes = $1 in
let kwd_let = $2 in
let attributes = $1 in
let kwd_let = $2 in
let binding = $3 in
let kwd_in = $4 in
let body = $5 in
@ -727,8 +725,8 @@ common_expr:
| "true" { ELogic (BoolExpr (True $1)) }
core_expr_2:
common_expr { $1 }
| list(expr) { EList (EListComp $1) }
common_expr { $1 }
| list__(expr) { EList (EListComp $1) }
list_or_spread:
"[" expr "," sep_or_term_list(expr, ",") "]" {
@ -807,11 +805,11 @@ projection:
field_path = snd $4}
in {region; value} }
path :
"<ident>" {Name $1}
| projection { Path $1}
path:
"<ident>" { Name $1 }
| projection { Path $1 }
update_record :
update_record:
"{""..."path "," sep_or_term_list(field_path_assignment,",") "}" {
let region = cover $1 $6 in
let ne_elements, terminator = $5 in

View File

@ -27,12 +27,11 @@ module Unit =
(* Main *)
let issue_error point =
let error = Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
let issue_error error : ('a, string Region.reg) Stdlib.result =
Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error)
let parse parser : ('a,string) Stdlib.result =
let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with
(* Ad hoc errors from the parser *)
@ -43,10 +42,10 @@ let parse parser : ('a,string) Stdlib.result =
Examples of valid functions:\n\
let x = (a: string, b: int) : int => 3;\n\
let x = (a: string) : string => \"Hello, \" ++ a;\n"
and reg = AST.expr_to_region expr in
let error = Unit.short_error ~offsets:IO.options#offsets
IO.options#mode msg reg
in Stdlib.Error error
and region = AST.expr_to_region expr in
let error = Unit.short_error ~offsets:IO.options#offsets
IO.options#mode msg region
in Stdlib.Error Region.{value=error; region}
(* Scoping errors *)
@ -96,11 +95,61 @@ let parse parser : ('a,string) Stdlib.result =
None, invalid
in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () =
if IO.options#expr
then match parse (fun () -> Unit.parse Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg
if Sys.command cpp_cmd <> 0 then
Printf.eprintf "External error: \"%s\" failed." cpp_cmd
(* Instantiating the lexer and calling the parser *)
let lexer_inst =
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -7,7 +7,7 @@
(menhir
(merge_into Parser)
(modules ParToken Parser)
(flags -la 1 --table --explain --strict --external-tokens LexToken))
(flags -la 1 --table --strict --explain --external-tokens LexToken))
;; Build of the parser as a library
@ -15,18 +15,16 @@
(name parser_reasonligo)
(public_name ligo.parser.reasonligo)
(modules
SyntaxError reasonligo LexToken Parser)
SyntaxError reasonligo LexToken ParErr Parser)
(libraries
menhirLib
parser_shared
parser_cameligo
str
simple-utils
tezos-utils
getopt)
simple-utils)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
(flags (:standard -open Parser_shared -open Simple_utils -open Parser_cameligo)))
;; Build of the unlexer (for covering the
;; error states of the LR automaton)
@ -55,8 +53,7 @@
(libraries
parser_reasonligo
parser_cameligo)
(modules
ParErr ParserMain)
(modules ParserMain)
(preprocess
(pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo)))

View File

@ -145,7 +145,16 @@ module type S =
close : unit -> unit
}
val open_token_stream : file_path option -> instance
type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
val open_token_stream : input -> (instance, open_err) Stdlib.result
(* Error reporting *)
@ -157,7 +166,7 @@ module type S =
val format_error :
?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> string
error Region.reg -> file:bool -> string Region.reg
end

View File

@ -165,9 +165,18 @@ module type S =
get_last : unit -> Region.t;
get_file : unit -> file_path;
close : unit -> unit
}
}
val open_token_stream : file_path option -> instance
type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
val open_token_stream : input -> (instance, open_err) Stdlib.result
(* Error reporting *)
@ -179,7 +188,7 @@ module type S =
val format_error :
?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> string
error Region.reg -> file:bool -> string Region.reg
end
(* The functorised interface
@ -443,8 +452,9 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let format_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value
and reg = region#to_string ~file ~offsets mode
in sprintf "Lexical error %s:\n%s" reg msg
and reg = region#to_string ~file ~offsets mode in
let value = sprintf "Lexical error %s:\n%s" reg msg
in Region.{value; region}
let fail region value = raise (Error Region.{region; value})
@ -515,15 +525,12 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let region, lexeme, state = sync state buffer in
let lexeme = Str.string_before lexeme (String.index lexeme 't') in
match format_tz lexeme with
| Some tz -> (
match Token.mk_mutez (Z.to_string tz ^ "mutez") region with
Ok token ->
token, state
None -> assert false
| Some tz ->
match Token.mk_mutez (Z.to_string tz ^ "mutez") region with
Ok token -> token, state
| Error Token.Non_canonical_zero ->
fail region Non_canonical_zero
)
| None -> assert false
let mk_ident state buffer =
let region, lexeme, state = sync state buffer in
@ -553,7 +560,6 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let region, _, state = sync state buffer
in Token.eof region, state
(* END HEADER *)
}
@ -579,8 +585,9 @@ let byte_seq = byte | byte (byte | '_')* byte
let bytes = "0x" (byte_seq? as seq)
let esc = "\\n" | "\\\"" | "\\\\" | "\\b"
| "\\r" | "\\t" | "\\x" byte
let pascaligo_sym = "=/=" | '#' | ":="
let cameligo_sym = "<>" | "::" | "||" | "&&"
let pascaligo_sym = "=/=" | '#' | ":="
let cameligo_sym = "<>" | "::" | "||" | "&&"
let reasonligo_sym = '!' | "=>" | "!=" | "==" | "++" | "..." | "||" | "&&"
let symbol =
@ -679,7 +686,7 @@ and scan state = parse
Some special errors are recognised in the semantic actions of the
following regular expressions. The first error is a minus sign
separated from the integer it applies by some markup (space or
separated from the integer it applies to by some markup (space or
tabs). The second is a minus sign immediately followed by
anything else than a natural number (matched above) or markup and
a number (previous error). The third is the strange occurrence of
@ -864,10 +871,20 @@ type instance = {
close : unit -> unit
}
let open_token_stream file_path_opt =
let file_path = match file_path_opt with
None | Some "-" -> ""
| Some file_path -> file_path in
type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
let open_token_stream input =
let file_path = match input with
File file_path ->
if file_path = "-" then "" else file_path
| _ -> "" in
let pos = Pos.min ~file:file_path in
let buf_reg = ref (pos#byte, pos#byte)
and first_call = ref true
@ -934,11 +951,11 @@ let open_token_stream file_path_opt =
in fail region Missing_break
| _ -> () in
let rec read_token ?(log=fun _ _ -> ()) buffer =
let rec read ?(log=fun _ _ -> ()) buffer =
match FQueue.deq !state.units with
None ->
scan buffer;
read_token ~log buffer
read ~log buffer
| Some (units, (left_mark, token)) ->
log left_mark token;
state := {!state with units;
@ -948,15 +965,33 @@ let open_token_stream file_path_opt =
patch_buffer (Token.to_region token)#byte_pos buffer;
token in
let cin = match file_path_opt with
None | Some "-" -> stdin
| Some file_path -> open_in file_path in
let buffer = Lexing.from_channel cin in
let () = match file_path_opt with
None | Some "-" -> ()
| Some file_path -> reset ~file:file_path buffer
and close () = close_in cin in
{read = read_token; buffer; get_win; get_pos; get_last; get_file; close}
let buf_close_res =
match input with
File "" | File "-" | Stdin ->
Ok (Lexing.from_channel stdin, fun () -> close_in stdin)
| File path ->
(try
let chan = open_in path in
let close () = close_in chan in
Ok (Lexing.from_channel chan, close)
with
Sys_error msg -> Stdlib.Error (File_opening msg))
| String s ->
Ok (Lexing.from_string s, fun () -> ())
| Channel chan ->
let close () = close_in chan in
Ok (Lexing.from_channel chan, close)
| Buffer b -> Ok (b, fun () -> ()) in
match buf_close_res with
Ok (buffer, close) ->
let () =
match input with
File path when path <> "" -> reset ~file:path buffer
| _ -> () in
let instance = {
read; buffer; get_win; get_pos; get_last; get_file; close}
in Ok instance
| Error _ as e -> e
end (* of functor [Make] in HEADER *)
(* END TRAILER *)

View File

@ -1,4 +1,6 @@
(** Embedding the LIGO lexer in a debug module *)
(* Embedding the LIGO lexer in a debug module *)
module Region = Simple_utils.Region
module type S =
sig
@ -14,7 +16,7 @@ module type S =
val trace :
?offsets:bool -> [`Byte | `Point] ->
file_path option -> EvalOpt.command ->
(unit, string) Stdlib.result
(unit, string Region.reg) Stdlib.result
end
module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
@ -48,28 +50,31 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
type file_path = string
let trace ?(offsets=true) mode file_path_opt command :
(unit, string) Stdlib.result =
try
let Lexer.{read; buffer; close; _} =
Lexer.open_token_stream file_path_opt in
let log = output_token ~offsets mode command stdout
and close_all () = close (); close_out stdout in
let rec iter () =
match read ~log buffer with
token ->
if Token.is_eof token
then Stdlib.Ok ()
else iter ()
| exception Lexer.Error error ->
let file =
match file_path_opt with
None | Some "-" -> false
| Some _ -> true in
let msg =
Lexer.format_error ~offsets mode ~file error
in Stdlib.Error msg in
let result = iter ()
in (close_all (); result)
with Sys_error msg -> Stdlib.Error msg
(unit, string Region.reg) Stdlib.result =
let input =
match file_path_opt with
Some file_path -> Lexer.File file_path
| None -> Lexer.Stdin in
match Lexer.open_token_stream input with
Ok Lexer.{read; buffer; close; _} ->
let log = output_token ~offsets mode command stdout
and close_all () = close (); close_out stdout in
let rec iter () =
match read ~log buffer with
token ->
if Token.is_eof token
then Stdlib.Ok ()
else iter ()
| exception Lexer.Error error ->
let file =
match file_path_opt with
None | Some "-" -> false
| Some _ -> true in
let msg =
Lexer.format_error ~offsets mode ~file error
in Stdlib.Error msg in
let result = iter ()
in close_all (); result
| Stdlib.Error (Lexer.File_opening msg) ->
close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
end

View File

@ -1,3 +1,5 @@
module Region = Simple_utils.Region
module type S =
sig
module Lexer : Lexer.S
@ -12,7 +14,7 @@ module type S =
val trace :
?offsets:bool -> [`Byte | `Point] ->
file_path option -> EvalOpt.command ->
(unit, string) Stdlib.result
(unit, string Region.reg) Stdlib.result
end
module Make (Lexer: Lexer.S) : S with module Lexer = Lexer

View File

@ -1,5 +1,7 @@
(* Functor to build a standalone LIGO lexer *)
module Region = Simple_utils.Region
module type IO =
sig
val ext : string (* LIGO file extension *)
@ -49,7 +51,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
(* Running the lexer on the input file *)
let scan () : (Lexer.token list, string) Stdlib.result =
let scan () : (Lexer.token list, string Region.reg) Stdlib.result =
(* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose
@ -59,36 +61,36 @@ module Make (IO: IO) (Lexer: Lexer.S) =
if Sys.command cpp_cmd <> 0 then
let msg =
sprintf "External error: the command \"%s\" failed." cpp_cmd
in Stdlib.Error msg
in Stdlib.Error (Region.wrap_ghost msg)
else
try
let Lexer.{read; buffer; close; _} =
Lexer.open_token_stream (Some pp_input) in
let close_all () = close (); close_out stdout in
let rec read_tokens tokens =
match read ~log:(fun _ _ -> ()) buffer with
token ->
if Lexer.Token.is_eof token
then Stdlib.Ok (List.rev tokens)
else read_tokens (token::tokens)
| exception Lexer.Error error ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let msg =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode ~file error
in Stdlib.Error msg in
let result = read_tokens []
in close_all (); result
with Sys_error msg -> close_out stdout; Stdlib.Error msg
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok Lexer.{read; buffer; close; _} ->
let close_all () = close (); close_out stdout in
let rec read_tokens tokens =
match read ~log:(fun _ _ -> ()) buffer with
token ->
if Lexer.Token.is_eof token
then Stdlib.Ok (List.rev tokens)
else read_tokens (token::tokens)
| exception Lexer.Error error ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let msg =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode ~file error
in Stdlib.Error msg in
let result = read_tokens []
in close_all (); result
| Stdlib.Error (Lexer.File_opening msg) ->
close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
(* Tracing the lexing (effectful) *)
module Log = LexerLog.Make (Lexer)
let trace () : (unit, string) Stdlib.result =
let trace () : (unit, string Region.reg) Stdlib.result =
(* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose
@ -98,7 +100,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
if Sys.command cpp_cmd <> 0 then
let msg =
sprintf "External error: the command \"%s\" failed." cpp_cmd
in Stdlib.Error msg
in Stdlib.Error (Region.wrap_ghost msg)
else
Log.trace ~offsets:IO.options#offsets
IO.options#mode

View File

@ -1,5 +1,7 @@
(* Functor to build a standalone LIGO lexer *)
module Region = Simple_utils.Region
module type IO =
sig
val ext : string (* LIGO file extension *)
@ -8,6 +10,6 @@ module type IO =
module Make (IO: IO) (Lexer: Lexer.S) :
sig
val scan : unit -> (Lexer.token list, string) Stdlib.result
val trace : unit -> (unit, string) Stdlib.result
val scan : unit -> (Lexer.token list, string Region.reg) Stdlib.result
val trace : unit -> (unit, string Region.reg) Stdlib.result
end

View File

@ -18,6 +18,7 @@ module type PARSER =
val interactive_expr :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr
val contract :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast
@ -35,6 +36,7 @@ module type PARSER =
sig
val interactive_expr :
Lexing.position -> expr MenhirInterpreter.checkpoint
val contract :
Lexing.position -> ast MenhirInterpreter.checkpoint
end
@ -95,7 +97,9 @@ module Make (Lexer: Lexer.S)
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
let header = header ^ trailer in
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
let msg =
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
in Region.{value=msg; region=invalid_region}
let failure get_win checkpoint =
let message = ParErr.message (state checkpoint) in

View File

@ -47,6 +47,8 @@ module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token)
(ParErr: sig val message : int -> string end) :
sig
(* WARNING: The following parsers may all raise [Lexer.Error] *)
(* The monolithic API of Menhir *)
val mono_contract :
@ -67,5 +69,6 @@ module Make (Lexer: Lexer.S)
val incr_contract : Lexer.instance -> Parser.ast
val incr_expr : Lexer.instance -> Parser.expr
val format_error : ?offsets:bool -> [`Point | `Byte] -> error -> string
val format_error :
?offsets:bool -> [`Point | `Byte] -> error -> string Region.reg
end

View File

@ -37,42 +37,13 @@ module Make (Lexer: Lexer.S)
open Printf
module SSet = Utils.String.Set
(* Error printing and exception tracing *)
(* Log of the lexer *)
let () = Printexc.record_backtrace true
module Log = LexerLog.Make (Lexer)
(* Preprocessing the input source and opening the input channels *)
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let log =
Log.output_token ~offsets:IO.options#offsets
IO.options#mode IO.options#cmd stdout
(* Error handling (reexported from [ParserAPI]) *)
@ -81,8 +52,6 @@ module Make (Lexer: Lexer.S)
type invalid = Parser.token
type error = message * valid option * invalid
exception Point of error
(* Instantiating the parser *)
module Front = ParserAPI.Make (Lexer)(Parser)(ParErr)
@ -94,18 +63,23 @@ module Make (Lexer: Lexer.S)
(* Parsing an expression *)
let parse_expr lexer_inst tokeniser output state :
(AST.expr, string) Stdlib.result =
let close_all () =
lexer_inst.Lexer.close (); close_out stdout in
let lexbuf = lexer_inst.Lexer.buffer in
let parse_expr lexer_inst :
(AST.expr, message Region.reg) Stdlib.result =
let output = Buffer.create 131 in
let state =
ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
~buffer:output in
let close () = lexer_inst.Lexer.close () in
let expr =
try
if IO.options#mono then
Front.mono_expr tokeniser lexbuf
let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_expr tokeniser lexbuf
else
Front.incr_expr lexer_inst
with exn -> close_all (); raise exn in
with exn -> close (); raise exn in
let () =
if SSet.mem "ast-tokens" IO.options#verbose then
begin
@ -120,22 +94,27 @@ module Make (Lexer: Lexer.S)
ParserLog.pp_expr state expr;
Buffer.output_buffer stdout output
end
in close_all (); Ok expr
in close (); Ok expr
(* Parsing a contract *)
let parse_contract lexer_inst tokeniser output state
: (AST.t, string) Stdlib.result =
let close_all () =
lexer_inst.Lexer.close (); close_out stdout in
let lexbuf = lexer_inst.Lexer.buffer in
let parse_contract lexer_inst :
(AST.t, message Region.reg) Stdlib.result =
let output = Buffer.create 131 in
let state =
ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
~buffer:output in
let close () = lexer_inst.Lexer.close () in
let ast =
try
if IO.options#mono then
Front.mono_contract tokeniser lexbuf
let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_contract tokeniser lexbuf
else
Front.incr_contract lexer_inst
with exn -> close_all (); raise exn in
with exn -> close (); raise exn in
let () =
if SSet.mem "ast-tokens" IO.options#verbose then
begin
@ -150,74 +129,45 @@ module Make (Lexer: Lexer.S)
ParserLog.pp_ast state ast;
Buffer.output_buffer stdout output
end
in close_all (); Ok ast
in close (); Ok ast
(* Wrapper for the parsers above *)
let parse parser =
(* Preprocessing the input *)
type 'a parser = Lexer.instance -> ('a, message Region.reg) result
if SSet.mem "cpp" IO.options#verbose
then eprintf "%s\n%!" cpp_cmd
else ();
let apply lexer_inst parser =
(* Calling the parser and filtering errors *)
if Sys.command cpp_cmd <> 0 then
let msg =
sprintf "External error: \"%s\" failed." cpp_cmd
in Stdlib.Error msg
else
(* Instantiating the lexer *)
match parser lexer_inst with
Stdlib.Error _ as error -> error
| Stdlib.Ok _ as node -> node
let lexer_inst = Lexer.open_token_stream (Some pp_input) in
(* Lexing errors *)
(* Making the tokeniser *)
| exception Lexer.Error err ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let error =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in Stdlib.Error error
let module Log = LexerLog.Make (Lexer) in
(* Incremental API of Menhir *)
let log =
Log.output_token ~offsets:IO.options#offsets
IO.options#mode IO.options#cmd stdout in
| exception Front.Point point ->
let error =
Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
let tokeniser = lexer_inst.Lexer.read ~log in
(* Monolithic API of Menhir *)
let output = Buffer.create 131 in
let state = ParserLog.mk_state
~offsets:IO.options#offsets
~mode:IO.options#mode
~buffer:output in
(* Calling the specific parser (that is, the parameter) *)
match parser lexer_inst tokeniser output state with
Stdlib.Error _ as error -> error
| Stdlib.Ok _ as node -> node
(* Lexing errors *)
| exception Lexer.Error err ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let error =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in Stdlib.Error error
(* Incremental API of Menhir *)
| exception Front.Point point ->
let error =
Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
(* Monolithic API of Menhir *)
| exception Parser.Error ->
let invalid, valid_opt =
match lexer_inst.Lexer.get_win () with
Lexer.Nil ->
| exception Parser.Error ->
let invalid, valid_opt =
match lexer_inst.Lexer.get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *)
| Lexer.One invalid -> invalid, None
| Lexer.Two (invalid, valid) -> invalid, Some valid in
@ -227,8 +177,8 @@ module Make (Lexer: Lexer.S)
IO.options#mode point
in Stdlib.Error error
(* I/O errors *)
| exception Sys_error error -> Stdlib.Error error
(* I/O errors *)
| exception Sys_error error ->
Stdlib.Error (Region.wrap_ghost error)
end

View File

@ -23,49 +23,37 @@ module type Pretty =
val print_expr : state -> expr -> unit
end
module Make (Lexer: Lexer.S)
(AST: sig type t type expr end)
(Parser: ParserAPI.PARSER
with type ast = AST.t
and type expr = AST.expr
and type token = Lexer.token)
(ParErr: sig val message : int -> string end)
(ParserLog: Pretty with type ast = AST.t
and type expr = AST.expr)
module Make (Lexer : Lexer.S)
(AST : sig type t type expr end)
(Parser : ParserAPI.PARSER
with type ast = AST.t
and type expr = AST.expr
and type token = Lexer.token)
(ParErr : sig val message : int -> string end)
(ParserLog : Pretty with type ast = AST.t
and type expr = AST.expr)
(IO: IO) :
sig
(* Error handling (reexported from [ParserAPI]) *)
(* Error handling reexported from [ParserAPI] without the
exception [Point] *)
type message = string
type valid = Parser.token
type invalid = Parser.token
type error = message * valid option * invalid
exception Point of error
val format_error :
?offsets:bool -> [`Byte | `Point] -> error -> string
?offsets:bool -> [`Byte | `Point] -> error -> string Region.reg
val short_error :
?offsets:bool -> [`Point | `Byte] -> string -> Region.t -> string
?offsets:bool -> [`Point | `Byte] -> message -> Region.t -> string
(* Parsers *)
val parse :
(Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state -> ('a, string) result) ->
('a, string) result
type 'a parser = Lexer.instance -> ('a, message Region.reg) result
val parse_contract :
Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state ->
(AST.t, string) Stdlib.result
val parse_expr :
Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state -> (AST.expr, string) Stdlib.result
val apply : Lexer.instance -> 'a parser -> ('a, message Region.reg) result
val parse_contract : AST.t parser
val parse_expr : AST.expr parser
end

View File

@ -32,46 +32,48 @@ module Errors = struct
in
let data = [
("expected", fun () -> expected_name);
("location" , fun () -> Format.asprintf "%a" Location.pp_lift @@ Raw.pattern_to_region actual)
] in
error ~data title message
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@
Raw.pattern_to_region actual)]
in error ~data title message
let unsupported_let_in_function (patterns : Raw.pattern list) =
let title () = "unsupported 'let ... in' function" in
let message () = "defining functions via 'let ... in' is not supported yet" in
let title () = "" in
let message () = "\nDefining functions with \"let ... in\" \
is not supported yet.\n" in
let patterns_loc =
List.fold_left (fun a p -> Region.cover a (Raw.pattern_to_region p))
Region.ghost patterns in
let data = [
("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc)
] in
error ~data title message
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc)]
in error ~data title message
let unknown_predefined_type name =
let title () = "type constants" in
let title () = "Type constants" in
let message () =
Format.asprintf "unknown predefined type \"%s\"" name.Region.value in
Format.asprintf "Unknown predefined type \"%s\".\n"
name.Region.value in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region)
] in
error ~data title message
fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region)]
in error ~data title message
let untyped_fun_param var =
let title () = "function parameter" in
let title () = "" in
let message () =
Format.asprintf "untyped function parameters are not supported yet" in
Format.asprintf "\nUntyped function parameters \
are not supported yet.\n" in
let param_loc = var.Region.region in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ param_loc)
] in
error ~data title message
fun () -> Format.asprintf "%a" Location.pp_lift @@ param_loc)]
in error ~data title message
let unsupported_tuple_pattern p =
let title () = "tuple pattern" in
let title () = "" in
let message () =
Format.asprintf "tuple patterns are not supported yet" in
Format.asprintf "\nTuple patterns are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
@ -80,21 +82,20 @@ module Errors = struct
error ~data title message
let unsupported_cst_constr p =
let title () = "constant constructor" in
let title () = "" in
let message () =
Format.asprintf "constant constructors are not supported yet" in
Format.asprintf "\nConstant constructors are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)
] in
error ~data title message
fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc)]
in error ~data title message
let unsupported_non_var_pattern p =
let title () = "pattern is not a variable" in
let title () = "" in
let message () =
Format.asprintf "non-variable patterns in constructors \
are not supported yet" in
Format.asprintf "\nNon-variable patterns in constructors \
are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
@ -103,20 +104,20 @@ module Errors = struct
error ~data title message
let simplifying_expr t =
let title () = "simplifying expression" in
let title () = "Simplifying expression" in
let message () = "" in
let data = [
("expression" ,
(** TODO: The labelled arguments should be flowing from the CLI. *)
thunk @@ Parser.Cameligo.ParserLog.expr_to_string
~offsets:true ~mode:`Point t)
] in
error ~data title message
~offsets:true ~mode:`Point t)]
in error ~data title message
let only_constructors p =
let title () = "constructors in patterns" in
let title () = "" in
let message () =
Format.asprintf "currently, only constructors are supported in patterns" in
Format.asprintf "\nCurrently, only constructors are \
supported in patterns.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
@ -125,18 +126,18 @@ module Errors = struct
error ~data title message
let unsupported_sugared_lists region =
let title () = "lists in patterns" in
let title () = "" in
let message () =
Format.asprintf "currently, only empty lists and constructors (::) \
are supported in patterns" in
Format.asprintf "\nCurrently, only empty lists and \
constructors (::) \
are supported in patterns.\n" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
] in
error ~data title message
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let corner_case description =
let title () = "corner case" in
let title () = "Corner case" in
let message () = description in
error title message
@ -286,9 +287,9 @@ let rec simpl_expression :
let simpl_update = fun (u:Raw.update Region.reg) ->
let (u, loc) = r_split u in
let (name, path) = simpl_path u.record in
let record = match path with
let record = match path with
| [] -> e_variable (Var.of_name name)
| _ -> e_accessor (e_variable (Var.of_name name)) path in
| _ -> e_accessor (e_variable (Var.of_name name)) path in
let updates = u.updates.value.ne_elements in
let%bind updates' =
let aux (f:Raw.field_path_assign Raw.reg) =
@ -296,7 +297,7 @@ let rec simpl_expression :
let%bind expr = simpl_expression f.field_expr in
ok ( List.map (fun (x: _ Raw.reg) -> x.value) (npseq_to_list f.field_path), expr)
in
bind_map_list aux @@ npseq_to_list updates
bind_map_list aux @@ npseq_to_list updates
in
let aux ur (path, expr) =
let rec aux record = function
@ -356,7 +357,7 @@ let rec simpl_expression :
| hd :: tl ->
e_let_in hd
inline
(e_accessor rhs_b_expr [Access_tuple ((List.length prep_vars) - (List.length tl) - 1)])
(e_accessor rhs_b_expr [Access_tuple ((List.length prep_vars) - (List.length tl) - 1)])
(chain_let_in tl body)
| [] -> body (* Precluded by corner case assertion above *)
in
@ -733,7 +734,7 @@ and simpl_declaration : Raw.declaration -> declaration Location.wrap list result
match v_type with
| Some v_type -> ok (to_option (simpl_type_expression v_type))
| None -> ok None
in
in
let%bind simpl_rhs_expr = simpl_expression rhs_expr in
ok @@ loc x @@ Declaration_constant (Var.of_name v.value, v_type_expression, inline, simpl_rhs_expr) )
in let%bind variables = ok @@ npseq_to_list pt.value
@ -834,9 +835,9 @@ and simpl_cases : type a . (Raw.pattern * a) list -> (a, unit) matching result =
| PConstr v ->
let const, pat_opt =
match v with
PConstrApp {value; _} ->
PConstrApp {value; _} ->
(match value with
| constr, None ->
| constr, None ->
constr, Some (PVar {value = "unit"; region = Region.ghost})
| _ -> value)
| PSomeApp {value=region,pat; _} ->

View File

@ -68,9 +68,9 @@ let detect_free_variables (for_body : expression) (local_decl_names : expression
module Errors = struct
let unsupported_cst_constr p =
let title () = "constant constructor" in
let title () = "" in
let message () =
Format.asprintf "constant constructors are not supported yet" in
Format.asprintf "\nConstant constructors are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
@ -79,11 +79,11 @@ module Errors = struct
error ~data title message
let corner_case ~loc message =
let title () = "corner case" in
let content () = "We don't have a good error message for this case. \
let title () = "\nCorner case" in
let content () = "We do not have a good error message for this case. \
We are striving find ways to better report them and \
find the use-cases that generate them. \
Please report this to the developers." in
Please report this to the developers.\n" in
let data = [
("location" , fun () -> loc) ;
("message" , fun () -> message) ;
@ -91,9 +91,9 @@ module Errors = struct
error ~data title content
let unknown_predefined_type name =
let title () = "type constants" in
let title () = "\nType constants" in
let message () =
Format.asprintf "unknown predefined type \"%s\"" name.Region.value in
Format.asprintf "Unknown predefined type \"%s\".\n" name.Region.value in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region)
@ -101,10 +101,10 @@ module Errors = struct
error ~data title message
let unsupported_non_var_pattern p =
let title () = "pattern is not a variable" in
let title () = "" in
let message () =
Format.asprintf "non-variable patterns in constructors \
are not supported yet" in
Format.asprintf "\nNon-variable patterns in constructors \
are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
@ -113,9 +113,10 @@ module Errors = struct
error ~data title message
let only_constructors p =
let title () = "constructors in patterns" in
let title () = "" in
let message () =
Format.asprintf "currently, only constructors are supported in patterns" in
Format.asprintf "\nCurrently, only constructors \
are supported in patterns.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
@ -124,9 +125,9 @@ module Errors = struct
error ~data title message
let unsupported_tuple_pattern p =
let title () = "tuple pattern" in
let title () = "" in
let message () =
Format.asprintf "tuple patterns are not supported yet" in
Format.asprintf "\nTuple patterns are not supported yet.\n" in
let pattern_loc = Raw.pattern_to_region p in
let data = [
("location",
@ -139,10 +140,10 @@ module Errors = struct
error ~data title message
let unsupported_deep_Some_patterns pattern =
let title () = "option patterns" in
let title () = "" in
let message () =
Format.asprintf "currently, only variables in Some constructors \
in patterns are supported" in
Format.asprintf "\nCurrently, only variables in constructors \
\"Some\" in patterns are supported.\n" in
let pattern_loc = Raw.pattern_to_region pattern in
let data = [
("location",
@ -151,10 +152,10 @@ module Errors = struct
error ~data title message
let unsupported_deep_list_patterns cons =
let title () = "lists in patterns" in
let title () = "" in
let message () =
Format.asprintf "currently, only empty lists and x::y \
are supported in patterns" in
Format.asprintf "\nCurrently, only empty lists and x::y \
are supported in patterns.\n" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ cons.Region.region)
@ -164,7 +165,7 @@ module Errors = struct
(* Logging *)
let simplifying_instruction t =
let title () = "simplifiying instruction" in
let title () = "\nSimplifiying instruction" in
let message () = "" in
(** TODO: The labelled arguments should be flowing from the CLI. *)
let data = [
@ -1185,7 +1186,7 @@ and simpl_for_int : Raw.for_int -> (_ -> expression result) result = fun fi ->
- references to the iterated value ==> variable `#COMPILER#elt_X`
Note: In the case of an inner loop capturing variable from an outer loop
the free variable name can be `#COMPILER#acc.Y` and because we do not
capture the accumulator record in the inner loop, we don't want to
capture the accumulator record in the inner loop, we do not want to
generate `#COMPILER#acc.#COMPILER#acc.Y` but `#COMPILER#acc.Y`
5) Append the return value to the body
@ -1321,12 +1322,9 @@ and simpl_for_collect : Raw.for_collect -> (_ -> expression result) result = fun
| None -> e_skip ()
| Some seq -> e_let_in (Var.of_name "#COMPILER#folded_record", None) false fold seq in (* TODO fresh *)
return_statement @@ final_sequence
(*
and simpl_declaration : Raw.declaration -> declaration Location.wrap result =
*)
and simpl_declaration_list declarations :
Ast_simplified.declaration Location.wrap list result =
Ast_simplified.declaration Location.wrap list result =
let open Raw in
let rec hook acc = function
[] -> acc
@ -1387,8 +1385,7 @@ and simpl_declaration_list declarations :
Declaration_constant (name, ty_opt, inline, expr) in
let res = Location.wrap ~loc new_decl in
hook (bind_list_cons res acc) declarations
in
hook (ok @@ []) (List.rev declarations)
in hook (ok @@ []) (List.rev declarations)
let simpl_program : Raw.ast -> program result =
fun t -> simpl_declaration_list @@ nseq_to_list t.decl

View File

@ -1,52 +1,45 @@
// Test a PascaLIGO function which takes another PascaLIGO function as an argument
function foobar (const i : int) : int is
block {
function foo (const i : int) : int is
i ;
function bar (const f : int -> int) : int is
f ( i ) ;
} with bar (foo) ;
begin
function foo (const i : int) : int is i;
function bar (const f : int -> int) : int is f (i);
end with bar (foo);
// higher order function with more than one argument
function higher2(const i: int; const f: int -> int): int is
block {
const ii: int = f(i)
} with ii
function higher2(const i : int; const f : int -> int): int is
begin
const ii: int = f (i)
end with ii
function foobar2 (const i : int) : int is
block {
function foo2 (const i : int) : int is
i;
} with higher2(i,foo2)
begin
function foo2 (const i : int) : int is i
end with higher2 (i,foo2)
const a : int = 0;
function foobar3 (const i : int) : int is
block {
function foo2 (const i : int) : int is
(a+i);
} with higher2(i,foo2)
begin
function foo2 (const i : int) : int is a+i
end with higher2 (i,foo2)
function f (const i : int) : int is
i
function f (const i : int) : int is i
function g (const i : int) : int is
f(i)
function g (const i : int) : int is f (i)
function foobar4 (const i : int) : int is
g(g(i))
function foobar4 (const i : int) : int is g (g (i))
function higher3(const i: int; const f: int -> int; const g: int -> int): int is
block {
const ii: int = f(g(i));
} with ii
function higher3(const i : int; const f : int -> int; const g : int -> int)
: int is
begin
const ii : int = f(g(i))
end with ii
function foobar5 (const i : int) : int is
block {
begin
const a : int = 0;
function foo (const i : int) : int is
(a+i);
function goo (const i : int) : int is
foo(i);
} with higher3(i,foo,goo)
function foo (const i : int) : int is a+i;
function goo (const i : int) : int is foo (i)
end with higher3(i,foo,goo)
function foobar6 (const i : int) : (int->int) is f
function foobar6 (const i : int) : int -> int is f

0
vendors/Preproc/.EMain.tag vendored Normal file
View File

0
vendors/Preproc/.Eparser.mly.tag vendored Normal file
View File

0
vendors/Preproc/.ProcMain.tag vendored Normal file
View File

1
vendors/Preproc/.links vendored Normal file
View File

@ -0,0 +1 @@
$HOME/git/OCaml-build/Makefile

33
vendors/Preproc/EMain.ml vendored Normal file
View File

@ -0,0 +1,33 @@
(* This module is only used for testing modules [Escan] and [Eparser]
as units *)
(* Standalone driver: each submodule exposes a [run] that operates on
the file named by [Sys.argv.(1)]. *)
module Lexer = struct
(* Trace the lexer ([Escan.trace]) on the file given as the sole CLI
argument; print a usage message on any other argument count. *)
let run () =
match Array.length Sys.argv with
2 -> Escan.trace Sys.argv.(1)
| _ -> prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")
end
module Parser = struct
(* Parse one preprocessor boolean expression from the given file,
evaluate it in the empty environment and print the resulting
boolean. Lexing, parsing and evaluation errors are reported on
stderr via [Error.print]; a failure to open the file prints the
system message. The input channel is closed in all non-raising
paths. *)
let run () =
if Array.length Sys.argv = 2
then
match open_in Sys.argv.(1) with
exception Sys_error msg -> prerr_endline msg
| cin ->
let buffer = Lexing.from_channel cin in
let open Error in
let () =
try
(* [Escan.token] feeds tokens to the Menhir-generated entry. *)
let tree = Eparser.pp_expression Escan.token buffer in
let value = Preproc.(eval Env.empty tree)
in (print_string (string_of_bool value);
print_newline ())
(* [Lexer]/[Parser] here are the exceptions from [Error],
brought into scope by [let open Error] above. *)
with Lexer diag -> print "Lexical" diag
| Parser diag -> print "Syntactical" diag
| Eparser.Error -> print "" ("Parse", mk_seg buffer, 1)
in close_in cin
else prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")
end
(* Entry point: only the parser test is run here; [Lexer.run] is kept
for manual unit testing of the scanner. *)
let _ = Parser.run()

50
vendors/Preproc/Eparser.mly vendored Normal file
View File

@ -0,0 +1,50 @@
%{
(* Grammar for boolean expressions in preprocessing directives of C# *)
%}

%token True False
%token <string> Ident
%token OR AND EQ NEQ NOT EOL LPAR RPAR

(* Entries *)

%start pp_expression
%type <Etree.t> pp_expression

%%

(* Grammar *)

(* Operator precedence is encoded structurally, one non-terminal per
   level, from lowest ([||]) to highest ([!] and primary
   expressions). All binary operators are left-associative. *)

pp_expression:
  e=pp_or_expression EOL { e }

pp_or_expression:
  e=pp_and_expression { e }
| e1=pp_or_expression OR e2=pp_and_expression {
    Etree.Or (e1,e2)
  }

pp_and_expression:
  e=pp_equality_expression { e }
(* Fix: the right operand of [&&] is an equality expression, not a
   unary one, so that e.g. [a && b == c] parses as [a && (b == c)],
   as in the C# preprocessor grammar (ECMA-334, pp-and-expression).
   The previous right-hand side [pp_unary_expression] rejected such
   inputs; every previously accepted input is still accepted. *)
| e1=pp_and_expression AND e2=pp_equality_expression {
    Etree.And (e1,e2)
  }

pp_equality_expression:
  e=pp_unary_expression { e }
| e1=pp_equality_expression EQ e2=pp_unary_expression {
    Etree.Eq (e1,e2)
  }
| e1=pp_equality_expression NEQ e2=pp_unary_expression {
    Etree.Neq (e1,e2)
  }

pp_unary_expression:
  e=pp_primary_expression { e }
| NOT e=pp_unary_expression { Etree.Not e }

pp_primary_expression:
  True                          { Etree.True }
| False                         { Etree.False }
| id=Ident                      { Etree.Ident id }
| LPAR e=pp_or_expression RPAR  { e }

31
vendors/Preproc/Error.ml vendored Normal file
View File

@ -0,0 +1,31 @@
(* This module provides support for managing and printing errors when
preprocessing C# source files. *)
(* An error message; [print] below requires it to be non-empty. *)
type message = string
(* Start and end positions of the offending source segment. *)
type start = Lexing.position
type stop = Lexing.position
type seg = start * stop
(* [mk_seg buffer] captures the segment spanned by the lexeme most
recently matched in [buffer]. *)
let mk_seg buffer =
Lexing.(lexeme_start_p buffer, lexeme_end_p buffer)
(* A line number; presumably a "virtual" line, distinct from the
physical [pos_lnum] tracked by [Lexing] — TODO confirm (e.g. after
#line-style directives). *)
type vline = int
(* Errors from the scanner and the parser, respectively, carrying a
message, the offending segment and the virtual line of its end. *)
exception Lexer of (message * seg * vline)
exception Parser of (message * seg * vline)
(* [print kind diag] reports [diag] on stderr. [kind] labels the error
("Lexical", "Syntactical", ...); when [kind] is empty, [msg] itself
serves as the heading (hence the assertion that [msg] is non-empty).
[vend] is the virtual line of the segment end; [delta] is the offset
between virtual and physical line numbering, used to translate the
physical start line into a virtual one. *)
let print (kind: string) (msg, (start, stop), vend) =
let open Lexing in
let delta = vend - stop.pos_lnum in
let vstart = start.pos_lnum + delta
in assert (msg <> "");
prerr_endline
((if kind = "" then msg else kind) ^ " error at line "
^ string_of_int vstart ^ ", char "
(* [pos_cnum - pos_bol] is the 0-based column on the line. *)
^ string_of_int (start.pos_cnum - start.pos_bol)
^ (if stop.pos_lnum = start.pos_lnum
(* Same physical line: print a column range "c1--c2". *)
then "--" ^ string_of_int (stop.pos_cnum - stop.pos_bol)
else " to line " ^ string_of_int vend
^ ", char "
^ string_of_int (stop.pos_cnum - stop.pos_bol))
^ (if kind = "" then "." else ":\n" ^ msg))

95
vendors/Preproc/Escan.mll vendored Normal file
View File

@ -0,0 +1,95 @@
{
(* Auxiliary scanner for boolean expressions of the C# preprocessor *)
(* Concrete syntax of tokens. See module [Eparser]. *)
(* [string_of_token t] is the concrete syntax of token [t]; the
   token [EOL] has no concrete syntax and is printed as "EOL". *)
let string_of_token =
let open Eparser
in function True -> "true"
| False -> "false"
| Ident id -> id
| OR -> "||"
| AND -> "&&"
| EQ -> "=="
| NEQ -> "!="
| NOT -> "!"
| LPAR -> "("
| RPAR -> ")"
| EOL -> "EOL"
}
(* Regular expressions for literals *)
(* White space *)
let newline = '\n' | '\r' | "\r\n"
let blank = ' ' | '\t'
(* Unicode escape sequences *)
let digit = ['0'-'9']
let hexdigit = digit | ['A'-'F' 'a'-'f']
let four_hex = hexdigit hexdigit hexdigit hexdigit
let uni_esc = "\\u" four_hex | "\\U" four_hex four_hex
(* Identifiers *)
let lowercase = ['a'-'z']
let uppercase = ['A'-'Z']
let letter = lowercase | uppercase | uni_esc
let start = '_' | letter
let alphanum = letter | digit | '_'
let ident = start alphanum*
(* Rules *)
(* [token] scans one token. Both the end of line and the end of file
   are mapped to [EOL]; "//" comments are skipped up to the end of
   the line. Invalid characters raise [Error.Lexer]. *)
rule token = parse
blank+ { token lexbuf }
| newline { Lexing.new_line lexbuf; Eparser.EOL }
| eof { Eparser.EOL }
| "true" { Eparser.True }
| "false" { Eparser.False }
| ident as id { Eparser.Ident id }
| '(' { Eparser.LPAR }
| ')' { Eparser.RPAR }
| "||" { Eparser.OR }
| "&&" { Eparser.AND }
| "==" { Eparser.EQ }
| "!=" { Eparser.NEQ }
| "!" { Eparser.NOT }
| "//" { inline_com lexbuf }
| _ as c { let code = Char.code c in
let msg = "Invalid character " ^ String.make 1 c
^ " (" ^ string_of_int code ^ ")."
in raise Error.(Lexer (msg, mk_seg lexbuf, 1))
}
(* Skips the rest of the line after "//", yielding [EOL]. *)
and inline_com = parse
newline { Lexing.new_line lexbuf; Eparser.EOL }
| eof { Eparser.EOL }
| _ { inline_com lexbuf }
{
(* Standalone lexer for debugging purposes. See module [Topexp]. *)
type filename = string
(* [trace name] tokenises the file [name], printing one token per
   line on [stdout] until the first [EOL]; lexical errors are
   reported on [stderr]. *)
let trace (name: filename) =
match open_in name with
cin ->
let buffer = Lexing.from_channel cin
and cout = stdout in
let rec iter () =
match token buffer with
Eparser.EOL -> close_in cin; close_out cout
| t -> begin
output_string cout (string_of_token t);
output_string cout "\n";
flush cout;
iter ()
end
| exception Error.Lexer diag -> Error.print "Lexical" diag
in iter ()
| exception Sys_error msg -> prerr_endline msg
}

28
vendors/Preproc/Etree.ml vendored Normal file
View File

@ -0,0 +1,28 @@
(* This module defines and exports the type [t] of conditional
expressions of C# directives.
To avoid over-engineering, we moved the definition of the function
[eval] below into the module [Preproc] itself.
*)
(* Abstract syntax of boolean expressions in #if/#elif directives. *)
type t =
Or of t * t
| And of t * t
| Eq of t * t
| Neq of t * t
| Not of t
| True
| False
| Ident of string
(*
(* Kept for reference; the live version is in [Preproc]. Note that
   it uses the physical inequality [!=] — see [Preproc.eval]. *)
let rec eval env = function
Or (e1,e2) -> eval env e1 || eval env e2
| And (e1,e2) -> eval env e1 && eval env e2
| Eq (e1,e2) -> eval env e1 = eval env e2
| Neq (e1,e2) -> eval env e1 != eval env e2
| Not e -> not (eval env e)
| True -> true
| False -> false
| Ident id -> Preproc.Env.mem id env
*)

21
vendors/Preproc/LICENSE vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Christian Rinderknecht
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

4
vendors/Preproc/Makefile.cfg vendored Normal file
View File

@ -0,0 +1,4 @@
# Configuration of the build for this directory.
SHELL := dash
BFLAGS := -strict-sequence -w +A-48-4
# Uncomment to build with the profiling compilers instead.
#OCAMLC := ocamlcp
#OCAMLOPT := ocamloptp

585
vendors/Preproc/Preproc.mll vendored Normal file
View File

@ -0,0 +1,585 @@
(* Preprocessor for C#, to be processed by [ocamllex]. *)
{
(* STRING PROCESSING *)
(* [mk_str len p] ("make string") is the string of length [len] whose
   characters are those of the list [p] in reverse order: for
   instance, [mk_str 3 ['c';'b';'a'] = "abc"]. It is a checked
   precondition that [len = List.length p]. *)
let mk_str (len: int) (p: char list) : string =
  let () = assert (len = List.length p) in
  let bytes = Bytes.make len ' ' in
  (* The i-th character of [p] lands at mirror position [len-1-i]. *)
  List.iteri (fun i c -> Bytes.set bytes (len - 1 - i) c) p;
  Bytes.to_string bytes
(* [explode s acc] is the list made by pushing the characters of [s],
   first to last, on top of [acc]; the characters of [s] therefore
   appear in reverse order on top of the result. For example,
   [explode "ba" ['c';'d'] = ['a'; 'b'; 'c'; 'd']]. *)
let explode s acc =
  let rec push i acc =
    if i = String.length s then acc
    else push (i+1) (s.[i] :: acc)
  in push 0 acc
(* ERROR HANDLING *)
(* [stop msg seg] aborts scanning by raising [Error.Lexer] with
   message [msg] about the source segment [seg] (the virtual line
   number is fixed to 1). *)
let stop msg seg = raise (Error.Lexer (msg, seg,1))
(* [fail msg buffer] is [stop] applied to the segment of the lexeme
   last matched in [buffer]. *)
let fail msg buffer = stop msg (Error.mk_seg buffer)
(* Errors raised without location information; [handle_err] turns
   them into located [Error.Lexer] errors. *)
exception Local_err of Error.message
let handle_err scan buffer =
try scan buffer with Local_err msg -> fail msg buffer
(* LEXING ENGINE *)
(* Copying the current lexeme to [stdout] *)
let copy buffer = print_string (Lexing.lexeme buffer)
(* End of lines *)
(* [handle_nl buffer] registers a new line in the position tracking
   of [buffer], then copies the matched end-of-line to [stdout]. *)
let handle_nl buffer = Lexing.new_line buffer; copy buffer
(* C# PREPROCESSOR DIRECTIVES *)
(* The type [mode] defines the two scanning modes of the preprocessor:
either we copy the current characters or we skip them. *)
type mode = Copy | Skip
(* Trace of directives
We keep track of directives #if, #elif, #else, #region and #endregion.
*)
(* Conditional directives seen so far, innermost first. [If] and
   [Elif] record the scanning mode in force before the directive. *)
type cond = If of mode | Elif of mode | Else | Region
type trace = cond list
(* The function [reduce_cond] is called when a #endif directive is
found, and the trace (see type [trace] above) needs updating. *)
(* It pops entries until the matching [If], returning the shortened
   trace together with the mode recorded there; a [Region] on the
   way means improper nesting. *)
let rec reduce_cond seg = function
[] -> stop "Dangling #endif." seg
| If mode::trace -> trace, mode
| Region::_ -> stop "Invalid scoping of #region" seg
| _::trace -> reduce_cond seg trace
(* The function [reduce_reg] is called when a #endregion directive is
read, and the trace needs updating. *)
let reduce_reg seg = function
[] -> stop "Dangling #endregion." seg
| Region::trace -> trace
| _ -> stop "Invalid scoping of #endregion" seg
(* The function [extend] is called when encountering conditional
directives #if, #else and #elif. As its name suggests, it extends
the current trace with the current conditional directive, whilst
performing some validity checks. *)
let extend seg cond trace =
match cond, trace with
If _, Elif _::_ ->
stop "Directive #if cannot follow #elif." seg
| Else, Else::_ ->
stop "Directive #else cannot follow #else." seg
| Else, [] ->
stop "Dangling #else." seg
| Elif _, Else::_ ->
stop "Directive #elif cannot follow #else." seg
| Elif _, [] ->
stop "Dangling #elif." seg
| _ -> cond::trace
(* The function [last_mode] seeks the last mode as recorded in the
trace (see type [trace] above). *)
(* Precondition: the trace contains at least one [If] or [Elif]. *)
let rec last_mode = function
[] -> assert false
| (If mode | Elif mode)::_ -> mode
| _::trace -> last_mode trace
(* Line offsets

   [Inline] means that the current location cannot be reached from
   the start of the line with white space only ([Prefix 0] is
   treated the same way). [Prefix n] (with [n > 0]) means that the
   current location can be reached from the start of the line with
   [n] blanks of padding. The distinction is needed because
   preprocessor directives cannot occur inside lines. *)

type offset = Prefix of int | Inline

(* [expand offset] prints on [stdout] the white-space padding
   recorded in [offset], if any. *)

let expand = function
  Inline | Prefix 0 -> ()
| Prefix padding -> print_string (String.make padding ' ')
(* Directives *)
(* The complete list of directive keywords recognised by the rule
   [scan]; anything else after '#' is a lexing error. *)
let directives = [
"if"; "else"; "elif"; "endif"; "define"; "undef";
"error"; "warning"; "line"; "region"; "endregion";
"include"]
(* Environments and preprocessor expressions

   The evaluation of conditional directives may involve symbols whose
   value may be defined using #define directives, or undefined by
   means of #undef. Therefore, we need to evaluate conditional
   expressions in an environment made of a set of defined symbols.

   Note that we rely on an external lexer and parser for the
   conditional expressions. See modules [Escan] and [Eparser]. *)

module Env = Set.Make (String)

(* [eval env exp] evaluates the boolean expression [exp] in the
   environment [env] (the set of #define'd symbols): an identifier
   is true if, and only if, it belongs to [env]. *)

let rec eval env =
  let open Etree
  in function
    Or (e1,e2) -> eval env e1 || eval env e2
  | And (e1,e2) -> eval env e1 && eval env e2
  | Eq (e1,e2) -> eval env e1 = eval env e2
    (* Fix: structural inequality [<>] instead of the physical
       inequality [!=] used previously; the two only happened to
       coincide here because both operands are booleans. *)
  | Neq (e1,e2) -> eval env e1 <> eval env e2
  | Not e -> not (eval env e)
  | True -> true
  | False -> false
  | Ident id -> Env.mem id env

(* [expr env buffer] parses one conditional expression from [buffer]
   and returns the scanning mode it denotes: [Copy] if it evaluates
   to true in [env], and [Skip] otherwise. *)

let expr env buffer =
  let tree = Eparser.pp_expression Escan.token buffer
  in if eval env tree then Copy else Skip
(* END OF HEADER *)
}
(* REGULAR EXPRESSIONS *)
(* White space *)
let nl = '\n' | '\r' | "\r\n"
let blank = ' ' | '\t'
(* Integers *)
let int_suf = 'U' | 'u' | 'L' | 'l' | "UL" | "Ul" | "uL"
| "ul" | "LU" | "Lu" | "lU" | "lu"
let digit = ['0'-'9']
let dec = digit+ int_suf?
let hexdigit = digit | ['A'-'F' 'a'-'f']
let hex_pre = "0x" | "0X"
let hexa = hex_pre hexdigit+ int_suf?
let integer = dec | hexa
(* Unicode escape sequences *)
let four_hex = hexdigit hexdigit hexdigit hexdigit
let uni_esc = "\\u" four_hex | "\\U" four_hex four_hex
(* Identifiers *)
let lowercase = ['a'-'z']
let uppercase = ['A'-'Z']
let letter = lowercase | uppercase | uni_esc
let start = '_' | letter
let alphanum = letter | digit | '_'
let ident = start alphanum*
(* Real (floating-point) literals *)
let decimal = digit+
let exponent = ['e' 'E'] ['+' '-']? decimal
let real_suf = ['F' 'f' 'D' 'd' 'M' 'm']
let real = (decimal? '.')? decimal exponent? real_suf?
(* Characters *)
let single = [^ '\n' '\r']
let esc = "\\'" | "\\\"" | "\\\\" | "\\0" | "\\a" | "\\b" | "\\f"
| "\\n" | "\\r" | "\\t" | "\\v"
let hex_esc = "\\x" hexdigit hexdigit? hexdigit? hexdigit?
let character = single | esc | hex_esc | uni_esc
let char = "'" character "'"
(* Directives *)
(* [space] binds the blanks between '#' and the keyword, and [id]
   binds the keyword itself; both bindings are used in [scan]. *)
let directive = '#' (blank* as space) (ident as id)
(* Rules *)
(* The rule [scan] scans the input buffer for directives, strings,
comments, blanks, new lines and end of file characters. As a
result, either the matched input is copied to [stdout] or not,
depending on the compilation directives. If not copied, new line
characters are output.
Scanning is triggered by the function call [scan env mode offset
trace lexbuf], where [env] is the set of defined symbols
(introduced by `#define'), [mode] specifies whether we are copying
or skipping the input, [offset] informs about the location in the
line (either there is a prefix of blanks, or at least a non-blank
character has been read), and [trace] is the stack of conditional
directives read so far.
The first call is [scan Env.empty Copy (Prefix 0) []], meaning that
we start with an empty environment, that copying the input is
enabled by default, and that we are at the start of a line and no
previous conditional directives have been read yet.
When an "#if" is matched, the trace is extended by the call [extend
lexbuf (If mode) trace], during the evaluation of which the
syntactic validity of having encountered an "#if" is checked (for
example, it would be invalid had an "#elif" been last read). Note
that the current mode is stored in the trace with the current
directive -- that mode may be later restored (see below for some
examples). Moreover, the directive would be deemed invalid if its
current position in the line (that is, its offset) were not
preceded by blanks or nothing, otherwise the rule [expr] is called
to scan the boolean expression associated with the "#if": if it
evaluates to [true], the result is [Copy], meaning that we may copy
what follows, otherwise skip it -- the actual decision depending on
the current mode. That new mode is used if we were in copy mode,
and the offset is reset to the start of a new line (as we read a
new line in [expr]); otherwise we were in skipping mode and the
value of the conditional expression must be ignored (but not its
syntax), and we continue skipping the input.
When an "#else" is matched, the trace is extended with [Else],
then, if the directive is not at a wrong offset, the rest of the
line is scanned with [pp_newline]. If we were in copy mode, the new
mode toggles to skipping mode; otherwise, the trace is searched for
the last encountered "#if" or "#elif" and the associated mode is
restored.
The case "#elif" is the result of the fusion (in the technical
sense) of the code for dealing with an "#else" followed by an
"#if".
When an "#endif" is matched, the trace is reduced, that is, all
conditional directives are popped until an [If mode'] is found and
[mode'] is restored as the current mode.
Consider the following four cases, where the modes (Copy/Skip) are
located between the lines:
Copy ----+ Copy ----+
#if true | #if true |
Copy | Copy |
#else | #else |
+-- Skip --+ | +-- Skip --+ |
#if true | | | #if false | | |
| Skip | | | Skip | |
#else | | | #else | | |
+-> Skip | | +-> Skip | |
#endif | | #endif | |
Skip <-+ | Skip <-+ |
#endif | #endif |
Copy <---+ Copy <---+
+-- Copy ----+ Copy --+-+
#if false | | #if false | |
| Skip | Skip | |
#else | | #else | |
+-> Copy --+ | +-+-- Copy <-+ |
#if true | | #if false | | |
Copy | | | | Skip |
#else | | #else | | |
Skip | | | +-> Copy |
#endif | | #endif | |
Copy <-+ | +---> Copy |
#endif | #endif |
Copy <---+ Copy <---+
The following four cases feature #elif. Note that we put between
brackets the mode saved for the #elif, which is sometimes restored
later.
Copy --+ Copy --+
#if true | #if true |
Copy | Copy |
#elif true +--[Skip] | #elif false +--[Skip] |
| Skip | | Skip |
#else | | #else | |
+-> Skip | +-> Skip |
#endif | #endif |
Copy <-+ Copy <-+
+-- Copy --+-+ +-- Copy ----+
#if false | | | #if false | |
| Skip | | | Skip |
#elif true +->[Copy] | | #elif false +->[Copy]--+ |
Copy <-+ | Skip | |
#else | #else | |
Skip | Copy <-+ |
#endif | #endif |
Copy <---+ Copy <---+
Note how "#elif" indeed behaves like an "#else" followed by an
"#if", and the mode stored with the data constructor [Elif]
corresponds to the mode before the virtual "#if".
Important note: Comments and strings are recognised as such only in
copy mode, which is a different behaviour from the preprocessor of
GNU GCC, which always does.
*)
(* Main scanning rule: copies or skips the input according to [mode],
   dispatching on preprocessing directives. See the specification in
   the large comment above. On success at end of file, the result is
   the pair of the final environment and the (empty) trace. *)
rule scan env mode offset trace = parse
nl { handle_nl lexbuf;
scan env mode (Prefix 0) trace lexbuf }
| blank { match offset with
Prefix n -> scan env mode (Prefix (n+1)) trace lexbuf
| Inline -> copy lexbuf;
scan env mode Inline trace lexbuf }
| directive {
(* Directives must be known and must start the line (possibly
   after blanks only). *)
if not (List.mem id directives)
then fail "Invalid preprocessing directive." lexbuf
else if offset = Inline
then fail "Directive invalid inside line." lexbuf
else let seg = Error.mk_seg lexbuf in
match id with
(* Textual inclusion: the included file is copied to [stdout],
   bracketed by cpp-style line markers ("# <line> "<file>")
   so positions can be mapped back to the right file. *)
"include" ->
let curr_line = Lexing.(lexbuf.lex_curr_p.pos_lnum)
and curr_file = Lexing.(lexbuf.lex_curr_p.pos_fname)
|> Filename.basename
and incl_file = scan_inclusion lexbuf in
let incl_buffer =
open_in incl_file |> Lexing.from_channel in
Printf.printf "# 1 \"%s\" 1\n" incl_file;
cat incl_buffer;
Printf.printf "# %i \"%s\" 2\n" (curr_line+1) curr_file;
scan env mode offset trace lexbuf
(* Conditionals: the previous mode is saved in the trace and
   restored by #else/#elif/#endif (see the comment above). *)
| "if" ->
let mode' = expr env lexbuf in
let new_mode = if mode = Copy then mode' else Skip in
let trace' = extend seg (If mode) trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "else" ->
let () = pp_newline lexbuf in
let new_mode =
if mode = Copy then Skip else last_mode trace in
let trace' = extend seg Else trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "elif" ->
let mode' = expr env lexbuf in
let trace', new_mode =
match mode with
Copy -> extend seg (Elif Skip) trace, Skip
| Skip -> let old_mode = last_mode trace
in extend seg (Elif old_mode) trace,
if old_mode = Copy then mode' else Skip
in scan env new_mode (Prefix 0) trace' lexbuf
| "endif" ->
let () = pp_newline lexbuf in
let trace', new_mode = reduce_cond seg trace
in scan env new_mode (Prefix 0) trace' lexbuf
(* Symbol (un)definitions: "true" and "false" are reserved
   and redefinitions are rejected. *)
| "define" ->
let id, seg = ident env lexbuf
in if id="true" || id="false"
then let msg = "Symbol \"" ^ id ^ "\" cannot be defined."
in stop msg seg
else if Env.mem id env
then let msg = "Symbol \"" ^ id
^ "\" was already defined."
in stop msg seg
else scan (Env.add id env) mode (Prefix 0) trace lexbuf
| "undef" ->
let id, _ = ident env lexbuf
in scan (Env.remove id env) mode (Prefix 0) trace lexbuf
(* User diagnostics: #error aborts by raising [Error.Lexer];
   #warning prints on [stderr] and scanning resumes. *)
| "error" ->
stop (message [] lexbuf) seg
| "warning" ->
let start_p, end_p = seg in
let msg = message [] lexbuf in
let open Lexing
in prerr_endline
("Warning at line " ^ string_of_int start_p.pos_lnum
^ ", char "
^ string_of_int (start_p.pos_cnum - start_p.pos_bol)
^ "--" ^ string_of_int (end_p.pos_cnum - end_p.pos_bol)
^ ":\n" ^ msg);
scan env mode (Prefix 0) trace lexbuf
(* Regions are checked for proper nesting, and the directive
   lines themselves are echoed to [stdout]. *)
| "region" ->
let msg = message [] lexbuf
in expand offset;
print_endline ("#" ^ space ^ "region" ^ msg);
scan env mode (Prefix 0) (Region::trace) lexbuf
| "endregion" ->
let msg = message [] lexbuf
in expand offset;
print_endline ("#" ^ space ^ "endregion" ^ msg);
scan env mode (Prefix 0) (reduce_reg seg trace) lexbuf
| "line" ->
expand offset;
print_string ("#" ^ space ^ "line");
line_ind lexbuf;
scan env mode (Prefix 0) trace lexbuf
| _ -> assert false (* Unreachable: [id] was checked above. *)
}
| eof { match trace with
[] -> expand offset; flush stdout; (env, trace)
| _ -> fail "Missing #endif." lexbuf }
(* Strings and comments are recognised as such only in copy mode
   (cf. the "Important note" in the comment above). *)
| '"' { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_norm_str lexbuf
end;
scan env mode Inline trace lexbuf }
| "@\"" { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_verb_str lexbuf
end;
scan env mode Inline trace lexbuf }
| "//" { if mode = Copy then begin
expand offset; copy lexbuf;
in_line_com mode lexbuf
end;
scan env mode Inline trace lexbuf }
| "/*" { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_block_com lexbuf
end;
scan env mode Inline trace lexbuf }
| _ { if mode = Copy then (expand offset; copy lexbuf);
scan env mode Inline trace lexbuf }
(* Support for #define and #undef *)
(* [ident] skips leading blanks, reads one identifier (with its
   segment) and then checks, via [pp_newline], that only blanks or a
   single-line comment remain on the line. *)
and ident env = parse
blank* { let r = __ident env lexbuf
in pp_newline lexbuf; r }
and __ident env = parse
ident as id { id, Error.mk_seg lexbuf }
(* Line indicator (#line) *)
and line_ind = parse
blank* as space { print_string space; line_indicator lexbuf }
(* A line indicator is either a line number or one of the keywords
   "default" or "hidden". *)
and line_indicator = parse
decimal as ind {
print_string ind;
end_indicator lexbuf
}
| ident as id {
match id with
"default" | "hidden" ->
print_endline (id ^ message [] lexbuf)
| _ -> fail "Invalid line indicator." lexbuf
}
| nl | eof { fail "Line indicator expected." lexbuf }
(* After the line number: optionally a file name string and/or a
   single-line comment, then the end of the line. *)
and end_indicator = parse
blank* nl { copy lexbuf; handle_nl lexbuf }
| blank* eof { copy lexbuf }
| blank* "//" { copy lexbuf; print_endline (message [] lexbuf) }
| blank+ '"' { copy lexbuf;
handle_err in_norm_str lexbuf;
opt_line_com lexbuf }
| _ { fail "Line comment or blank expected." lexbuf }
and opt_line_com = parse
nl { handle_nl lexbuf }
| eof { copy lexbuf }
| blank+ { copy lexbuf; opt_line_com lexbuf }
| "//" { print_endline ("//" ^ message [] lexbuf) }
(* New lines and verbatim sequence of characters *)
(* [pp_newline] consumes the rest of a directive line, allowing only
   blanks and an optional single-line comment before the newline. *)
and pp_newline = parse
nl { handle_nl lexbuf }
| blank+ { pp_newline lexbuf }
| "//" { in_line_com Skip lexbuf }
| _ { fail "Only a single-line comment allowed." lexbuf }
(* [message acc] reads characters up to the end of the line (or of
   the file) and returns them as a string (via [mk_str]). *)
and message acc = parse
nl { Lexing.new_line lexbuf;
mk_str (List.length acc) acc }
| eof { mk_str (List.length acc) acc }
| _ as c { message (c::acc) lexbuf }
(* Comments *)
and in_line_com mode = parse
nl { handle_nl lexbuf }
| eof { flush stdout }
| _ { if mode = Copy then copy lexbuf; in_line_com mode lexbuf }
and in_block_com = parse
nl { handle_nl lexbuf; in_block_com lexbuf }
| "*/" { copy lexbuf }
| eof { raise (Local_err "Unterminated comment.") }
| _ { copy lexbuf; in_block_com lexbuf }
(* Include a file *)
(* [cat] copies the whole of its buffer to [stdout]. *)
and cat = parse
eof { () }
| _ { copy lexbuf; cat lexbuf }
(* Included filename *)
and scan_inclusion = parse
blank+ { scan_inclusion lexbuf }
| '"' { handle_err (in_inclusion [] 0) lexbuf }
and in_inclusion acc len = parse
'"' { mk_str len acc }
| nl { fail "Newline invalid in string." lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ as c { in_inclusion (c::acc) (len+1) lexbuf }
(* Strings *)
(* Ordinary strings: an escaped quote does not close the string;
   newlines are invalid. *)
and in_norm_str = parse
"\\\"" { copy lexbuf; in_norm_str lexbuf }
| '"' { copy lexbuf }
| nl { fail "Newline invalid in string." lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ { copy lexbuf; in_norm_str lexbuf }
(* Verbatim strings (@"..."): quotes are escaped by doubling and
   newlines are allowed. *)
and in_verb_str = parse
"\"\"" { copy lexbuf; in_verb_str lexbuf }
| '"' { copy lexbuf }
| nl { handle_nl lexbuf; in_verb_str lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ { copy lexbuf; in_verb_str lexbuf }
{
(* The function [lex] is a wrapper of [scan], which also checks that
the trace is empty at the end. Note that we discard the
environment at the end. *)
let lex buffer =
let _env, trace = scan Env.empty Copy (Prefix 0) [] buffer
in assert (trace = [])
(* Exported definitions *)
type filename = string
(* [trace name] preprocesses the file [name] to [stdout], reporting
   lexical, syntactical and parse errors on [stderr]. The base name
   of [name] is recorded in the buffer's positions so that error
   messages refer to the right file. *)
let trace (name: filename) : unit =
match open_in name with
cin ->
let open Lexing in
let buffer = from_channel cin in
let pos_fname = Filename.basename name in
let () = buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
let open Error
in (try lex buffer with
Lexer diag -> print "Lexical" diag
| Parser diag -> print "Syntactical" diag
| Eparser.Error -> print "" ("Parse", mk_seg buffer, 1));
close_in cin; flush stdout
| exception Sys_error msg -> prerr_endline msg
}

5
vendors/Preproc/ProcMain.ml vendored Normal file
View File

@ -0,0 +1,5 @@
(* This is the entry point of the C# preprocessor. See [Makefile.cfg]. *)
(* Usage: one file name as the sole argument; the preprocessed output
   goes to [stdout] and diagnostics to [stderr]. *)
match Array.length Sys.argv with
2 -> Preproc.trace Sys.argv.(1)
| _ -> prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")

1
vendors/Preproc/README.md vendored Normal file
View File

@ -0,0 +1 @@
# A C# preprocessor in OCaml

23
vendors/Preproc/build.sh vendored Executable file
View File

@ -0,0 +1,23 @@
#!/bin/sh
# Builds the standalone binaries EMain.opt (expression parser) and
# ProcMain.opt (C# preprocessor) with ocamllex, menhir and ocamlfind.
set -x
ocamllex.opt Escan.mll
ocamllex.opt Preproc.mll
menhir -la 1 Eparser.mly
# NOTE(review): several modules below are compiled twice with the
# same flags; the second run of each pair looks redundant — confirm
# before removing.
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Etree.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Error.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Etree.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Error.ml
# Compile the parser interface first so dependants can be compiled
# against the .cmi, then regenerate the parser with type inference.
ocamlfind ocamlc -strict-sequence -w +A-48-4 -c Eparser.mli
camlcmd="ocamlfind ocamlc -I _i686 -strict-sequence -w +A-48-4 "
menhir --infer --ocamlc="$camlcmd" Eparser.mly
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Escan.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Eparser.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Preproc.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Escan.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Preproc.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c EMain.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c EMain.ml
ocamlfind ocamlopt -o EMain.opt Etree.cmx Eparser.cmx Error.cmx Escan.cmx Preproc.cmx EMain.cmx
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c ProcMain.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c ProcMain.ml
ocamlfind ocamlopt -o ProcMain.opt Etree.cmx Eparser.cmx Error.cmx Escan.cmx Preproc.cmx ProcMain.cmx

3
vendors/Preproc/clean.sh vendored Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
# Removes all OCaml build artefacts from this directory.
\rm -f *.cm* *.o *.byte *.opt

20
vendors/Preproc/dune vendored Normal file
View File

@ -0,0 +1,20 @@
; Generated lexers (ocamllex) and parser (menhir).
(ocamllex Escan Preproc)
(menhir
(modules Eparser))
; Library gathering the preprocessor modules.
(library
(name PreProc)
; (public_name ligo.preproc)
(wrapped false)
(modules Eparser Error Escan Etree Preproc))
; Standalone preprocessor driver (see ProcMain.ml).
(test
(modules ProcMain)
(libraries PreProc)
(name ProcMain))
; Standalone expression-parser driver (see EMain.ml).
(test
(modules EMain)
(libraries PreProc)
(name EMain))

View File

@ -539,8 +539,8 @@ let bind_smap (s:_ X_map.String.t) =
let aux k v prev =
prev >>? fun prev' ->
v >>? fun v' ->
ok @@ add k v' prev' in
fold aux s (ok empty)
ok @@ add k v' prev'
in fold aux s (ok empty)
let bind_fold_smap f init (smap : _ X_map.String.t) =
let aux k v prev =
@ -558,11 +558,11 @@ let bind_map_list f lst = bind_list (List.map f lst)
let rec bind_map_list_seq f lst = match lst with
| [] -> ok []
| hd :: tl -> (
| hd :: tl ->
let%bind hd' = f hd in
let%bind tl' = bind_map_list_seq f tl in
ok (hd' :: tl')
)
let bind_map_ne_list : _ -> 'a X_list.Ne.t -> 'b X_list.Ne.t result =
fun f lst -> bind_ne_list (X_list.Ne.map f lst)
let bind_iter_list : (_ -> unit result) -> _ list -> unit result =
@ -575,11 +575,8 @@ let bind_location (x:_ Location.wrap) =
let bind_map_location f x = bind_location (Location.map f x)
let bind_fold_list f init lst =
let aux x y =
x >>? fun x ->
f x y
in
List.fold_left aux (ok init) lst
let aux x y = x >>? fun x -> f x y
in List.fold_left aux (ok init) lst
module TMap(X : Map.OrderedType) = struct
module MX = Map.Make(X)
@ -587,8 +584,7 @@ module TMap(X : Map.OrderedType) = struct
let aux k v x =
x >>? fun x ->
f ~x ~k ~v
in
MX.fold aux map (ok init)
in MX.fold aux map (ok init)
let bind_map_Map f map =
let aux k v map' =
@ -596,33 +592,26 @@ module TMap(X : Map.OrderedType) = struct
f ~k ~v >>? fun v' ->
ok @@ MX.update k (function
| None -> Some v'
| Some _ -> failwith "key collision, shouldn't happen in bind_map_Map")
| Some _ ->
failwith "Key collision: Should not happen in bind_map_Map")
map'
in
MX.fold aux map (ok MX.empty)
in MX.fold aux map (ok MX.empty)
end
let bind_fold_pair f init (a,b) =
let aux x y =
x >>? fun x ->
f x y
in
List.fold_left aux (ok init) [a;b]
let aux x y = x >>? fun x -> f x y
in List.fold_left aux (ok init) [a;b]
let bind_fold_triple f init (a,b,c) =
let aux x y =
x >>? fun x ->
f x y
in
List.fold_left aux (ok init) [a;b;c]
let aux x y = x >>? fun x -> f x y
in List.fold_left aux (ok init) [a;b;c]
let bind_fold_map_list = fun f acc lst ->
let rec aux (acc , prev) f = function
| [] -> ok (acc , prev)
let bind_fold_map_list f acc lst =
let rec aux (acc, prev) f = function
| [] -> ok (acc, prev)
| hd :: tl ->
f acc hd >>? fun (acc' , hd') ->
aux (acc' , hd' :: prev) f tl
in
aux (acc', hd'::prev) f tl in
aux (acc , []) f lst >>? fun (acc' , lst') ->
ok @@ (acc' , List.rev lst')
@ -637,23 +626,18 @@ let bind_fold_map_right_list = fun f acc lst ->
ok lst'
let bind_fold_right_list f init lst =
let aux x y =
x >>? fun x ->
f x y
in
X_list.fold_right' aux (ok init) lst
let aux x y = x >>? fun x -> f x y
in X_list.fold_right' aux (ok init) lst
let bind_find_map_list error f lst =
let rec aux lst =
match lst with
| [] -> fail error
| hd :: tl -> (
| hd :: tl ->
match f hd with
| Error _ -> aux tl
| o -> o
)
in
aux lst
in aux lst
let bind_list_iter f lst =
let aux () y = f y in
@ -663,28 +647,29 @@ let bind_or (a, b) =
match a with
| Ok _ as o -> o
| _ -> b
let bind_map_or (fa , fb) c =
bind_or (fa c , fb c)
let bind_lr (type a b) ((a : a result), (b:b result)) : [`Left of a | `Right of b] result =
let bind_map_or (fa, fb) c = bind_or (fa c, fb c)
let bind_lr (type a b) ((a : a result), (b:b result))
: [`Left of a | `Right of b] result =
match (a, b) with
| (Ok _ as o), _ -> map (fun x -> `Left x) o
| _, (Ok _ as o) -> map (fun x -> `Right x) o
| _, Error b -> Error b
let bind_lr_lazy (type a b) ((a : a result), (b:unit -> b result)) : [`Left of a | `Right of b] result =
let bind_lr_lazy (type a b) ((a : a result), (b:unit -> b result))
: [`Left of a | `Right of b] result =
match a with
| Ok _ as o -> map (fun x -> `Left x) o
| _ -> (
match b() with
| Ok _ as o -> map (fun x -> `Right x) o
| Error b -> Error b
)
| _ -> match b() with
| Ok _ as o -> map (fun x -> `Right x) o
| Error b -> Error b
let bind_and (a, b) =
a >>? fun a ->
b >>? fun b ->
ok (a, b)
let bind_and3 (a, b, c) =
a >>? fun a ->
b >>? fun b ->
@ -692,18 +677,18 @@ let bind_and3 (a, b, c) =
ok (a, b, c)
let bind_pair = bind_and
let bind_map_pair f (a, b) =
bind_pair (f a, f b)
let bind_fold_map_pair f acc (a, b) =
f acc a >>? fun (acc' , a') ->
f acc' b >>? fun (acc'' , b') ->
ok (acc'' , (a' , b'))
let bind_map_triple f (a, b, c) =
bind_and3 (f a, f b, f c)
let bind_list_cons v lst =
lst >>? fun lst ->
ok (v::lst)
let bind_fold_map_pair f acc (a, b) =
f acc a >>? fun (acc', a') ->
f acc' b >>? fun (acc'', b') ->
ok (acc'', (a', b'))
let bind_map_triple f (a, b, c) = bind_and3 (f a, f b, f c)
let bind_list_cons v lst = lst >>? fun lst -> ok (v::lst)
let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x ->
match fs with
@ -716,29 +701,23 @@ let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x ->
(**
Wraps a call that might trigger an exception in a result.
*)
let generic_try err f =
try (
ok @@ f ()
) with _ -> fail err
let generic_try err f = try ok @@ f () with _ -> fail err
(**
Same, but with a handler that generates an error based on the exception,
rather than a fixed error.
*)
let specific_try handler f =
try (
ok @@ f ()
) with exn -> fail (handler exn)
try ok @@ f () with exn -> fail (handler exn)
(**
Same, but tailored to `Sys_error`s, found in `Sys` from `Pervasives`.
*)
let sys_try f =
let handler = function
| Sys_error str -> error (thunk "Sys_error") (fun () -> str)
| exn -> raise exn
in
specific_try handler f
Sys_error str -> error (thunk "Sys_error") (fun () -> str)
| exn -> raise exn
in specific_try handler f
(**
Same, but for a given command.
@ -746,53 +725,60 @@ let sys_try f =
let sys_command command =
sys_try (fun () -> Sys.command command) >>? function
| 0 -> ok ()
| n -> fail (fun () -> error (thunk "Nonzero return code") (fun () -> (string_of_int n)) ())
| n -> fail (fun () -> error (thunk "Nonzero return code.")
(fun () -> (string_of_int n)) ())
(**
Assertion module.
Would make sense to move it outside Trace.
*)
module Assert = struct
let assert_fail ?(msg="didn't fail") = function
| Ok _ -> simple_fail msg
| _ -> ok ()
let assert_fail ?(msg="Did not fail.") = function
Ok _ -> simple_fail msg
| _ -> ok ()
let assert_true ?(msg="not true") = function
| true -> ok ()
| false -> simple_fail msg
let assert_true ?(msg="Not true.") = function
true -> ok ()
| false -> simple_fail msg
let assert_equal ?msg expected actual =
assert_true ?msg (expected = actual)
let assert_equal_string ?msg expected actual =
let msg =
let default = Format.asprintf "Not equal string : expected \"%s\", got \"%s\"" expected actual in
X_option.unopt ~default msg in
assert_equal ~msg expected actual
let default =
Format.asprintf "Not equal string: Expected \"%s\", got \"%s\""
expected actual
in X_option.unopt ~default msg
in assert_equal ~msg expected actual
let assert_equal_int ?msg expected actual =
let msg =
let default = Format.asprintf "Not equal int : expected %d, got %d" expected actual in
X_option.unopt ~default msg in
assert_equal ~msg expected actual
let default =
Format.asprintf "Not equal int : expected %d, got %d"
expected actual
in X_option.unopt ~default msg
in assert_equal ~msg expected actual
let assert_equal_bool ?msg expected actual =
let msg =
let default = Format.asprintf "Not equal bool : expected %b, got %b" expected actual in
let default =
Format.asprintf "Not equal bool: expected %b, got %b"
expected actual in
X_option.unopt ~default msg in
assert_equal ~msg expected actual
let assert_none ?(msg="not a none") opt = match opt with
let assert_none ?(msg="Not a None value.") opt = match opt with
| None -> ok ()
| _ -> simple_fail msg
let assert_list_size ?(msg="lst doesn't have the right size") lst n =
let assert_list_size ?(msg="Wrong list size.") lst n =
assert_true ~msg List.(length lst = n)
let assert_list_empty ?(msg="lst isn't empty") lst =
let assert_list_empty ?(msg="Non-empty list.") lst =
assert_true ~msg List.(length lst = 0)
let assert_list_same_size ?(msg="lists don't have same size") a b =
let assert_list_same_size ?(msg="Lists with different lengths.") a b =
assert_true ~msg List.(length a = length b)
let assert_list_size_2 ~msg = function