[WIP] Refactoring the front-end.

This commit is contained in:
Christian Rinderknecht 2020-01-27 16:05:47 +01:00
parent 0131b0c23d
commit fc3385389b
31 changed files with 1392 additions and 487 deletions

View File

@ -4,7 +4,7 @@ maintainer: "ligolang@gmail.com"
authors: [ "Galfour" ]
homepage: "https://gitlab.com/ligolang/tezos"
bug-reports: "https://gitlab.com/ligolang/tezos/issues"
synopsis: "A higher-level language which compiles to Michelson"
synopsis: "A high-level language which compiles to Michelson"
dev-repo: "git+https://gitlab.com/ligolang/tezos.git"
license: "MIT"
depends: [
@ -21,6 +21,8 @@ depends: [
"yojson"
"alcotest" { with-test }
"getopt"
"terminal_size"
"pprint"
# work around upstream in-place update
"ocaml-migrate-parsetree" { = "1.4.0" }
]

View File

@ -19,7 +19,7 @@ let source_file n =
let open Arg in
let info =
let docv = "SOURCE_FILE" in
let doc = "$(docv) is the path to the .ligo or .mligo file of the contract." in
let doc = "$(docv) is the path to the smart contract file." in
info ~docv ~doc [] in
required @@ pos n (some string) None info
@ -42,7 +42,7 @@ let syntax =
let open Arg in
let info =
let docv = "SYNTAX" in
let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\" and \"cameligo\". By default, the syntax is guessed from the extension (.ligo and .mligo, respectively)." in
let doc = "$(docv) is the syntax that will be used. Currently supported syntaxes are \"pascaligo\", \"cameligo\" and \"reasonligo\". By default, the syntax is guessed from the extension (.ligo, .mligo and .religo, respectively)." in
info ~docv ~doc ["syntax" ; "s"] in
value @@ opt string "auto" info
@ -58,7 +58,7 @@ let init_file =
let open Arg in
let info =
let docv = "INIT_FILE" in
let doc = "$(docv) is the path to the .ligo or .mligo file to be used for context initialization." in
let doc = "$(docv) is the path to the smart contract file to be used for context initialization." in
info ~docv ~doc ["init-file"] in
value @@ opt (some string) None info
@ -66,7 +66,7 @@ let amount =
let open Arg in
let info =
let docv = "AMOUNT" in
let doc = "$(docv) is the amount the michelson interpreter will use." in
let doc = "$(docv) is the amount the Michelson interpreter will use." in
info ~docv ~doc ["amount"] in
value @@ opt string "0" info
@ -74,7 +74,7 @@ let sender =
let open Arg in
let info =
let docv = "SENDER" in
let doc = "$(docv) is the sender the michelson interpreter transaction will use." in
let doc = "$(docv) is the sender the Michelson interpreter transaction will use." in
info ~docv ~doc ["sender"] in
value @@ opt (some string) None info
@ -82,7 +82,7 @@ let source =
let open Arg in
let info =
let docv = "SOURCE" in
let doc = "$(docv) is the source the michelson interpreter transaction will use." in
let doc = "$(docv) is the source the Michelson interpreter transaction will use." in
info ~docv ~doc ["source"] in
value @@ opt (some string) None info
@ -90,7 +90,7 @@ let predecessor_timestamp =
let open Arg in
let info =
let docv = "PREDECESSOR_TIMESTAMP" in
let doc = "$(docv) is the pedecessor_timestamp (now value minus one minute) the michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in
let doc = "$(docv) is the predecessor_timestamp (now value minus one minute) the Michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in
info ~docv ~doc ["predecessor-timestamp"] in
value @@ opt (some string) None info
@ -135,7 +135,7 @@ let compile_file =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format $ michelson_code_format) in
let cmdname = "compile-contract" in
let doc = "Subcommand: compile a contract." in
let doc = "Subcommand: Compile a contract." in
(Term.ret term , Term.info ~doc cmdname)
let print_cst =
@ -147,7 +147,7 @@ let print_cst =
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-cst" in
let doc = "Subcommand: print the cst. Warning: intended for development of LIGO and can break at any time." in
let doc = "Subcommand: Print the CST.\nWarning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let print_ast =
@ -159,7 +159,7 @@ let print_ast =
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-ast" in
let doc = "Subcommand: print the ast. Warning: intended for development of LIGO and can break at any time." in
let doc = "Subcommand: Print the AST.\nWarning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let print_typed_ast =
@ -172,7 +172,7 @@ let print_typed_ast =
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-typed-ast" in
let doc = "Subcommand: print the typed ast. Warning: intended for development of LIGO and can break at any time." in
let doc = "Subcommand: Print the typed AST.\nWarning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let print_mini_c =
@ -186,7 +186,7 @@ let print_mini_c =
in
let term = Term.(const f $ source_file 0 $ syntax $ display_format) in
let cmdname = "print-mini-c" in
let doc = "Subcommand: print mini c. Warning: intended for development of LIGO and can break at any time." in
let doc = "Subcommand: Print Mini-C. Warning: Intended for development of LIGO and can break at any time." in
(Term.ret term, Term.info ~doc cmdname)
let measure_contract =
@ -203,7 +203,7 @@ let measure_contract =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ syntax $ display_format) in
let cmdname = "measure-contract" in
let doc = "Subcommand: measure a contract's compiled size in bytes." in
let doc = "Subcommand: Measure a contract's compiled size in bytes." in
(Term.ret term , Term.info ~doc cmdname)
let compile_parameter =
@ -232,7 +232,7 @@ let compile_parameter =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in
let cmdname = "compile-parameter" in
let doc = "Subcommand: compile parameters to a michelson expression. The resulting michelson expression can be passed as an argument in a transaction which calls a contract." in
let doc = "Subcommand: Compile parameters to a Michelson expression. The resulting Michelson expression can be passed as an argument in a transaction which calls a contract." in
(Term.ret term , Term.info ~doc cmdname)
let interpret =
@ -265,7 +265,7 @@ let interpret =
let term =
Term.(const f $ expression "EXPRESSION" 0 $ init_file $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format ) in
let cmdname = "interpret" in
let doc = "Subcommand: interpret the expression in the context initialized by the provided source file." in
let doc = "Subcommand: Interpret the expression in the context initialized by the provided source file." in
(Term.ret term , Term.info ~doc cmdname)
@ -295,7 +295,7 @@ let compile_storage =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "STORAGE" 2 $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format $ michelson_code_format) in
let cmdname = "compile-storage" in
let doc = "Subcommand: compile an initial storage in ligo syntax to a michelson expression. The resulting michelson expression can be passed as an argument in a transaction which originates a contract." in
let doc = "Subcommand: Compile an initial storage in LIGO syntax to a Michelson expression. The resulting Michelson expression can be passed as an argument in a transaction which originates a contract." in
(Term.ret term , Term.info ~doc cmdname)
let dry_run =
@ -330,7 +330,7 @@ let dry_run =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ expression "STORAGE" 3 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "dry-run" in
let doc = "Subcommand: run a smart-contract with the given storage and input." in
let doc = "Subcommand: Run a smart-contract with the given storage and input." in
(Term.ret term , Term.info ~doc cmdname)
let run_function =
@ -361,7 +361,7 @@ let run_function =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "run-function" in
let doc = "Subcommand: run a function with the given parameter." in
let doc = "Subcommand: Run a function with the given parameter." in
(Term.ret term , Term.info ~doc cmdname)
let evaluate_value =
@ -380,7 +380,7 @@ let evaluate_value =
let term =
Term.(const f $ source_file 0 $ entry_point 1 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "evaluate-value" in
let doc = "Subcommand: evaluate a given definition." in
let doc = "Subcommand: Evaluate a given definition." in
(Term.ret term , Term.info ~doc cmdname)
let compile_expression =
@ -399,7 +399,7 @@ let compile_expression =
let term =
Term.(const f $ expression "" 1 $ req_syntax 0 $ display_format $ michelson_code_format) in
let cmdname = "compile-expression" in
let doc = "Subcommand: compile to a michelson value." in
let doc = "Subcommand: Compile to a Michelson value." in
(Term.ret term , Term.info ~doc cmdname)
let dump_changelog =
@ -420,7 +420,7 @@ let list_declarations =
let term =
Term.(const f $ source_file 0 $ syntax ) in
let cmdname = "list-declarations" in
let doc = "Subcommand: list all the top-level decalarations." in
let doc = "Subcommand: List all the top-level declarations." in
(Term.ret term , Term.info ~doc cmdname)
let run ?argv () =

View File

@ -37,10 +37,10 @@ ligo: : Lexical error in file "broken_string.mligo", line 1, characters 8-9:
run_ligo_bad [ "compile-contract" ; "../../test/lexer/broken_string.religo" ; "main" ] ;
[%expect {|
ligo: : Lexical error at line 1, characters 8-9:
ligo: : Lexical error in file "broken_string.religo", line 1, characters 8-9:
The string starting here is interrupted by a line break.
Hint: Remove the break, close the string before or insert a backslash.
{"parser_loc":"in file \"broken_string.religo\", line 1, characters 8-9"}
{}
If you're not sure how to fix this error, you can
@ -88,10 +88,10 @@ ligo: : Lexical error in file "negative_byte_sequence.mligo", line 1, characters
run_ligo_bad [ "compile-contract" ; "../../test/lexer/negative_byte_sequence.religo" ; "main" ] ;
[%expect {|
ligo: : Lexical error at line 1, characters 8-13:
ligo: : Lexical error in file "negative_byte_sequence.religo", line 1, characters 8-13:
Negative byte sequence.
Hint: Remove the leading minus sign.
{"parser_loc":"in file \"negative_byte_sequence.religo\", line 1, characters 8-13"}
{}
If you're not sure how to fix this error, you can
@ -122,10 +122,10 @@ ligo: : Lexical error in file "reserved_name.ligo", line 1, characters 4-13:
run_ligo_bad [ "compile-contract" ; "../../test/lexer/reserved_name.religo" ; "main" ] ;
[%expect {|
ligo: : Lexical error at line 1, characters 4-7:
ligo: : Lexical error in file "reserved_name.religo", line 1, characters 4-7:
Reserved name: end.
Hint: Change the name.
{"parser_loc":"in file \"reserved_name.religo\", line 1, characters 4-7"}
{}
If you're not sure how to fix this error, you can
@ -188,9 +188,9 @@ ligo: : Lexical error in file "unexpected_character.mligo", line 1, characters 8
run_ligo_bad [ "compile-contract" ; "../../test/lexer/unexpected_character.religo" ; "main" ] ;
[%expect {|
ligo: : Lexical error at line 1, characters 8-9:
ligo: : Lexical error in file "unexpected_character.religo", line 1, characters 8-9:
Unexpected character '\239'.
{"parser_loc":"in file \"unexpected_character.religo\", line 1, characters 8-9"}
{}
If you're not sure how to fix this error, you can
@ -255,10 +255,10 @@ ligo: : Lexical error in file "invalid_symbol.mligo", line 1, characters 10-13:
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_symbol.religo" ; "main" ] ;
[%expect {|
ligo: : Lexical error at line 1, characters 10-11:
ligo: : Lexical error in file "invalid_symbol.religo", line 1, characters 10-11:
Invalid symbol.
Hint: Check the LIGO syntax you use.
{"parser_loc":"in file \"invalid_symbol.religo\", line 1, characters 10-11"}
{}
If you're not sure how to fix this error, you can
@ -306,10 +306,10 @@ ligo: : Lexical error in file "missing_break.mligo", line 1, characters 11-11:
run_ligo_bad [ "compile-contract" ; "../../test/lexer/missing_break.religo" ; "main" ] ;
[%expect {|
ligo: : Lexical error at line 1, characters 11-11:
ligo: : Lexical error in file "missing_break.religo", line 1, characters 11-11:
Missing break.
Hint: Insert some space.
{"parser_loc":"in file \"missing_break.religo\", line 1, characters 11-11"}
{}
If you're not sure how to fix this error, you can
@ -357,10 +357,10 @@ ligo: : Lexical error in file "invalid_character_in_string.mligo", line 1, chara
run_ligo_bad [ "compile-contract" ; "../../test/lexer/invalid_character_in_string.religo" ; "main" ] ;
[%expect {|
ligo: : Lexical error at line 1, characters 9-10:
ligo: : Lexical error in file "invalid_character_in_string.religo", line 1, characters 9-10:
Invalid character in string.
Hint: Remove or replace the character.
{"parser_loc":"in file \"invalid_character_in_string.religo\", line 1, characters 9-10"}
{}
If you're not sure how to fix this error, you can

View File

@ -1,27 +1,23 @@
open Trace
type s_syntax = Syntax_name of string
type v_syntax = Pascaligo | Cameligo | ReasonLIGO
type v_syntax = PascaLIGO | CameLIGO | ReasonLIGO
let syntax_to_variant : s_syntax -> string option -> v_syntax result =
fun syntax source_filename ->
let subr s n =
String.sub s (String.length s - n) n in
let endswith s suffix =
let suffixlen = String.length suffix in
( String.length s >= suffixlen
&& String.equal (subr s suffixlen) suffix)
in
let (Syntax_name syntax) = syntax in
match (syntax , source_filename) with
| "auto" , Some sf when endswith sf ".ligo" -> ok Pascaligo
| "auto" , Some sf when endswith sf ".mligo" -> ok Cameligo
| "auto" , Some sf when endswith sf ".religo" -> ok ReasonLIGO
| "auto" , _ -> simple_fail "cannot auto-detect syntax, pleas use -s name_of_syntax"
| "pascaligo" , _ -> ok Pascaligo
| "cameligo" , _ -> ok Cameligo
| "reasonligo", _ -> ok ReasonLIGO
| _ -> simple_fail "unrecognized parser"
let syntax_to_variant (Syntax_name syntax) source =
match syntax, source with
"auto", Some sf ->
(match Filename.extension sf with
".ligo" | ".pligo" -> ok PascaLIGO
| ".mligo" -> ok CameLIGO
| ".religo" -> ok ReasonLIGO
| _ -> simple_fail "Cannot auto-detect the syntax.\n\
Hint: Use -s <name of syntax>\n")
| ("pascaligo" | "PascaLIGO"), _ -> ok PascaLIGO
| ("cameligo" | "CameLIGO"), _ -> ok CameLIGO
| ("reasonligo" | "ReasonLIGO"), _ -> ok ReasonLIGO
| _ -> simple_fail "Invalid syntax name.\n\
Hint: Use \"pascaligo\", \"cameligo\" \
or \"reasonligo\".\n"
let parsify_pascaligo source =
let%bind raw =
@ -32,141 +28,144 @@ let parsify_pascaligo source =
Simplify.Pascaligo.simpl_program raw
in ok simplified
let parsify_expression_pascaligo = fun source ->
let parsify_expression_pascaligo source =
let%bind raw =
trace (simple_error "parsing expression") @@
Parser.Pascaligo.parse_expression source in
let%bind simplified =
trace (simple_error "simplifying expression") @@
Simplify.Pascaligo.simpl_expression raw in
ok simplified
Simplify.Pascaligo.simpl_expression raw
in ok simplified
let parsify_cameligo = fun source ->
let parsify_cameligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Cameligo.parse_file source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_expression_cameligo = fun source ->
let parsify_expression_cameligo source =
let%bind raw =
trace (simple_error "parsing expression") @@
Parser.Cameligo.parse_expression source in
let%bind simplified =
trace (simple_error "simplifying expression") @@
Simplify.Cameligo.simpl_expression raw in
ok simplified
Simplify.Cameligo.simpl_expression raw
in ok simplified
let parsify_reasonligo = fun source ->
let parsify_reasonligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Reasonligo.parse_file source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_expression_reasonligo = fun source ->
let parsify_expression_reasonligo source =
let%bind raw =
trace (simple_error "parsing expression") @@
Parser.Reasonligo.parse_expression source in
let%bind simplified =
trace (simple_error "simplifying expression") @@
Simplify.Cameligo.simpl_expression raw in
ok simplified
Simplify.Cameligo.simpl_expression raw
in ok simplified
let parsify = fun (syntax : v_syntax) source_filename ->
let%bind parsify = match syntax with
| Pascaligo -> ok parsify_pascaligo
| Cameligo -> ok parsify_cameligo
let parsify syntax source =
let%bind parsify =
match syntax with
PascaLIGO -> ok parsify_pascaligo
| CameLIGO -> ok parsify_cameligo
| ReasonLIGO -> ok parsify_reasonligo in
let%bind parsified = parsify source_filename in
let%bind applied = Self_ast_simplified.all_program parsified in
ok applied
let parsify_expression = fun syntax source ->
let%bind parsify = match syntax with
| Pascaligo -> ok parsify_expression_pascaligo
| Cameligo -> ok parsify_expression_cameligo
| ReasonLIGO -> ok parsify_expression_reasonligo
in
let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_expression parsified in
ok applied
let%bind applied = Self_ast_simplified.all_program parsified
in ok applied
let parsify_string_reasonligo = fun source ->
let parsify_expression syntax source =
let%bind parsify = match syntax with
PascaLIGO -> ok parsify_expression_pascaligo
| CameLIGO -> ok parsify_expression_cameligo
| ReasonLIGO -> ok parsify_expression_reasonligo in
let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_expression parsified
in ok applied
let parsify_string_reasonligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Reasonligo.parse_string source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_string_pascaligo = fun source ->
let parsify_string_pascaligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Pascaligo.parse_string source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Pascaligo.simpl_program raw in
ok simplified
Simplify.Pascaligo.simpl_program raw
in ok simplified
let parsify_string_cameligo = fun source ->
let parsify_string_cameligo source =
let%bind raw =
trace (simple_error "parsing") @@
Parser.Cameligo.parse_string source in
let%bind simplified =
trace (simple_error "simplifying") @@
Simplify.Cameligo.simpl_program raw in
ok simplified
Simplify.Cameligo.simpl_program raw
in ok simplified
let parsify_string = fun (syntax : v_syntax) source_filename ->
let%bind parsify = match syntax with
| Pascaligo -> ok parsify_string_pascaligo
| Cameligo -> ok parsify_string_cameligo
| ReasonLIGO -> ok parsify_string_reasonligo
in
let%bind parsified = parsify source_filename in
let%bind applied = Self_ast_simplified.all_program parsified in
ok applied
let parsify_string syntax source =
let%bind parsify =
match syntax with
PascaLIGO -> ok parsify_string_pascaligo
| CameLIGO -> ok parsify_string_cameligo
| ReasonLIGO -> ok parsify_string_reasonligo in
let%bind parsified = parsify source in
let%bind applied = Self_ast_simplified.all_program parsified
in ok applied
let pretty_print_pascaligo = fun source ->
let pretty_print_pascaligo source =
let%bind ast = Parser.Pascaligo.parse_file source in
let buffer = Buffer.create 59 in
let state = Parser_pascaligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
let state =
Parser_pascaligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
Parser_pascaligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print_cameligo = fun source ->
let pretty_print_cameligo source =
let%bind ast = Parser.Cameligo.parse_file source in
let buffer = Buffer.create 59 in
let state = Parser_cameligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
let state = (* TODO: Should flow from the CLI *)
Parser_cameligo.ParserLog.mk_state
~offsets:true
~mode:`Point
~buffer in
Parser.Cameligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print_reasonligo = fun source ->
let pretty_print_reasonligo source =
let%bind ast = Parser.Reasonligo.parse_file source in
let buffer = Buffer.create 59 in
let state = Parser.Reasonligo.ParserLog.mk_state
~offsets:true
~mode:`Byte
~buffer in
let state = (* TODO: Should flow from the CLI *)
Parser.Reasonligo.ParserLog.mk_state
~offsets:true
~mode:`Point
~buffer in
Parser.Reasonligo.ParserLog.pp_ast state ast;
ok buffer
let pretty_print = fun syntax source_filename ->
let%bind v_syntax = syntax_to_variant syntax (Some source_filename) in
(match v_syntax with
| Pascaligo -> pretty_print_pascaligo
| Cameligo -> pretty_print_cameligo
| ReasonLIGO -> pretty_print_reasonligo)
source_filename
let pretty_print syntax source =
let%bind v_syntax =
syntax_to_variant syntax (Some source) in
match v_syntax with
PascaLIGO -> pretty_print_pascaligo source
| CameLIGO -> pretty_print_cameligo source
| ReasonLIGO -> pretty_print_reasonligo source

View File

@ -47,38 +47,35 @@ module Errors =
struct
(* let data =
[("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
*)
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let generic message =
let title () = ""
and message () = message.Region.value
in Trace.error ~data:[] title message
end
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let local_fail error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error
|> Errors.generic |> Trace.fail in
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
(* Lexing and parsing errors *)
| Stdlib.Error error ->
Trace.fail @@ Errors.generic error
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
@ -94,22 +91,19 @@ let parse (module IO : IO) parser =
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate field name in this record declaration.\n\
@ -131,7 +125,7 @@ let parse_file (source: string) =
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension) in
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
if SSet.mem "cpp" IO.options#verbose
@ -150,12 +144,12 @@ let parse_file (source: string) =
let open Trace in
let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s: string) =
let module IO =
@ -164,12 +158,12 @@ let parse_string (s: string) =
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
match Lexer.open_token_stream (Lexer.String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_expression (s: string) =
let module IO =
@ -178,9 +172,9 @@ let parse_expression (s: string) =
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
match Lexer.open_token_stream (Lexer.String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -90,7 +90,7 @@ tuple(item):
(* Possibly empty semicolon-separated values between brackets *)
list(item):
list__(item):
"[" sep_or_term_list(item,";")? "]" {
let compound = Brackets ($1,$3)
and region = cover $1 $3 in
@ -294,7 +294,7 @@ core_pattern:
| "false" { PFalse $1 }
| "true" { PTrue $1 }
| par(ptuple) { PPar $1 }
| list(tail) { PList (PListComp $1) }
| list__(tail) { PList (PListComp $1) }
| constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 }
@ -585,7 +585,7 @@ core_expr:
| unit { EUnit $1 }
| "false" { ELogic (BoolExpr (False $1)) }
| "true" { ELogic (BoolExpr (True $1)) }
| list(expr) { EList (EListComp $1) }
| list__(expr) { EList (EListComp $1) }
| sequence { ESeq $1 }
| record_expr { ERecord $1 }
| update_record { EUpdate $1 }

View File

@ -47,38 +47,35 @@ module Errors =
struct
(* let data =
[("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
*)
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] *)
let generic message =
let title () = ""
and message () = message.Region.value
in Trace.error ~data:[] title message
end
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let local_fail error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error
|> Errors.generic |> Trace.fail in
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
Stdlib.Ok semantic_value -> Trace.ok semantic_value
(* Lexing and parsing errors *)
| Stdlib.Error error ->
Trace.fail @@ Errors.generic error
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
@ -87,9 +84,8 @@ let parse (module IO : IO) parser =
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate parameter.\nHint: Change the name.\n",
@ -106,93 +102,49 @@ let parse (module IO : IO) parser =
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
local_fail
("Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid))
let parse_file (source: string) =
let parse_file source =
let module IO =
struct
let ext = PreIO.ext
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
let module Unit = PreUnit (IO)
in Wrapper.parse_file Errors.generic (module Unit : ParserUnit.S) parse
let parse_string (s: string) =
let parse_string =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
let module Unit = PreUnit (IO)
in Wrapper.parse_string Errors.generic (module Unit : ParserUnit.S) parse
let parse_expression (s: string) =
let parse_expression =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
let module Unit = PreUnit (IO)
in Wrapper.parse_expression Errors.generic (module Unit : ParserUnit.S) parse

View File

@ -1,12 +1,13 @@
open Trace
module AST = Parser_cameligo.AST
module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr
module AST = Parser_cameligo.AST
module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError
module SSet = Utils.String.Set
(* Mock IOs TODO: Fill them with CLI options *)
@ -20,9 +21,8 @@ module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~input:None
~libs:[]
~verbose:Utils.String.Set.empty
EvalOpt.make ~libs:[]
~verbose:SSet.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
@ -48,59 +48,10 @@ module PreUnit =
module Errors =
struct
let reserved_name Region.{value; region} =
let title () = Printf.sprintf "\nReserved name \"%s\"" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let duplicate_variant Region.{value; region} =
let title () =
Printf.sprintf "\nDuplicate variant \"%s\" in this \
type declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let non_linear_pattern Region.{value; region} =
let title () =
Printf.sprintf "\nRepeated variable \"%s\" in this pattern" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let duplicate_field Region.{value; region} =
let title () =
Printf.sprintf "\nDuplicate field name \"%s\" \
in this record declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let parser_error Region.{value; region} =
let generic message =
let title () = ""
and message () = value
and loc = region in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
let lexer_error (e: Lexer.error AST.reg) =
let title () = "\nLexer error" in
let message () = Lexer.error_to_string e.value in
let data = [
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)]
in error ~data title message
and message () = message.Region.value
in Trace.error ~data:[] title message
let wrong_function_arguments (expr: AST.expr) =
let title () = "\nWrong function arguments" in
@ -114,115 +65,127 @@ module Errors =
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let mk_error error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
let local_fail error =
Trace.fail
@@ Errors.generic
@@ Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
(* Scoping errors *)
Stdlib.Ok semantic_value -> Trace.ok semantic_value
Stdlib.Ok semantic_value -> ok semantic_value
| Stdlib.Error error -> fail @@ Errors.parser_error error
| exception Lexer.Error e -> fail @@ Errors.lexer_error e
(* Lexing and parsing errors *)
| Stdlib.Error error -> Trace.fail @@ Errors.generic error
(* Scoping errors *)
| exception SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
fail @@ Errors.wrong_function_arguments expr
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
let point =
"Reserved name.\nHint: Change the name.\n", None, invalid
in fail @@ Errors.reserved_name @@ mk_error point)
local_fail
("Reserved name.\nHint: Change the name.\n", None, invalid))
| exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region in
let point =
"Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n",
None, token
in fail @@ Errors.duplicate_variant @@ mk_error point
Lexer.Token.mk_constr name.Region.value name.Region.region
in local_fail
("Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n", None, token)
| exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
let point =
"Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid
in fail @@ Errors.non_linear_pattern @@ mk_error point)
local_fail ("Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid))
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is a not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
Stdlib.Error LexToken.Reserved_name ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost "Reserved name."
| Ok invalid ->
let point =
"Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid
in fail @@ Errors.duplicate_field @@ mk_error point)
local_fail
("Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid))
| exception SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
Trace.fail @@ Errors.wrong_function_arguments expr
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
let options =
PreIO.pre_options ~input:(Some source) ~expr:false
end in
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ IO.ext in
let pp_input =
let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.ligo"
in prefix ^ suffix in
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
source pp_input in
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
let%bind channel =
generic_try (simple_error "Error when opening file") @@
(fun () -> open_in pp_input) in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.Channel channel) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_string (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
let options = PreIO.pre_options ~input:None ~expr:false
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
match Lexer.open_token_stream (Lexer.String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg
let parse_expression (s: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:true
let options = PreIO.pre_options ~input:None ~expr:true
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_expr in
parse (module IO) thunk
match Lexer.open_token_stream (Lexer.String s) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module IO) thunk
| Stdlib.Error (Lexer.File_opening msg) ->
Trace.fail @@ Errors.generic @@ Region.wrap_ghost msg

View File

@ -119,7 +119,7 @@ tuple(item):
(* Possibly empty semicolon-separated values between brackets *)
list(item):
list__(item):
"[" sep_or_term_list(item,";")? "]" {
let compound = Brackets ($1,$3)
and region = cover $1 $3 in
@ -335,7 +335,7 @@ core_pattern:
| "false" { PFalse $1 }
| "<string>" { PString $1 }
| par(ptuple) { PPar $1 }
| list(sub_pattern) { PList (PListComp $1) }
| list__(sub_pattern) { PList (PListComp $1) }
| constr_pattern { PConstr $1 }
| record_pattern { PRecord $1 }
@ -725,8 +725,8 @@ common_expr:
| "true" { ELogic (BoolExpr (True $1)) }
core_expr_2:
common_expr { $1 }
| list(expr) { EList (EListComp $1) }
common_expr { $1 }
| list__(expr) { EList (EListComp $1) }
list_or_spread:
"[" expr "," sep_or_term_list(expr, ",") "]" {

View File

@ -525,15 +525,12 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let region, lexeme, state = sync state buffer in
let lexeme = Str.string_before lexeme (String.index lexeme 't') in
match format_tz lexeme with
| Some tz -> (
match Token.mk_mutez (Z.to_string tz ^ "mutez") region with
Ok token ->
token, state
None -> assert false
| Some tz ->
match Token.mk_mutez (Z.to_string tz ^ "mutez") region with
Ok token -> token, state
| Error Token.Non_canonical_zero ->
fail region Non_canonical_zero
)
| None -> assert false
let mk_ident state buffer =
let region, lexeme, state = sync state buffer in
@ -563,7 +560,6 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let region, _, state = sync state buffer
in Token.eof region, state
(* END HEADER *)
}
@ -589,8 +585,9 @@ let byte_seq = byte | byte (byte | '_')* byte
let bytes = "0x" (byte_seq? as seq)
let esc = "\\n" | "\\\"" | "\\\\" | "\\b"
| "\\r" | "\\t" | "\\x" byte
let pascaligo_sym = "=/=" | '#' | ":="
let cameligo_sym = "<>" | "::" | "||" | "&&"
let pascaligo_sym = "=/=" | '#' | ":="
let cameligo_sym = "<>" | "::" | "||" | "&&"
let reasonligo_sym = '!' | "=>" | "!=" | "==" | "++" | "..." | "||" | "&&"
let symbol =
@ -689,7 +686,7 @@ and scan state = parse
Some special errors are recognised in the semantic actions of the
following regular expressions. The first error is a minus sign
separated from the integer it applies by some markup (space or
separated from the integer it applies to by some markup (space or
tabs). The second is a minus sign immediately followed by
anything else than a natural number (matched above) or markup and
a number (previous error). The third is the strange occurrence of

View File

@ -23,6 +23,41 @@ module type Pretty =
val print_expr : state -> expr -> unit
end
module type S =
sig
module IO : IO
module Lexer : Lexer.S
module AST : sig type t type expr end
module Parser : ParserAPI.PARSER
with type ast = AST.t
and type expr = AST.expr
and type token = Lexer.token
(* Error handling reexported from [ParserAPI] without the
exception [Point] *)
type message = string
type valid = Parser.token
type invalid = Parser.token
type error = message * valid option * invalid
val format_error :
?offsets:bool -> [`Byte | `Point] -> error -> string Region.reg
val short_error :
?offsets:bool -> [`Point | `Byte] -> message -> Region.t -> string
(* Parsers *)
type 'a parser = Lexer.instance -> ('a, message Region.reg) result
val apply : Lexer.instance -> 'a parser -> ('a, message Region.reg) result
val parse_contract : AST.t parser
val parse_expr : AST.expr parser
end
module Make (Lexer: Lexer.S)
(AST: sig type t type expr end)
(Parser: ParserAPI.PARSER
@ -34,6 +69,11 @@ module Make (Lexer: Lexer.S)
and type expr = AST.expr)
(IO: IO) =
struct
module IO = IO
module Lexer = Lexer
module AST = AST
module Parser = Parser
open Printf
module SSet = Utils.String.Set

View File

@ -23,17 +23,17 @@ module type Pretty =
val print_expr : state -> expr -> unit
end
module Make (Lexer: Lexer.S)
(AST: sig type t type expr end)
(Parser: ParserAPI.PARSER
module type S =
sig
module IO : IO
module Lexer : Lexer.S
module AST : sig type t type expr end
module Parser : ParserAPI.PARSER
with type ast = AST.t
and type expr = AST.expr
and type token = Lexer.token)
(ParErr: sig val message : int -> string end)
(ParserLog: Pretty with type ast = AST.t
and type expr = AST.expr)
(IO: IO) :
sig
and type token = Lexer.token
(* Error handling reexported from [ParserAPI] without the
exception [Point] *)
@ -57,3 +57,17 @@ module Make (Lexer: Lexer.S)
val parse_contract : AST.t parser
val parse_expr : AST.expr parser
end
module Make (Lexer : Lexer.S)
(AST : sig type t type expr end)
(Parser : ParserAPI.PARSER
with type ast = AST.t
and type expr = AST.expr
and type token = Lexer.token)
(ParErr : sig val message : int -> string end)
(ParserLog : Pretty with type ast = AST.t
and type expr = AST.expr)
(IO: IO) : S with module IO = IO
and module Lexer = Lexer
and module AST = AST
and module Parser = Parser

View File

@ -0,0 +1,59 @@
(* Abbreviation for sets of strings (used for the verbosity switches). *)
module SSet = Utils.String.Set

(* Signature of the per-syntax input/output configuration: the file
   extension of the concrete syntax and the CLI options in force. *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
(* [parse_file generic_error (module Unit) parse] preprocesses the
   input file recorded in [Unit.IO.options] with [cpp], lexes the
   result, and applies the continuation [parse] to a thunk running the
   contract parser. Failures to run cpp or to open the preprocessed
   file are reported through [generic_error]. *)
let parse_file generic_error (module Unit : ParserUnit.S) parse =
(* One " -I <dir>" cpp flag per library directory in the options. *)
let lib_path =
match Unit.IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
in List.fold_right mk_I libs "" in
(* Base name for the preprocessed file ("temp" when reading stdin). *)
let prefix =
match Unit.IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(remove_extension @@ basename file) in
let suffix = ".pp" ^ Unit.IO.ext in
(* In verbose "cpp" mode the preprocessed file is kept under a
   predictable name for inspection; otherwise a temporary file is
   used (created then closed, so cpp can write to it below). *)
let pp_input =
if SSet.mem "cpp" Unit.IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input in
(* The cpp command reads stdin when no input file was given. *)
let cpp_cmd =
match Unit.IO.options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input in
let open Trace in
let%bind () = sys_command cpp_cmd in
match Unit.Lexer.(open_token_stream (File pp_input)) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module Unit.IO : IO) thunk
| Stdlib.Error (Unit.Lexer.File_opening msg) ->
Trace.fail @@ generic_error @@ Region.wrap_ghost msg
(* [parse_string generic_error (module Unit) parse s] lexes the
   contract held in the string [s] and applies the continuation
   [parse] to a thunk running the contract parser. *)
let parse_string generic_error
(module Unit : ParserUnit.S) parse (s: string) =
match Unit.Lexer.(open_token_stream (String s)) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_contract
in parse (module Unit.IO : IO) thunk
(* NOTE(review): presumably unreachable when lexing a string (no file
   is opened) — kept for exhaustiveness; confirm against the lexer. *)
| Stdlib.Error (Unit.Lexer.File_opening msg) ->
Trace.fail @@ generic_error @@ Region.wrap_ghost msg
(* [parse_expression generic_error (module Unit) parse s] is like
   [parse_string], but runs the expression parser instead of the
   contract parser on the string [s]. *)
let parse_expression generic_error
(module Unit : ParserUnit.S) parse (s: string) =
match Unit.Lexer.(open_token_stream (String s)) with
Ok instance ->
let thunk () = Unit.apply instance Unit.parse_expr
in parse (module Unit.IO : IO) thunk
(* NOTE(review): presumably unreachable when lexing a string (no file
   is opened) — kept for exhaustiveness; confirm against the lexer. *)
| Stdlib.Error (Unit.Lexer.File_opening msg) ->
Trace.fail @@ generic_error @@ Region.wrap_ghost msg

0
vendors/Preproc/.EMain.tag vendored Normal file
View File

0
vendors/Preproc/.Eparser.mly.tag vendored Normal file
View File

0
vendors/Preproc/.ProcMain.tag vendored Normal file
View File

1
vendors/Preproc/.links vendored Normal file
View File

@ -0,0 +1 @@
$HOME/git/OCaml-build/Makefile

33
vendors/Preproc/EMain.ml vendored Normal file
View File

@ -0,0 +1,33 @@
(* This module is only used for testing modules [Escan] and [Eparser]
as units *)
(* Standalone driver for the scanner: prints the token stream of the
   file named on the command line (see [Escan.trace]). *)
module Lexer = struct
let run () =
match Array.length Sys.argv with
2 -> Escan.trace Sys.argv.(1)
| _ -> prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")
end

(* Standalone driver for the parser: parses the boolean expression in
   the file named on the command line, evaluates it in the empty
   environment, and prints the resulting boolean on stdout. Lexical,
   syntactical and parse errors are reported via [Error.print]. *)
module Parser = struct
let run () =
if Array.length Sys.argv = 2
then
match open_in Sys.argv.(1) with
exception Sys_error msg -> prerr_endline msg
| cin ->
let buffer = Lexing.from_channel cin in
let open Error in
let () =
try
let tree = Eparser.pp_expression Escan.token buffer in
let value = Preproc.(eval Env.empty tree)
in (print_string (string_of_bool value);
print_newline ())
with Lexer diag -> print "Lexical" diag
| Parser diag -> print "Syntactical" diag
| Eparser.Error -> print "" ("Parse", mk_seg buffer, 1)
in close_in cin
else prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")
end

(* Entry point: only the parser driver is run. *)
let _ = Parser.run()

50
vendors/Preproc/Eparser.mly vendored Normal file
View File

@ -0,0 +1,50 @@
%{
(* Grammar for boolean expressions in preprocessing directives of C# *)
%}

%token True False
%token <string> Ident
%token OR AND EQ NEQ NOT EOL LPAR RPAR

(* Entries *)

%start pp_expression
%type <Etree.t> pp_expression

%%

(* Grammar

   Precedence, lowest to highest: "||", "&&", "=="/"!=", "!".
   Binary operators are left-associative, encoded by left recursion:
   each level recurses on itself on the left and refers to the next
   (tighter) level on the right. *)

pp_expression:
  e=pp_or_expression EOL { e }

pp_or_expression:
  e=pp_and_expression { e }
| e1=pp_or_expression OR e2=pp_and_expression {
    Etree.Or (e1,e2)
  }

(* Fix: the right-hand side of AND was [pp_unary_expression], which
   skipped the equality level and made e.g. [A && B == C] a syntax
   error, whereas [A == B && C] parsed. It now refers to
   [pp_equality_expression], consistently with the OR rule and with
   C# precedence ("==" binds tighter than "&&"). *)

pp_and_expression:
  e=pp_equality_expression { e }
| e1=pp_and_expression AND e2=pp_equality_expression {
    Etree.And (e1,e2)
  }

pp_equality_expression:
  e=pp_unary_expression { e }
| e1=pp_equality_expression EQ e2=pp_unary_expression {
    Etree.Eq (e1,e2)
  }
| e1=pp_equality_expression NEQ e2=pp_unary_expression {
    Etree.Neq (e1,e2)
  }

pp_unary_expression:
  e=pp_primary_expression { e }
| NOT e=pp_unary_expression { Etree.Not e }

pp_primary_expression:
  True { Etree.True }
| False { Etree.False }
| id=Ident { Etree.Ident id }
| LPAR e=pp_or_expression RPAR { e }

31
vendors/Preproc/Error.ml vendored Normal file
View File

@ -0,0 +1,31 @@
(* This module provides support for managing and printing errors when
preprocessing C# source files. *)
(* An error is a message, a source segment, and a virtual line. *)
type message = string
type start = Lexing.position
type stop = Lexing.position
type seg = start * stop

(* [mk_seg buffer] is the segment of the current lexeme of [buffer]. *)
let mk_seg buffer =
Lexing.(lexeme_start_p buffer, lexeme_end_p buffer)

(* Virtual line number (after #line-style renumbering). *)
type vline = int

exception Lexer of (message * seg * vline)
exception Parser of (message * seg * vline)
(* [print kind (msg, seg, vend)] reports the error [msg] of kind
   [kind] on stderr, locating it at its virtual line. [msg] must not
   be empty. When [kind] is empty, [msg] serves as the headline and
   the report ends with a period instead of repeating the message. *)
let print (kind: string) (msg, (start, stop), vend) =
  let open Lexing in
  (* Shift the physical start line by the same amount as the virtual
     end line differs from the physical end line. *)
  let vstart = start.pos_lnum + (vend - stop.pos_lnum) in
  assert (msg <> "");
  let headline = if kind = "" then msg else kind in
  let start_char = start.pos_cnum - start.pos_bol
  and stop_char = stop.pos_cnum - stop.pos_bol in
  let span =
    if stop.pos_lnum = start.pos_lnum
    then Printf.sprintf "--%d" stop_char
    else Printf.sprintf " to line %d, char %d" vend stop_char in
  let tail = if kind = "" then "." else ":\n" ^ msg in
  prerr_endline
    (Printf.sprintf "%s error at line %d, char %d%s%s"
       headline vstart start_char span tail)

95
vendors/Preproc/Escan.mll vendored Normal file
View File

@ -0,0 +1,95 @@
{
(* Auxiliary scanner for boolean expressions of the C# preprocessor *)
(* Concrete syntax of tokens. See module [Eparser]. *)
(* Concrete syntax of each token; "EOL" stands for the end-of-line
   pseudo-token. Used by the standalone tracer below. *)
let string_of_token token =
  match token with
    Eparser.True -> "true"
  | Eparser.False -> "false"
  | Eparser.Ident id -> id
  | Eparser.OR -> "||"
  | Eparser.AND -> "&&"
  | Eparser.EQ -> "=="
  | Eparser.NEQ -> "!="
  | Eparser.NOT -> "!"
  | Eparser.LPAR -> "("
  | Eparser.RPAR -> ")"
  | Eparser.EOL -> "EOL"
}
(* Regular expressions for literals *)
(* White space *)
let newline = '\n' | '\r' | "\r\n"
let blank = ' ' | '\t'
(* Unicode escape sequences *)
let digit = ['0'-'9']
let hexdigit = digit | ['A'-'F' 'a'-'f']
let four_hex = hexdigit hexdigit hexdigit hexdigit
let uni_esc = "\\u" four_hex | "\\U" four_hex four_hex
(* Identifiers *)
let lowercase = ['a'-'z']
let uppercase = ['A'-'Z']
let letter = lowercase | uppercase | uni_esc
let start = '_' | letter
let alphanum = letter | digit | '_'
let ident = start alphanum*
(* Rules *)
rule token = parse
blank+ { token lexbuf }
| newline { Lexing.new_line lexbuf; Eparser.EOL }
| eof { Eparser.EOL }
| "true" { Eparser.True }
| "false" { Eparser.False }
| ident as id { Eparser.Ident id }
| '(' { Eparser.LPAR }
| ')' { Eparser.RPAR }
| "||" { Eparser.OR }
| "&&" { Eparser.AND }
| "==" { Eparser.EQ }
| "!=" { Eparser.NEQ }
| "!" { Eparser.NOT }
| "//" { inline_com lexbuf }
| _ as c { let code = Char.code c in
let msg = "Invalid character " ^ String.make 1 c
^ " (" ^ string_of_int code ^ ")."
in raise Error.(Lexer (msg, mk_seg lexbuf, 1))
}
and inline_com = parse
newline { Lexing.new_line lexbuf; Eparser.EOL }
| eof { Eparser.EOL }
| _ { inline_com lexbuf }
{
(* Standalone lexer for debugging purposes. See module [Topexp]. *)
type filename = string

(* [trace name] scans the file [name] and prints one token per line
   on stdout, stopping at the EOL pseudo-token. Lexical errors are
   reported through [Error.print]; failure to open the file is
   printed on stderr. *)
let trace (name: filename) =
match open_in name with
cin ->
let buffer = Lexing.from_channel cin
and cout = stdout in
let rec iter () =
match token buffer with
Eparser.EOL -> close_in cin; close_out cout
| t -> begin
output_string cout (string_of_token t);
output_string cout "\n";
flush cout;
iter ()
end
| exception Error.Lexer diag -> Error.print "Lexical" diag
in iter ()
| exception Sys_error msg -> prerr_endline msg
}

28
vendors/Preproc/Etree.ml vendored Normal file
View File

@ -0,0 +1,28 @@
(* This module defines and exports the type [t] of conditional
expressions of C# directives.
To avoid over-engineering, we moved the definition of the function
[eval] below into the module [Preproc] itself.
*)
(* Abstract syntax of conditional expressions, one constructor per C#
   preprocessor operator. *)
type t =
Or of t * t        (* e1 || e2 *)
| And of t * t     (* e1 && e2 *)
| Eq of t * t      (* e1 == e2 *)
| Neq of t * t     (* e1 != e2 *)
| Not of t         (* !e *)
| True
| False
| Ident of string  (* Preprocessing symbol *)
(*
let rec eval env = function
Or (e1,e2) -> eval env e1 || eval env e2
| And (e1,e2) -> eval env e1 && eval env e2
| Eq (e1,e2) -> eval env e1 = eval env e2
| Neq (e1,e2) -> eval env e1 != eval env e2
| Not e -> not (eval env e)
| True -> true
| False -> false
| Ident id -> Preproc.Env.mem id env
*)

21
vendors/Preproc/LICENSE vendored Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Christian Rinderknecht
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

4
vendors/Preproc/Makefile.cfg vendored Normal file
View File

@ -0,0 +1,4 @@
SHELL := dash
BFLAGS := -strict-sequence -w +A-48-4
#OCAMLC := ocamlcp
#OCAMLOPT := ocamloptp

585
vendors/Preproc/Preproc.mll vendored Normal file
View File

@ -0,0 +1,585 @@
(* Preprocessor for C#, to be processed by [ocamllex]. *)
{
(* STRING PROCESSING *)
(* The value of [mk_str len p] ("make string") is a string of length
[len] containing the [len] characters in the list [p], in reverse
order. For instance, [mk_str 3 ['c';'b';'a'] = "abc"]. *)
(* [mk_str len p] ("make string") is the string of length [len] whose
   characters are those of [p] in reverse order; [len] must equal
   [List.length p]. E.g. [mk_str 3 ['c';'b';'a'] = "abc"]. *)
let mk_str (len: int) (p: char list) : string =
  assert (len = List.length p);
  let buf = Bytes.create len in
  (* The k-th element of [p] lands at mirrored position len-1-k. *)
  List.iteri (fun k c -> Bytes.set buf (len - 1 - k) c) p;
  Bytes.to_string buf
(* The call [explode s a] is the list made by pushing the characters
in the string [s] on top of [a], in reverse order. For example,
[explode "ba" ['c';'d'] = ['a'; 'b'; 'c'; 'd']]. *)
(* [explode s acc] pushes the characters of [s] onto [acc] in reverse
   order: [explode "ba" ['c';'d'] = ['a';'b';'c';'d']]. Written
   tail-recursively, scanning [s] left to right. *)
let explode s acc =
  let len = String.length s in
  let rec go i l =
    if i >= len then l else go (i + 1) (s.[i] :: l)
  in go 0 acc
(* ERROR HANDLING *)
(* [stop msg seg] raises a lexical error [msg] located at segment
   [seg], with virtual line 1. *)
let stop msg seg = raise (Error.Lexer (msg, seg,1))

(* [fail msg buffer] is [stop] at the current lexeme of [buffer]. *)
let fail msg buffer = stop msg (Error.mk_seg buffer)

exception Local_err of Error.message

(* [handle_err scan buffer] runs [scan buffer], turning any
   [Local_err] into a located lexical error. *)
let handle_err scan buffer =
try scan buffer with Local_err msg -> fail msg buffer
(* LEXING ENGINE *)
(* Copying the current lexeme to [stdout] *)
let copy buffer = print_string (Lexing.lexeme buffer)
(* End of lines *)
let handle_nl buffer = Lexing.new_line buffer; copy buffer
(* C# PREPROCESSOR DIRECTIVES *)
(* The type [mode] defines the two scanning modes of the preprocessor:
either we copy the current characters or we skip them. *)
type mode = Copy | Skip
(* Trace of directives
We keep track of directives #if, #elif, #else, #region and #endregion.
*)
type cond = If of mode | Elif of mode | Else | Region
type trace = cond list
(* The function [reduce_cond] is called when a #endif directive is
found, and the trace (see type [trace] above) needs updating. *)
(* Pop the trace down to (and including) the nearest [If], returning
   the remaining trace and the mode saved with that [If]. *)
let rec reduce_cond seg = function
[] -> stop "Dangling #endif." seg
| If mode::trace -> trace, mode (* Restore the mode before the #if *)
| Region::_ -> stop "Invalid scoping of #region" seg
| _::trace -> reduce_cond seg trace (* Discard #elif/#else entries *)
(* The function [reduce_reg] is called when a #endregion directive is
read, and the trace needs updating. *)
let reduce_reg seg = function
[] -> stop "Dangling #endregion." seg
| Region::trace -> trace
| _ -> stop "Invalid scoping of #endregion" seg
(* The function [extend] is called when encountering conditional
directives #if, #else and #elif. As its name suggests, it extends
the current trace with the current conditional directive, whilst
performing some validity checks. *)
let extend seg cond trace =
match cond, trace with
If _, Elif _::_ ->
stop "Directive #if cannot follow #elif." seg
| Else, Else::_ ->
stop "Directive #else cannot follow #else." seg
| Else, [] ->
stop "Dangling #else." seg
| Elif _, Else::_ ->
stop "Directive #elif cannot follow #else." seg
| Elif _, [] ->
stop "Dangling #elif." seg
| _ -> cond::trace
(* The function [last_mode] seeks the last mode as recorded in the
trace (see type [trace] above). *)
let rec last_mode = function
[] -> assert false
| (If mode | Elif mode)::_ -> mode
| _::trace -> last_mode trace
(* Line offsets
The value [Inline] of type [offset] means that the current location
cannot be reached from the start of the line with only white
space. The same holds for the special value [Prefix 0]. Values of
the form [Prefix n] mean that the current location can be reached
from the start of the line with [n] white spaces (padding). These
distinctions are needed because preprocessor directives cannot
occur inside lines.
*)
type offset = Prefix of int | Inline
(* Print the line padding recorded in an offset: [Prefix n] stands for
   [n] leading spaces; [Inline] and [Prefix 0] print nothing. *)
let expand offset =
  match offset with
    Inline | Prefix 0 -> ()
  | Prefix n -> print_string (String.make n ' ')
(* Directives *)
let directives = [
"if"; "else"; "elif"; "endif"; "define"; "undef";
"error"; "warning"; "line"; "region"; "endregion";
"include"]
(* Environments and preprocessor expressions
The evaluation of conditional directives may involve symbols whose
value may be defined using #define directives, or undefined by
means of #undef. Therefore, we need to evaluate conditional
expressions in an environment made of a set of defined symbols.
Note that we rely on an external lexer and parser for the
conditional expressions. See modules [Escan] and [Eparser].
*)
module Env = Set.Make(String)
(* [eval env e] evaluates the conditional expression [e] in the
   environment [env] of defined symbols. *)
let rec eval env =
  let open Etree
  in function
       Or (e1,e2) -> eval env e1 || eval env e2
  | And (e1,e2) -> eval env e1 && eval env e2
  | Eq (e1,e2) -> eval env e1 = eval env e2
  (* Fix: [!=] is physical inequality and only matched [=] here by the
     accident that booleans are immediate values; [<>] is the
     structural inequality intended, mirroring [=] in the [Eq] case. *)
  | Neq (e1,e2) -> eval env e1 <> eval env e2
  | Not e -> not (eval env e)
  | True -> true
  | False -> false
  | Ident id -> Env.mem id env
let expr env buffer =
let tree = Eparser.pp_expression Escan.token buffer
in if eval env tree then Copy else Skip
(* END OF HEADER *)
}
(* REGULAR EXPRESSIONS *)
(* White space *)
let nl = '\n' | '\r' | "\r\n"
let blank = ' ' | '\t'
(* Integers *)
let int_suf = 'U' | 'u' | 'L' | 'l' | "UL" | "Ul" | "uL"
| "ul" | "LU" | "Lu" | "lU" | "lu"
let digit = ['0'-'9']
let dec = digit+ int_suf?
let hexdigit = digit | ['A'-'F' 'a'-'f']
let hex_pre = "0x" | "0X"
let hexa = hex_pre hexdigit+ int_suf?
let integer = dec | hexa
(* Unicode escape sequences *)
let four_hex = hexdigit hexdigit hexdigit hexdigit
let uni_esc = "\\u" four_hex | "\\U" four_hex four_hex
(* Identifiers *)
let lowercase = ['a'-'z']
let uppercase = ['A'-'Z']
let letter = lowercase | uppercase | uni_esc
let start = '_' | letter
let alphanum = letter | digit | '_'
let ident = start alphanum*
(* Real *)
let decimal = digit+
let exponent = ['e' 'E'] ['+' '-']? decimal
let real_suf = ['F' 'f' 'D' 'd' 'M' 'm']
let real = (decimal? '.')? decimal exponent? real_suf?
(* Characters *)
let single = [^ '\n' '\r']
let esc = "\\'" | "\\\"" | "\\\\" | "\\0" | "\\a" | "\\b" | "\\f"
| "\\n" | "\\r" | "\\t" | "\\v"
let hex_esc = "\\x" hexdigit hexdigit? hexdigit? hexdigit?
let character = single | esc | hex_esc | uni_esc
let char = "'" character "'"
(* Directives *)
let directive = '#' (blank* as space) (ident as id)
(* Rules *)
(* The rule [scan] scans the input buffer for directives, strings,
comments, blanks, new lines and end of file characters. As a
result, either the matched input is copied to [stdout] or not,
depending on the compilation directives. If not copied, new line
characters are output.
Scanning is triggered by the function call [scan env mode offset
trace lexbuf], where [env] is the set of defined symbols
(introduced by `#define'), [mode] specifies whether we are copying
or skipping the input, [offset] informs about the location in the
line (either there is a prefix of blanks, or at least a non-blank
character has been read), and [trace] is the stack of conditional
directives read so far.
The first call is [scan Env.empty Copy (Prefix 0) []], meaning that
we start with an empty environment, that copying the input is
enabled by default, and that we are at the start of a line and no
previous conditional directives have been read yet.
When an "#if" is matched, the trace is extended by the call [extend
lexbuf (If mode) trace], during the evaluation of which the
syntactic validity of having encountered an "#if" is checked (for
example, it would be invalid had an "#elif" been last read). Note
that the current mode is stored in the trace with the current
directive -- that mode may be later restored (see below for some
examples). Moreover, the directive would be deemed invalid if its
current position in the line (that is, its offset) were not
preceded by blanks or nothing, otherwise the rule [expr] is called
to scan the boolean expression associated with the "#if": if it
evaluates to [true], the result is [Copy], meaning that we may copy
what follows, otherwise skip it -- the actual decision depending on
the current mode. That new mode is used if we were in copy mode,
and the offset is reset to the start of a new line (as we read a
new line in [expr]); otherwise we were in skipping mode and the
value of the conditional expression must be ignored (but not its
syntax), and we continue skipping the input.
When an "#else" is matched, the trace is extended with [Else],
then, if the directive is not at a wrong offset, the rest of the
line is scanned with [pp_newline]. If we were in copy mode, the new
mode toggles to skipping mode; otherwise, the trace is searched for
the last encountered "#if" or "#elif" and the associated mode is
restored.
The case "#elif" is the result of the fusion (in the technical
sense) of the code for dealing with an "#else" followed by an
"#if".
When an "#endif" is matched, the trace is reduced, that is, all
conditional directives are popped until an [If mode'] is found and
[mode'] is restored as the current mode.
Consider the following four cases, where the modes (Copy/Skip) are
located between the lines:
Copy ----+ Copy ----+
#if true | #if true |
Copy | Copy |
#else | #else |
+-- Skip --+ | +-- Skip --+ |
#if true | | | #if false | | |
| Skip | | | Skip | |
#else | | | #else | | |
+-> Skip | | +-> Skip | |
#endif | | #endif | |
Skip <-+ | Skip <-+ |
#endif | #endif |
Copy <---+ Copy <---+
+-- Copy ----+ Copy --+-+
#if false | | #if false | |
| Skip | Skip | |
#else | | #else | |
+-> Copy --+ | +-+-- Copy <-+ |
#if true | | #if false | | |
Copy | | | | Skip |
#else | | #else | | |
Skip | | | +-> Copy |
#endif | | #endif | |
Copy <-+ | +---> Copy |
#endif | #endif |
Copy <---+ Copy <---+
The following four cases feature #elif. Note that we put between
brackets the mode saved for the #elif, which is sometimes restored
later.
Copy --+ Copy --+
#if true | #if true |
Copy | Copy |
#elif true +--[Skip] | #elif false +--[Skip] |
| Skip | | Skip |
#else | | #else | |
+-> Skip | +-> Skip |
#endif | #endif |
Copy <-+ Copy <-+
+-- Copy --+-+ +-- Copy ----+
#if false | | | #if false | |
| Skip | | | Skip |
#elif true +->[Copy] | | #elif false +->[Copy]--+ |
Copy <-+ | Skip | |
#else | #else | |
Skip | Copy <-+ |
#endif | #endif |
Copy <---+ Copy <---+
Note how "#elif" indeed behaves like an "#else" followed by an
"#if", and the mode stored with the data constructor [Elif]
corresponds to the mode before the virtual "#if".
Important note: Comments and strings are recognised as such only in
copy mode, which is a different behaviour from the preprocessor of
GNU GCC, which always does.
*)
rule scan env mode offset trace = parse
nl { handle_nl lexbuf;
scan env mode (Prefix 0) trace lexbuf }
| blank { match offset with
Prefix n -> scan env mode (Prefix (n+1)) trace lexbuf
| Inline -> copy lexbuf;
scan env mode Inline trace lexbuf }
| directive {
if not (List.mem id directives)
then fail "Invalid preprocessing directive." lexbuf
else if offset = Inline
then fail "Directive invalid inside line." lexbuf
else let seg = Error.mk_seg lexbuf in
match id with
"include" ->
let curr_line = Lexing.(lexbuf.lex_curr_p.pos_lnum)
and curr_file = Lexing.(lexbuf.lex_curr_p.pos_fname)
|> Filename.basename
and incl_file = scan_inclusion lexbuf in
let incl_buffer =
open_in incl_file |> Lexing.from_channel in
Printf.printf "# 1 \"%s\" 1\n" incl_file;
cat incl_buffer;
Printf.printf "# %i \"%s\" 2\n" (curr_line+1) curr_file;
scan env mode offset trace lexbuf
| "if" ->
let mode' = expr env lexbuf in
let new_mode = if mode = Copy then mode' else Skip in
let trace' = extend seg (If mode) trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "else" ->
let () = pp_newline lexbuf in
let new_mode =
if mode = Copy then Skip else last_mode trace in
let trace' = extend seg Else trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "elif" ->
let mode' = expr env lexbuf in
let trace', new_mode =
match mode with
Copy -> extend seg (Elif Skip) trace, Skip
| Skip -> let old_mode = last_mode trace
in extend seg (Elif old_mode) trace,
if old_mode = Copy then mode' else Skip
in scan env new_mode (Prefix 0) trace' lexbuf
| "endif" ->
let () = pp_newline lexbuf in
let trace', new_mode = reduce_cond seg trace
in scan env new_mode (Prefix 0) trace' lexbuf
| "define" ->
let id, seg = ident env lexbuf
in if id="true" || id="false"
then let msg = "Symbol \"" ^ id ^ "\" cannot be defined."
in stop msg seg
else if Env.mem id env
then let msg = "Symbol \"" ^ id
^ "\" was already defined."
in stop msg seg
else scan (Env.add id env) mode (Prefix 0) trace lexbuf
| "undef" ->
let id, _ = ident env lexbuf
in scan (Env.remove id env) mode (Prefix 0) trace lexbuf
| "error" ->
stop (message [] lexbuf) seg
| "warning" ->
let start_p, end_p = seg in
let msg = message [] lexbuf in
let open Lexing
in prerr_endline
("Warning at line " ^ string_of_int start_p.pos_lnum
^ ", char "
^ string_of_int (start_p.pos_cnum - start_p.pos_bol)
^ "--" ^ string_of_int (end_p.pos_cnum - end_p.pos_bol)
^ ":\n" ^ msg);
scan env mode (Prefix 0) trace lexbuf
| "region" ->
let msg = message [] lexbuf
in expand offset;
print_endline ("#" ^ space ^ "region" ^ msg);
scan env mode (Prefix 0) (Region::trace) lexbuf
| "endregion" ->
let msg = message [] lexbuf
in expand offset;
print_endline ("#" ^ space ^ "endregion" ^ msg);
scan env mode (Prefix 0) (reduce_reg seg trace) lexbuf
| "line" ->
expand offset;
print_string ("#" ^ space ^ "line");
line_ind lexbuf;
scan env mode (Prefix 0) trace lexbuf
| _ -> assert false
}
| eof { match trace with
[] -> expand offset; flush stdout; (env, trace)
| _ -> fail "Missing #endif." lexbuf }
| '"' { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_norm_str lexbuf
end;
scan env mode Inline trace lexbuf }
| "@\"" { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_verb_str lexbuf
end;
scan env mode Inline trace lexbuf }
| "//" { if mode = Copy then begin
expand offset; copy lexbuf;
in_line_com mode lexbuf
end;
scan env mode Inline trace lexbuf }
| "/*" { if mode = Copy then begin
expand offset; copy lexbuf;
handle_err in_block_com lexbuf
end;
scan env mode Inline trace lexbuf }
| _ { if mode = Copy then (expand offset; copy lexbuf);
scan env mode Inline trace lexbuf }
(* Support for #define and #undef *)
and ident env = parse
blank* { let r = __ident env lexbuf
in pp_newline lexbuf; r }
and __ident env = parse
ident as id { id, Error.mk_seg lexbuf }
(* Line indicator (#line) *)
and line_ind = parse
blank* as space { print_string space; line_indicator lexbuf }
and line_indicator = parse
decimal as ind {
print_string ind;
end_indicator lexbuf
}
| ident as id {
match id with
"default" | "hidden" ->
print_endline (id ^ message [] lexbuf)
| _ -> fail "Invalid line indicator." lexbuf
}
| nl | eof { fail "Line indicator expected." lexbuf }
and end_indicator = parse
blank* nl { copy lexbuf; handle_nl lexbuf }
| blank* eof { copy lexbuf }
| blank* "//" { copy lexbuf; print_endline (message [] lexbuf) }
| blank+ '"' { copy lexbuf;
handle_err in_norm_str lexbuf;
opt_line_com lexbuf }
| _ { fail "Line comment or blank expected." lexbuf }
and opt_line_com = parse
nl { handle_nl lexbuf }
| eof { copy lexbuf }
| blank+ { copy lexbuf; opt_line_com lexbuf }
| "//" { print_endline ("//" ^ message [] lexbuf) }
(* New lines and verbatim sequence of characters *)
and pp_newline = parse
nl { handle_nl lexbuf }
| blank+ { pp_newline lexbuf }
| "//" { in_line_com Skip lexbuf }
| _ { fail "Only a single-line comment allowed." lexbuf }
and message acc = parse
nl { Lexing.new_line lexbuf;
mk_str (List.length acc) acc }
| eof { mk_str (List.length acc) acc }
| _ as c { message (c::acc) lexbuf }
(* Comments *)
and in_line_com mode = parse
nl { handle_nl lexbuf }
| eof { flush stdout }
| _ { if mode = Copy then copy lexbuf; in_line_com mode lexbuf }
and in_block_com = parse
nl { handle_nl lexbuf; in_block_com lexbuf }
| "*/" { copy lexbuf }
| eof { raise (Local_err "Unterminated comment.") }
| _ { copy lexbuf; in_block_com lexbuf }
(* Include a file *)
and cat = parse
eof { () }
| _ { copy lexbuf; cat lexbuf }
(* Included filename *)
and scan_inclusion = parse
blank+ { scan_inclusion lexbuf }
| '"' { handle_err (in_inclusion [] 0) lexbuf }
and in_inclusion acc len = parse
'"' { mk_str len acc }
| nl { fail "Newline invalid in string." lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ as c { in_inclusion (c::acc) (len+1) lexbuf }
(* Strings *)
and in_norm_str = parse
"\\\"" { copy lexbuf; in_norm_str lexbuf }
| '"' { copy lexbuf }
| nl { fail "Newline invalid in string." lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ { copy lexbuf; in_norm_str lexbuf }
and in_verb_str = parse
"\"\"" { copy lexbuf; in_verb_str lexbuf }
| '"' { copy lexbuf }
| nl { handle_nl lexbuf; in_verb_str lexbuf }
| eof { raise (Local_err "Unterminated string.") }
| _ { copy lexbuf; in_verb_str lexbuf }
{
(* The function [lex] is a wrapper of [scan], which also checks that
the trace is empty at the end. Note that we discard the
environment at the end. *)
let lex buffer =
let _env, trace = scan Env.empty Copy (Prefix 0) [] buffer
in assert (trace = [])
(* Exported definitions *)
type filename = string
let trace (name: filename) : unit =
match open_in name with
cin ->
let open Lexing in
let buffer = from_channel cin in
let pos_fname = Filename.basename name in
let () = buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
let open Error
in (try lex buffer with
Lexer diag -> print "Lexical" diag
| Parser diag -> print "Syntactical" diag
| Eparser.Error -> print "" ("Parse", mk_seg buffer, 1));
close_in cin; flush stdout
| exception Sys_error msg -> prerr_endline msg
}

5
vendors/Preproc/ProcMain.ml vendored Normal file
View File

@ -0,0 +1,5 @@
(* This is the entry point of the C# preprocessor. See [Makefile.cfg]. *)
match Array.length Sys.argv with
2 -> Preproc.trace Sys.argv.(1)
| _ -> prerr_endline ("Usage: " ^ Sys.argv.(0) ^ " [file]")

1
vendors/Preproc/README.md vendored Normal file
View File

@ -0,0 +1 @@
# A C# preprocessor in OCaml

23
vendors/Preproc/build.sh vendored Executable file
View File

@ -0,0 +1,23 @@
#!/bin/sh
set -x
ocamllex.opt Escan.mll
ocamllex.opt Preproc.mll
menhir -la 1 Eparser.mly
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Etree.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Error.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Etree.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Error.ml
ocamlfind ocamlc -strict-sequence -w +A-48-4 -c Eparser.mli
camlcmd="ocamlfind ocamlc -I _i686 -strict-sequence -w +A-48-4 "
menhir --infer --ocamlc="$camlcmd" Eparser.mly
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Escan.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Eparser.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Preproc.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Escan.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c Preproc.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c EMain.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c EMain.ml
ocamlfind ocamlopt -o EMain.opt Etree.cmx Eparser.cmx Error.cmx Escan.cmx Preproc.cmx EMain.cmx
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c ProcMain.ml
ocamlfind ocamlopt -strict-sequence -w +A-48-4 -c ProcMain.ml
ocamlfind ocamlopt -o ProcMain.opt Etree.cmx Eparser.cmx Error.cmx Escan.cmx Preproc.cmx ProcMain.cmx

3
vendors/Preproc/clean.sh vendored Executable file
View File

@ -0,0 +1,3 @@
#!/bin/sh
\rm -f *.cm* *.o *.byte *.opt

20
vendors/Preproc/dune vendored Normal file
View File

@ -0,0 +1,20 @@
(ocamllex Escan Preproc)
(menhir
(modules Eparser))
(library
(name PreProc)
; (public_name ligo.preproc)
(wrapped false)
(modules Eparser Error Escan Etree Preproc))
(test
(modules ProcMain)
(libraries PreProc)
(name ProcMain))
(test
(modules EMain)
(libraries PreProc)
(name EMain))

View File

@ -539,8 +539,8 @@ let bind_smap (s:_ X_map.String.t) =
let aux k v prev =
prev >>? fun prev' ->
v >>? fun v' ->
ok @@ add k v' prev' in
fold aux s (ok empty)
ok @@ add k v' prev'
in fold aux s (ok empty)
let bind_fold_smap f init (smap : _ X_map.String.t) =
let aux k v prev =
@ -558,11 +558,11 @@ let bind_map_list f lst = bind_list (List.map f lst)
let rec bind_map_list_seq f lst = match lst with
| [] -> ok []
| hd :: tl -> (
| hd :: tl ->
let%bind hd' = f hd in
let%bind tl' = bind_map_list_seq f tl in
ok (hd' :: tl')
)
let bind_map_ne_list : _ -> 'a X_list.Ne.t -> 'b X_list.Ne.t result =
fun f lst -> bind_ne_list (X_list.Ne.map f lst)
let bind_iter_list : (_ -> unit result) -> _ list -> unit result =
@ -575,11 +575,8 @@ let bind_location (x:_ Location.wrap) =
let bind_map_location f x = bind_location (Location.map f x)
let bind_fold_list f init lst =
let aux x y =
x >>? fun x ->
f x y
in
List.fold_left aux (ok init) lst
let aux x y = x >>? fun x -> f x y
in List.fold_left aux (ok init) lst
module TMap(X : Map.OrderedType) = struct
module MX = Map.Make(X)
@ -587,8 +584,7 @@ module TMap(X : Map.OrderedType) = struct
let aux k v x =
x >>? fun x ->
f ~x ~k ~v
in
MX.fold aux map (ok init)
in MX.fold aux map (ok init)
let bind_map_Map f map =
let aux k v map' =
@ -596,33 +592,26 @@ module TMap(X : Map.OrderedType) = struct
f ~k ~v >>? fun v' ->
ok @@ MX.update k (function
| None -> Some v'
| Some _ -> failwith "key collision, shouldn't happen in bind_map_Map")
| Some _ ->
failwith "Key collision: Should not happen in bind_map_Map")
map'
in
MX.fold aux map (ok MX.empty)
in MX.fold aux map (ok MX.empty)
end
let bind_fold_pair f init (a,b) =
let aux x y =
x >>? fun x ->
f x y
in
List.fold_left aux (ok init) [a;b]
let aux x y = x >>? fun x -> f x y
in List.fold_left aux (ok init) [a;b]
let bind_fold_triple f init (a,b,c) =
let aux x y =
x >>? fun x ->
f x y
in
List.fold_left aux (ok init) [a;b;c]
let aux x y = x >>? fun x -> f x y
in List.fold_left aux (ok init) [a;b;c]
let bind_fold_map_list = fun f acc lst ->
let rec aux (acc , prev) f = function
| [] -> ok (acc , prev)
let bind_fold_map_list f acc lst =
let rec aux (acc, prev) f = function
| [] -> ok (acc, prev)
| hd :: tl ->
f acc hd >>? fun (acc' , hd') ->
aux (acc' , hd' :: prev) f tl
in
aux (acc', hd'::prev) f tl in
aux (acc , []) f lst >>? fun (acc' , lst') ->
ok @@ (acc' , List.rev lst')
@ -637,23 +626,18 @@ let bind_fold_map_right_list = fun f acc lst ->
ok lst'
let bind_fold_right_list f init lst =
let aux x y =
x >>? fun x ->
f x y
in
X_list.fold_right' aux (ok init) lst
let aux x y = x >>? fun x -> f x y
in X_list.fold_right' aux (ok init) lst
let bind_find_map_list error f lst =
let rec aux lst =
match lst with
| [] -> fail error
| hd :: tl -> (
| hd :: tl ->
match f hd with
| Error _ -> aux tl
| o -> o
)
in
aux lst
in aux lst
let bind_list_iter f lst =
let aux () y = f y in
@ -663,23 +647,23 @@ let bind_or (a, b) =
match a with
| Ok _ as o -> o
| _ -> b
let bind_map_or (fa , fb) c =
bind_or (fa c , fb c)
let bind_lr (type a b) ((a : a result), (b:b result)) : [`Left of a | `Right of b] result =
let bind_map_or (fa, fb) c = bind_or (fa c, fb c)
let bind_lr (type a b) ((a : a result), (b:b result))
: [`Left of a | `Right of b] result =
match (a, b) with
| (Ok _ as o), _ -> map (fun x -> `Left x) o
| _, (Ok _ as o) -> map (fun x -> `Right x) o
| _, Error b -> Error b
let bind_lr_lazy (type a b) ((a : a result), (b:unit -> b result)) : [`Left of a | `Right of b] result =
let bind_lr_lazy (type a b) ((a : a result), (b:unit -> b result))
: [`Left of a | `Right of b] result =
match a with
| Ok _ as o -> map (fun x -> `Left x) o
| _ -> (
match b() with
| Ok _ as o -> map (fun x -> `Right x) o
| Error b -> Error b
)
| _ -> match b() with
| Ok _ as o -> map (fun x -> `Right x) o
| Error b -> Error b
let bind_and (a, b) =
a >>? fun a ->
@ -698,9 +682,9 @@ let bind_map_pair f (a, b) =
bind_pair (f a, f b)
let bind_fold_map_pair f acc (a, b) =
f acc a >>? fun (acc' , a') ->
f acc' b >>? fun (acc'' , b') ->
ok (acc'' , (a' , b'))
f acc a >>? fun (acc', a') ->
f acc' b >>? fun (acc'', b') ->
ok (acc'', (a', b'))
let bind_map_triple f (a, b, c) = bind_and3 (f a, f b, f c)
@ -717,29 +701,23 @@ let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x ->
(**
Wraps a call that might trigger an exception in a result.
*)
let generic_try err f =
try (
ok @@ f ()
) with _ -> fail err
let generic_try err f = try ok @@ f () with _ -> fail err
(**
Same, but with a handler that generates an error based on the exception,
rather than a fixed error.
*)
let specific_try handler f =
try (
ok @@ f ()
) with exn -> fail (handler exn)
try ok @@ f () with exn -> fail (handler exn)
(**
Same, but tailored to `Sys_error`s, found in `Sys` from `Pervasives`.
*)
let sys_try f =
let handler = function
| Sys_error str -> error (thunk "Sys_error") (fun () -> str)
| exn -> raise exn
in
specific_try handler f
Sys_error str -> error (thunk "Sys_error") (fun () -> str)
| exn -> raise exn
in specific_try handler f
(**
Same, but for a given command.
@ -747,53 +725,60 @@ let sys_try f =
let sys_command command =
sys_try (fun () -> Sys.command command) >>? function
| 0 -> ok ()
| n -> fail (fun () -> error (thunk "Nonzero return code") (fun () -> (string_of_int n)) ())
| n -> fail (fun () -> error (thunk "Nonzero return code.")
(fun () -> (string_of_int n)) ())
(**
Assertion module.
Would make sense to move it outside Trace.
*)
module Assert = struct
let assert_fail ?(msg="didn't fail") = function
| Ok _ -> simple_fail msg
| _ -> ok ()
let assert_fail ?(msg="Did not fail.") = function
Ok _ -> simple_fail msg
| _ -> ok ()
let assert_true ?(msg="not true") = function
| true -> ok ()
| false -> simple_fail msg
let assert_true ?(msg="Not true.") = function
true -> ok ()
| false -> simple_fail msg
let assert_equal ?msg expected actual =
assert_true ?msg (expected = actual)
let assert_equal_string ?msg expected actual =
let msg =
let default = Format.asprintf "Not equal string : expected \"%s\", got \"%s\"" expected actual in
X_option.unopt ~default msg in
assert_equal ~msg expected actual
let default =
Format.asprintf "Not equal string: Expected \"%s\", got \"%s\""
expected actual
in X_option.unopt ~default msg
in assert_equal ~msg expected actual
let assert_equal_int ?msg expected actual =
let msg =
let default = Format.asprintf "Not equal int : expected %d, got %d" expected actual in
X_option.unopt ~default msg in
assert_equal ~msg expected actual
let default =
Format.asprintf "Not equal int : expected %d, got %d"
expected actual
in X_option.unopt ~default msg
in assert_equal ~msg expected actual
let assert_equal_bool ?msg expected actual =
let msg =
let default = Format.asprintf "Not equal bool : expected %b, got %b" expected actual in
let default =
Format.asprintf "Not equal bool: expected %b, got %b"
expected actual in
X_option.unopt ~default msg in
assert_equal ~msg expected actual
let assert_none ?(msg="not a none") opt = match opt with
let assert_none ?(msg="Not a None value.") opt = match opt with
| None -> ok ()
| _ -> simple_fail msg
let assert_list_size ?(msg="lst doesn't have the right size") lst n =
let assert_list_size ?(msg="Wrong list size.") lst n =
assert_true ~msg List.(length lst = n)
let assert_list_empty ?(msg="lst isn't empty") lst =
let assert_list_empty ?(msg="Non-empty list.") lst =
assert_true ~msg List.(length lst = 0)
let assert_list_same_size ?(msg="lists don't have same size") a b =
let assert_list_same_size ?(msg="Lists with different lengths.") a b =
assert_true ~msg List.(length a = length b)
let assert_list_size_2 ~msg = function