Refactoring of comments (for [dune build @doc]).

Refactoring of the parsing of command-line arguments

  * The type [options] is now abstract and implemented as an object
    type, to avoid struggling with scoping and type inference when
    record types share some common field names (a short sketch
    follows the commit metadata below).

Refactoring of ParserLog for PascaLIGO and CameLIGO

  * The immediate motivation for this refactoring was to remove the
    use of a couple of global references. A consequence is nicer and
    more compact code, obtained by threading a state through the
    printers (also sketched below). See the new test files
    [pascaligo/Tests/pp.ligo] and [ligodity/Tests/pp.mligo].

  * Another consequence concerns how strings are made from AST
    nodes: that choice used to depend on the CLI (offsets? mode?).
    After this refactoring, it is hardcoded in the simplifiers in a
    few places (TODO), pending a general solution that would have
    all command-line options flow through the compiler.

  * I removed the use of the vendored [x_option.ml], [x_map.ml] and
    [x_list.ml] when handling optional values. (Fewer dependencies
    this way.)

Refactoring of the ASTs

  * I removed the node [local_decl], which was already set to [[]]
    in a previous commit (the one that removed local declarations as
    redundant, since statements could already be instructions or
    declarations).

  * I changed [StrLit] to [String] in the AST of CameLIGO and
    ReasonLIGO.

  * I also changed the type [fun_expr] so that either a block is
    present, and therefore followed by the [with] keyword, or it is
    not. (Before, the type did not tie the presence of a block to
    the presence of the keyword.) This change is schematised below.

Notes

  * [LexerMain.ml] and [ParserMain.ml] for CameLIGO and PascaLIGO
    are almost identical and differ in the same way (language name
    and file extension), which suggests that they should be moved to
    the [shared] folder and instantiated as a functor in the future
    (TODO).

  * I removed the blank characters at the end of many lines in the
    parser of ReasonLIGO.
Parent: 079e59edff
Commit: 0226b9f23c
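The first two refactorings lend themselves to a short illustration. What follows is a minimal, self-contained OCaml sketch, not the actual [EvalOpt]/[ParserLog] code (the real options also carry [libs], [verbose] and the preprocessing plumbing, and the real state drives the AST printers); the names [Options], [Printer_state] and [print_token] are illustrative only.

(* Command-line options as an abstract object type: callers write
   [options#mode] or [options#input], so several modules can use the
   same field names without record-label clashes. *)
module Options = struct
  type command = Quiet | Copy | Units | Tokens

  type options = <
    input   : string option;
    offsets : bool;
    mode    : [`Byte | `Point];
    cmd     : command
  >

  let make ~input ~offsets ~mode ~cmd : options =
    object
      method input   = input
      method offsets = offsets
      method mode    = mode
      method cmd     = cmd
    end
end

(* Instead of global references [offsets] and [mode], the printing
   iterators thread an explicit state carrying the CLI choices and
   the output buffer. *)
module Printer_state = struct
  type state = {
    offsets : bool;
    mode    : [`Byte | `Point];
    buffer  : Buffer.t
  }

  let mk_state ~offsets ~mode ~buffer = {offsets; mode; buffer}

  (* A toy printer in the style of [ParserLog.print_tokens]. *)
  let print_token state ~lexeme ~line ~horizontal =
    let axis = if state.offsets then "offset" else "column"
    and pos_unit = match state.mode with `Byte -> "byte" | `Point -> "point" in
    Buffer.add_string state.buffer
      (Printf.sprintf "%s at line %d (%s %s %d)\n"
         lexeme line pos_unit axis horizontal)
end

let () =
  let options =
    Options.make ~input:(Some "test.mligo")
                 ~offsets:true ~mode:`Point ~cmd:Options.Quiet in
  let buffer = Buffer.create 131 in
  let state =
    Printer_state.mk_state ~offsets:options#offsets ~mode:options#mode ~buffer in
  Printer_state.print_token state ~lexeme:"let" ~line:1 ~horizontal:0;
  Buffer.output_buffer stdout buffer

Because object fields are accessed with [#] and typed structurally, the [mode] and [input] methods here cannot clash with record labels of the same names elsewhere; and because the printing state is an explicit argument, the CLI choices no longer have to live in mutable globals such as the former [ParserLog.offsets] and [ParserLog.mode].

On the AST side, the [fun_expr] change can be summarised as replacing two independently optional fields by one optional pair, so that a block without its [with] keyword (or the converse) is no longer representable. Schematically, with placeholder types standing in for the real AST nodes:

type block = Block
and kwd_with = KwdWith
and expr = Expr

(* Before: the type allowed [block] without [kwd_with] and vice versa. *)
type fun_expr_before = {
  block    : block option;
  kwd_with : kwd_with option;
  return   : expr
}

(* After: both are present together, or neither is. *)
type fun_expr_after = {
  block_with : (block * kwd_with) option;
  return     : expr
}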
@@ -268,7 +268,7 @@ and list_expr =

and string_expr =
Cat of cat bin_op reg
| StrLit of string reg
| String of string reg

and constr_expr =
ENone of c_None
@@ -422,7 +422,7 @@ let arith_expr_to_region = function
| Nat {region; _} -> region

let string_expr_to_region = function
StrLit {region;_} | Cat {region;_} -> region
String {region;_} | Cat {region;_} -> region

let list_expr_to_region = function
ECons {region; _} | EListComp {region; _}
@@ -256,7 +256,7 @@ and list_expr =

and string_expr =
Cat of cat bin_op reg (* e1 ^ e2 *)
| StrLit of string reg (* "foo" *)
| String of string reg (* "foo" *)

and constr_expr =
ENone of c_None
@ -1,43 +1,40 @@
|
||||
(* Driver for the lexer of Ligodity *)
|
||||
(** Driver for the LIGO lexer *)
|
||||
|
||||
(* Error printing and exception tracing *)
|
||||
let extension = ".mligo"
|
||||
let options = EvalOpt.read "CameLIGO" extension
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(* Running the lexer on the source *)
|
||||
|
||||
let options = EvalOpt.read "CameLIGO" ".mligo"
|
||||
|
||||
open EvalOpt
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options.libs with
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
(* Preprocessing the input source and opening the input channels *)
|
||||
|
||||
let prefix =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp.mligo"
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
@ -46,16 +43,14 @@ let cpp_cmd =
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(* Running the lexer on the input file *)
|
||||
(** {1 Running the lexer on the input file} *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
||||
|
||||
module Log = LexerLog.Make (Lexer)
|
||||
|
||||
let () = Log.trace ~offsets:options.offsets
|
||||
options.mode (Some pp_input) options.cmd
|
||||
let () = Log.trace ~offsets:options#offsets
|
||||
options#mode (Some pp_input) options#cmd
|
||||
|
@@ -690,7 +690,7 @@ core_expr:
| Nat { EArith (Nat $1) }
| Ident | module_field { EVar $1 }
| projection { EProj $1 }
| String { EString (StrLit $1) }
| String { EString (String $1) }
| unit { EUnit $1 }
| False { ELogic (BoolExpr (False $1)) }
| True { ELogic (BoolExpr (True $1)) }
(File diff suppressed because it is too large.)
@@ -1,23 +1,30 @@
(* Printing the AST *)
(** Printing the AST *)

val offsets : bool ref
val mode : [`Byte | `Point] ref
(** The type [state] captures the state that is threaded in the
printing iterators in this module.
*)
type state

(* Printing the tokens reconstructed from the AST. This is very useful
val mk_state :
offsets:bool -> mode:[`Point|`Byte] -> buffer:Buffer.t -> state

(** {1 Printing tokens from the AST in a buffer}

Printing the tokens reconstructed from the AST. This is very useful
for debugging, as the output of [print_token ast] can be textually
compared to that of [Lexer.trace] (see module [LexerMain]). The
optional parameter [undo] is bound to [true] if the caller wants
the AST to be unparsed before printing (those nodes that have been
normalised with function [norm_let] and [norm_fun]). *)
compared to that of [Lexer.trace] (see module [LexerMain]). *)

val print_tokens : Buffer.t -> AST.t -> unit
val print_pattern : Buffer.t -> AST.pattern -> unit
val print_expr : Buffer.t -> AST.expr -> unit
val print_tokens : state -> AST.t -> unit
val print_pattern : state -> AST.pattern -> unit
val print_expr : state -> AST.expr -> unit

val tokens_to_string : AST.t -> string
val pattern_to_string : AST.pattern -> string
val expr_to_string : AST.expr -> string
val tokens_to_string :
offsets:bool -> mode:[`Point|`Byte] -> AST.t -> string
val pattern_to_string :
offsets:bool -> mode:[`Point|`Byte] -> AST.pattern -> string
val expr_to_string :
offsets:bool -> mode:[`Point|`Byte] -> AST.expr -> string

(* Pretty-printing of the AST *)
(** {1 Pretty-printing of the AST} *)

val pp_ast : Buffer.t -> AST.t -> unit
val pp_ast : state -> AST.t -> unit
@ -1,27 +1,24 @@
|
||||
(* Driver for the parser of Ligodity *)
|
||||
(** Driver for the parser of CameLIGO *)
|
||||
|
||||
(* Error printing and exception tracing *)
|
||||
let extension = ".mligo"
|
||||
let options = EvalOpt.read "CameLIGO" extension
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(* Reading the command-line options *)
|
||||
|
||||
let options = EvalOpt.read "CameLIGO" ".mligo"
|
||||
|
||||
open EvalOpt
|
||||
|
||||
(* Auxiliary functions *)
|
||||
|
||||
(** Auxiliary functions
|
||||
*)
|
||||
let sprintf = Printf.sprintf
|
||||
|
||||
(* Extracting the input file *)
|
||||
|
||||
(** Extracting the input file
|
||||
*)
|
||||
let file =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true
|
||||
|
||||
(* Error printing and exception tracing *)
|
||||
(** {1 Error printing and exception tracing} *)
|
||||
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
@ -35,35 +32,35 @@ let error_to_string = function
|
||||
| _ -> assert false
|
||||
|
||||
let print_error ?(offsets=true) mode Region.{region; value} ~file =
|
||||
let msg = error_to_string value in
|
||||
let reg = region#to_string ~file ~offsets mode in
|
||||
let msg = error_to_string value in
|
||||
let reg = region#to_string ~file ~offsets mode in
|
||||
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
|
||||
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options.libs with
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
(* Preprocessing the input source and opening the input channels *)
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp.mligo"
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
@ -72,12 +69,12 @@ let cpp_cmd =
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(* Instanciating the lexer *)
|
||||
(** {1 Instanciating the lexer} *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
|
||||
@ -88,45 +85,49 @@ let Lexer.{read; buffer; get_pos; get_last; close} =
|
||||
|
||||
and cout = stdout
|
||||
|
||||
let log = Log.output_token ~offsets:options.offsets
|
||||
options.mode options.cmd cout
|
||||
let log = Log.output_token ~offsets:options#offsets
|
||||
options#mode options#cmd cout
|
||||
|
||||
and close_all () = close (); close_out cout
|
||||
|
||||
(* Tokeniser *)
|
||||
(** {1 Tokeniser} *)
|
||||
|
||||
let tokeniser = read ~log
|
||||
|
||||
(* Main *)
|
||||
(** {1 Main} *)
|
||||
|
||||
let () =
|
||||
try
|
||||
let ast = Parser.contract tokeniser buffer in
|
||||
if Utils.String.Set.mem "ast" options.verbose
|
||||
if Utils.String.Set.mem "ast" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.offsets := options.offsets;
|
||||
ParserLog.mode := options.mode;
|
||||
ParserLog.pp_ast buffer ast;
|
||||
ParserLog.pp_ast state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
end
|
||||
else if Utils.String.Set.mem "ast-tokens" options.verbose
|
||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.offsets := options.offsets;
|
||||
ParserLog.mode := options.mode;
|
||||
ParserLog.print_tokens buffer ast;
|
||||
ParserLog.print_tokens state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
end
|
||||
with
|
||||
Lexer.Error err ->
|
||||
close_all ();
|
||||
Lexer.print_error ~offsets:options.offsets
|
||||
options.mode err ~file
|
||||
Lexer.print_error ~offsets:options#offsets
|
||||
options#mode err ~file
|
||||
| Parser.Error ->
|
||||
let region = get_last () in
|
||||
let error = Region.{region; value=ParseError} in
|
||||
let () = close_all () in
|
||||
print_error ~offsets:options.offsets
|
||||
options.mode error ~file
|
||||
print_error ~offsets:options#offsets
|
||||
options#mode error ~file
|
||||
| Sys_error msg -> Utils.highlight msg
|
||||
|
src/passes/1-parser/ligodity/Tests/pp.mligo (new file, 26 lines)
@@ -0,0 +1,26 @@
type q = {a: int; b: {c: string}}
type r = int list
type s = (int, address) map
type t = int
type u = {a: int; b: t * char}
type v = int * (string * address)
type w = timestamp * nat -> (string, address) map
type x = A | B of t * int | C of int -> (string -> int)

let x = 4
let y : t = (if true then -3 + f x x else 0) - 1
let f (x: int) y = (x : int)
let z : (t) = y
let w =
match f 3 with
None -> []
| Some (1::[2;3]) -> [4;5]::[]
let n : nat = 0n
let a = A
let b = B a
let c = C (a, B (a))
let d = None
let e = Some (a, B b)
let z = z.1.2
let v = "hello" ^ "world" ^ "!"
let w = Map.literal [(1,"1"); (2,"2")]
@@ -1 +0,0 @@
ocamlc: -w -42
@@ -4,9 +4,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/pos.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/x_map.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/x_list.ml
$HOME/git/ligo/vendors/ligo-utils/simple-utils/x_option.ml
../shared/Lexer.mli
../shared/Lexer.mll
../shared/Error.mli
@@ -217,10 +217,8 @@ and fun_expr = {
colon : colon;
ret_type : type_expr;
kwd_is : kwd_is;
local_decls : local_decl list;
block : block reg option;
kwd_with : kwd_with option;
return : expr;
block_with : (block reg * kwd_with) option;
return : expr
}

and fun_decl = {
@@ -269,9 +267,6 @@ and statement =
Instr of instruction
| Data of data_decl

and local_decl =
| LocalData of data_decl

and data_decl =
LocalConst of const_decl reg
| LocalVar of var_decl reg
@@ -757,11 +752,6 @@ let pattern_to_region = function
| PList PCons {region; _}
| PTuple {region; _} -> region

let local_decl_to_region = function
| LocalData LocalFun {region; _}
| LocalData LocalConst {region; _}
| LocalData LocalVar {region; _} -> region

let lhs_to_region : lhs -> Region.t = function
Path path -> path_to_region path
| MapPath {region; _} -> region
@@ -208,10 +208,8 @@ and fun_expr = {
colon : colon;
ret_type : type_expr;
kwd_is : kwd_is;
local_decls : local_decl list;
block : block reg option;
kwd_with : kwd_with option;
return : expr;
block_with : (block reg * kwd_with) option;
return : expr
}

and fun_decl = {
@@ -260,9 +258,6 @@ and statement =
Instr of instruction
| Data of data_decl

and local_decl =
| LocalData of data_decl

and data_decl =
LocalConst of const_decl reg
| LocalVar of var_decl reg
@@ -615,7 +610,6 @@ val type_expr_to_region : type_expr -> Region.t
val expr_to_region : expr -> Region.t
val instr_to_region : instruction -> Region.t
val pattern_to_region : pattern -> Region.t
val local_decl_to_region : local_decl -> Region.t
val path_to_region : path -> Region.t
val lhs_to_region : lhs -> Region.t
val rhs_to_region : rhs -> Region.t
@ -1,43 +1,40 @@
|
||||
(* Driver for the lexer of PascaLIGO *)
|
||||
(** Driver for the LIGO lexer *)
|
||||
|
||||
(* Error printing and exception tracing *)
|
||||
let extension = ".ligo"
|
||||
let options = EvalOpt.read "PascaLIGO" extension
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(* Running the lexer on the source *)
|
||||
|
||||
let options = EvalOpt.read "PascaLIGO" ".ligo"
|
||||
|
||||
open EvalOpt
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options.libs with
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
(* Preprocessing the input source and opening the input channels *)
|
||||
|
||||
let prefix =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp.ligo"
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
@ -46,16 +43,14 @@ let cpp_cmd =
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(* Running the lexer on the input file *)
|
||||
(** {1 Running the lexer on the input file} *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
||||
|
||||
module Log = LexerLog.Make (Lexer)
|
||||
|
||||
let () = Log.trace ~offsets:options.offsets
|
||||
options.mode (Some pp_input) options.cmd
|
||||
let () = Log.trace ~offsets:options#offsets
|
||||
options#mode (Some pp_input) options#cmd
|
||||
|
@@ -252,12 +252,9 @@ fun_expr:
colon = $4;
ret_type = $5;
kwd_is = $6;
local_decls = [];
block = Some $7;
kwd_with = Some $8;
return = $9;
}
in {region;value}}
block_with = Some ($7, $8);
return = $9}
in {region;value} }
| Function option(fun_name) parameters COLON type_expr Is
expr {
let stop = expr_to_region $7 in
@@ -269,11 +266,8 @@ fun_expr:
colon = $4;
ret_type = $5;
kwd_is = $6;
local_decls = [];
block = None;
kwd_with = None;
return = $7;
}
block_with = None;
return = $7}
in {region;value}}

@@ -288,20 +282,17 @@ fun_decl:
| None -> $1.region in
let region = cover $1.region stop
and value = {
fun_expr = $1;
terminator = $2;
}
in {region;value}}
fun_expr = $1;
terminator = $2}
in {region; value} }

open_fun_decl:
fun_expr {
let region = $1.region
and value = {
fun_expr = $1;
terminator = None;
}
in {region;value}}

fun_expr = $1;
terminator = None}
in {region; value} }

parameters:
par(nsepseq(param_decl,SEMI)) { $1 }

(File diff suppressed because it is too large.)
@@ -1,18 +1,35 @@
(* Printing the AST *)
(** Printing the AST *)

val offsets : bool ref
val mode : [`Byte | `Point] ref
(** The type [state] captures the state that is threaded in the
printing iterators in this module.
*)
type state

val print_tokens : Buffer.t -> AST.t -> unit
val print_path : Buffer.t -> AST.path -> unit
val print_pattern : Buffer.t -> AST.pattern -> unit
val print_instruction : Buffer.t -> AST.instruction -> unit
val mk_state :
offsets:bool -> mode:[`Point|`Byte] -> buffer:Buffer.t -> state

val tokens_to_string : AST.t -> string
val path_to_string : AST.path -> string
val pattern_to_string : AST.pattern -> string
val instruction_to_string : AST.instruction -> string
(** {1 Printing tokens from the AST in a buffer}

(* Pretty-printing of the AST *)
Printing the tokens reconstructed from the AST. This is very useful
for debugging, as the output of [print_token ast] can be textually
compared to that of [Lexer.trace] (see module [LexerMain]). *)

val pp_ast : Buffer.t -> AST.t -> unit
val print_tokens : state -> AST.t -> unit
val print_path : state -> AST.path -> unit
val print_pattern : state -> AST.pattern -> unit
val print_instruction : state -> AST.instruction -> unit

(** {1 Printing tokens from the AST in a string} *)

val tokens_to_string :
offsets:bool -> mode:[`Point|`Byte] -> AST.t -> string
val path_to_string :
offsets:bool -> mode:[`Point|`Byte] -> AST.path -> string
val pattern_to_string :
offsets:bool -> mode:[`Point|`Byte] -> AST.pattern -> string
val instruction_to_string :
offsets:bool -> mode:[`Point|`Byte] -> AST.instruction -> string

(** {1 Pretty-printing of the AST} *)

val pp_ast : state -> AST.t -> unit
@ -1,27 +1,24 @@
|
||||
(* Driver for the parser of PascaLIGO *)
|
||||
(** Driver for the parser of PascaLIGO *)
|
||||
|
||||
(* Error printing and exception tracing *)
|
||||
let extension = ".ligo"
|
||||
let options = EvalOpt.read "PascaLIGO" extension
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(* Reading the command-line options *)
|
||||
|
||||
let options = EvalOpt.read "PascaLIGO" ".ligo"
|
||||
|
||||
open EvalOpt
|
||||
|
||||
(* Auxiliary functions *)
|
||||
|
||||
(** Auxiliary functions
|
||||
*)
|
||||
let sprintf = Printf.sprintf
|
||||
|
||||
(* Extracting the input file *)
|
||||
|
||||
(** Extracting the input file
|
||||
*)
|
||||
let file =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true
|
||||
|
||||
(* Error printing and exception tracing *)
|
||||
(** {1 Error printing and exception tracing} *)
|
||||
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
@ -35,35 +32,35 @@ let error_to_string = function
|
||||
| _ -> assert false
|
||||
|
||||
let print_error ?(offsets=true) mode Region.{region; value} ~file =
|
||||
let msg = error_to_string value in
|
||||
let reg = region#to_string ~file ~offsets mode in
|
||||
let msg = error_to_string value in
|
||||
let reg = region#to_string ~file ~offsets mode in
|
||||
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
|
||||
|
||||
(* Path for CPP inclusions (#include) *)
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options.libs with
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
(* Preprocessing the input source and opening the input channels *)
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp.ligo"
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options.input with
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
@ -72,12 +69,12 @@ let cpp_cmd =
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options.verbose
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(* Instanciating the lexer *)
|
||||
(** {1 Instanciating the lexer} *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
|
||||
@ -88,45 +85,49 @@ let Lexer.{read; buffer; get_pos; get_last; close} =
|
||||
|
||||
and cout = stdout
|
||||
|
||||
let log = Log.output_token ~offsets:options.offsets
|
||||
options.mode options.cmd cout
|
||||
let log = Log.output_token ~offsets:options#offsets
|
||||
options#mode options#cmd cout
|
||||
|
||||
and close_all () = close (); close_out cout
|
||||
|
||||
(* Tokeniser *)
|
||||
(** {1 Tokeniser} *)
|
||||
|
||||
let tokeniser = read ~log
|
||||
|
||||
(* Main *)
|
||||
(** {1 Main} *)
|
||||
|
||||
let () =
|
||||
try
|
||||
let ast = Parser.contract tokeniser buffer in
|
||||
if Utils.String.Set.mem "ast" options.verbose
|
||||
if Utils.String.Set.mem "ast" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.offsets := options.offsets;
|
||||
ParserLog.mode := options.mode;
|
||||
ParserLog.pp_ast buffer ast;
|
||||
ParserLog.pp_ast state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
end
|
||||
else if Utils.String.Set.mem "ast-tokens" options.verbose
|
||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.offsets := options.offsets;
|
||||
ParserLog.mode := options.mode;
|
||||
ParserLog.print_tokens buffer ast;
|
||||
ParserLog.print_tokens state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
end
|
||||
with
|
||||
Lexer.Error err ->
|
||||
close_all ();
|
||||
Lexer.print_error ~offsets:options.offsets
|
||||
options.mode err ~file
|
||||
Lexer.print_error ~offsets:options#offsets
|
||||
options#mode err ~file
|
||||
| Parser.Error ->
|
||||
let region = get_last () in
|
||||
let error = Region.{region; value=ParseError} in
|
||||
let () = close_all () in
|
||||
print_error ~offsets:options.offsets
|
||||
options.mode error ~file
|
||||
print_error ~offsets:options#offsets
|
||||
options#mode error ~file
|
||||
| Sys_error msg -> Utils.highlight msg
|
||||
|
@@ -1,3 +1,2 @@
module Region = Region
module Pos = Pos
module Option = X_option
@ -3,8 +3,8 @@ type u is A | B of t * int | C of int -> (string -> int)
|
||||
type v is record a : t; b : record c : string end end
|
||||
|
||||
function back (var store : store) : list (operation) * store is
|
||||
var operations : list (operation) := list []
|
||||
begin
|
||||
var operations : list (operation) := list [];
|
||||
const a : nat = 0n;
|
||||
x0 := record foo = "1"; bar = 4n end;
|
||||
x1 := nil;
|
||||
@ -31,8 +31,8 @@ function back (var store : store) : list (operation) * store is
|
||||
if now > store.deadline and (not True) then
|
||||
begin
|
||||
f (x,1);
|
||||
for k -> d : int * string in map m block { skip };
|
||||
for x : int in set s block { skip };
|
||||
for k -> d in map m block { skip };
|
||||
for x in set s block { skip };
|
||||
while i < 10n
|
||||
begin
|
||||
acc := 2 - (if toggle then f(x) else Unit);
|
||||
@ -53,8 +53,8 @@ function back (var store : store) : list (operation) * store is
|
||||
end with (operations, store)
|
||||
|
||||
function claim (var store : store) : list (operation) * store is
|
||||
var operations : list (operation) := nil
|
||||
begin
|
||||
var operations : list (operation) := nil;
|
||||
if now <= store.deadline then
|
||||
failwith ("Too soon.")
|
||||
else
|
||||
@ -73,8 +73,8 @@ function claim (var store : store) : list (operation) * store is
|
||||
end with (operations, store)
|
||||
|
||||
function withdraw (var store : store) : list (operation) * store is
|
||||
var operations : list (operation) := list end
|
||||
begin
|
||||
var operations : list (operation) := list end;
|
||||
if sender = owner then
|
||||
if now >= store.deadline then
|
||||
if balance >= store.goal then {
|
||||
|
@ -36,11 +36,11 @@ type 'a sequence_or_record =
|
||||
|
||||
|
||||
%nonassoc Ident
|
||||
%nonassoc COLON (* Solves a shift/reduce problem that happens with record
|
||||
and sequences. To elaborate:
|
||||
- sequence_or_record_in can be reduced to
|
||||
expr -> Ident, but also to
|
||||
field_assignment -> Ident.
|
||||
%nonassoc COLON (* Solves a shift/reduce problem that happens with record
|
||||
and sequences. To elaborate:
|
||||
- sequence_or_record_in can be reduced to
|
||||
expr -> Ident, but also to
|
||||
field_assignment -> Ident.
|
||||
*)
|
||||
%%
|
||||
|
||||
@ -135,7 +135,7 @@ sepseq(item,sep):
|
||||
(* Non-empty comma-separated values (at least two values) *)
|
||||
|
||||
tuple(item):
|
||||
item COMMA nsepseq(item,COMMA) {
|
||||
item COMMA nsepseq(item,COMMA) {
|
||||
let h,t = $3 in $1,($2,h)::t
|
||||
}
|
||||
|
||||
@ -143,7 +143,7 @@ tuple(item):
|
||||
|
||||
list(item):
|
||||
LBRACKET sep_or_term_list(item, COMMA) RBRACKET {
|
||||
let elements, terminator = $2 in
|
||||
let elements, terminator = $2 in
|
||||
{ value =
|
||||
{
|
||||
compound = Brackets ($1,$3);
|
||||
@ -151,7 +151,7 @@ list(item):
|
||||
terminator;
|
||||
};
|
||||
region = cover $1 $3
|
||||
}
|
||||
}
|
||||
}
|
||||
| LBRACKET RBRACKET {
|
||||
let value = {
|
||||
@ -161,11 +161,11 @@ list(item):
|
||||
let region = cover $1 $2
|
||||
in {value; region}
|
||||
}
|
||||
|
||||
|
||||
(* Main *)
|
||||
|
||||
contract:
|
||||
declarations EOF {
|
||||
declarations EOF {
|
||||
{decl = $1; eof=$2} }
|
||||
|
||||
declarations:
|
||||
@ -179,7 +179,7 @@ declaration:
|
||||
(* Type declarations *)
|
||||
|
||||
type_decl:
|
||||
Type type_name EQ type_expr {
|
||||
Type type_name EQ type_expr {
|
||||
let region = cover $1 (type_expr_to_region $4) in
|
||||
let value = {
|
||||
kwd_type = $1;
|
||||
@ -200,15 +200,15 @@ cartesian:
|
||||
let value = Utils.nsepseq_cons $1 $2 $3 in
|
||||
let region = nsepseq_to_region type_expr_to_region value
|
||||
in TProd {region; value}
|
||||
}
|
||||
| fun_type { ($1 : type_expr) }
|
||||
|
||||
fun_type:
|
||||
core_type {
|
||||
$1
|
||||
}
|
||||
| core_type ARROW fun_type {
|
||||
let region = cover (type_expr_to_region $1) (type_expr_to_region $3) in
|
||||
| fun_type { ($1 : type_expr) }
|
||||
|
||||
fun_type:
|
||||
core_type {
|
||||
$1
|
||||
}
|
||||
| core_type ARROW fun_type {
|
||||
let region = cover (type_expr_to_region $1) (type_expr_to_region $3) in
|
||||
TFun {region; value = ($1, $2, $3)}
|
||||
}
|
||||
|
||||
@ -219,28 +219,28 @@ core_type:
|
||||
| module_name DOT type_name {
|
||||
let module_name = $1.value in
|
||||
let type_name = $3.value in
|
||||
let value = module_name ^ "." ^ type_name in
|
||||
let value = module_name ^ "." ^ type_name in
|
||||
let region = cover $1.region $3.region
|
||||
in
|
||||
in
|
||||
TVar {region; value}
|
||||
}
|
||||
| type_constr LPAR nsepseq(core_type, COMMA) RPAR {
|
||||
| type_constr LPAR nsepseq(core_type, COMMA) RPAR {
|
||||
let arg_val = $3 in
|
||||
let constr = $1 in
|
||||
let start = $1.region in
|
||||
let stop = $4 in
|
||||
let start = $1.region in
|
||||
let stop = $4 in
|
||||
let region = cover start stop in
|
||||
let lpar, rpar = $2, $4 in
|
||||
TApp Region.{value = constr, {
|
||||
value = {
|
||||
lpar;
|
||||
lpar;
|
||||
rpar;
|
||||
inside = arg_val
|
||||
};
|
||||
region = cover lpar rpar;
|
||||
}; region}
|
||||
}
|
||||
| par (type_expr) {
|
||||
| par (type_expr) {
|
||||
TPar $1
|
||||
}
|
||||
|
||||
@ -248,7 +248,7 @@ type_constr:
|
||||
type_name { $1 }
|
||||
|
||||
sum_type:
|
||||
VBAR nsepseq(variant,VBAR) {
|
||||
VBAR nsepseq(variant,VBAR) {
|
||||
let region = nsepseq_to_region (fun x -> x.region) $2
|
||||
in {region; value = $2}
|
||||
}
|
||||
@ -259,11 +259,11 @@ variant:
|
||||
and value = {constr = $1; arg = Some ($2, $3)}
|
||||
in {region; value}
|
||||
}
|
||||
| Constr {
|
||||
| Constr {
|
||||
{region=$1.region; value= {constr=$1; arg=None}} }
|
||||
|
||||
record_type:
|
||||
LBRACE sep_or_term_list(field_decl,COMMA) RBRACE {
|
||||
LBRACE sep_or_term_list(field_decl,COMMA) RBRACE {
|
||||
let ne_elements, terminator = $2 in
|
||||
let region = cover $1 $3
|
||||
and value = {
|
||||
@ -271,7 +271,7 @@ record_type:
|
||||
ne_elements;
|
||||
terminator;
|
||||
}
|
||||
in {region; value}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
type_expr_field:
|
||||
@ -282,24 +282,24 @@ type_expr_field:
|
||||
field_decl:
|
||||
field_name {
|
||||
let value = {field_name = $1; colon = Region.ghost; field_type = TVar $1}
|
||||
in {region = $1.region; value}
|
||||
in {region = $1.region; value}
|
||||
}
|
||||
| field_name COLON type_expr_field {
|
||||
let stop = type_expr_to_region $3 in
|
||||
let region = cover $1.region stop
|
||||
and value = {field_name = $1; colon = $2; field_type = $3}
|
||||
in {region; value}
|
||||
in {region; value}
|
||||
}
|
||||
|
||||
(* Top-level non-recursive definitions *)
|
||||
|
||||
let_declaration:
|
||||
Let let_binding {
|
||||
let kwd_let = $1 in
|
||||
let kwd_let = $1 in
|
||||
let binding, (region: Region.region) = $2 in
|
||||
{value = kwd_let, binding; region}
|
||||
}
|
||||
|
||||
|
||||
es6_func:
|
||||
ARROW expr {
|
||||
$1, $2
|
||||
@ -313,42 +313,42 @@ let_binding:
|
||||
let region = cover start stop in
|
||||
({binders = pattern, []; lhs_type=$2; eq=$3; let_rhs=$4}, region)
|
||||
}
|
||||
| tuple(sub_irrefutable) type_annotation? EQ expr {
|
||||
let h, t = $1 in
|
||||
| tuple(sub_irrefutable) type_annotation? EQ expr {
|
||||
let h, t = $1 in
|
||||
let start = pattern_to_region h in
|
||||
let stop = last (fun (region, _) -> region) t in
|
||||
let region = cover start stop in
|
||||
let region = cover start stop in
|
||||
let pattern = PTuple { value = $1; region } in
|
||||
let start = region in
|
||||
let stop = expr_to_region $4 in
|
||||
let stop = expr_to_region $4 in
|
||||
let region = cover start stop in
|
||||
({binders = pattern, []; lhs_type=$2; eq=$3; let_rhs=$4}, region)
|
||||
}
|
||||
| WILD type_annotation? EQ expr {
|
||||
| WILD type_annotation? EQ expr {
|
||||
let pattern = PWild $1 in
|
||||
let start = pattern_to_region pattern in
|
||||
let stop = expr_to_region $4 in
|
||||
let stop = expr_to_region $4 in
|
||||
let region = cover start stop in
|
||||
({binders = pattern, []; lhs_type=$2; eq=$3; let_rhs=$4}, region)
|
||||
}
|
||||
| unit type_annotation? EQ expr {
|
||||
| unit type_annotation? EQ expr {
|
||||
let pattern = PUnit $1 in
|
||||
let start = pattern_to_region pattern in
|
||||
let stop = expr_to_region $4 in
|
||||
let stop = expr_to_region $4 in
|
||||
let region = cover start stop in
|
||||
({binders = pattern, []; lhs_type=$2; eq=$3; let_rhs=$4}, region)
|
||||
}
|
||||
| record_pattern type_annotation? EQ expr {
|
||||
| record_pattern type_annotation? EQ expr {
|
||||
let pattern = PRecord $1 in
|
||||
let start = pattern_to_region pattern in
|
||||
let stop = expr_to_region $4 in
|
||||
let stop = expr_to_region $4 in
|
||||
let region = cover start stop in
|
||||
({binders = pattern, []; lhs_type=$2; eq=$3; let_rhs=$4}, region)
|
||||
}
|
||||
| par(closed_irrefutable) type_annotation? EQ expr {
|
||||
| par(closed_irrefutable) type_annotation? EQ expr {
|
||||
let pattern = PPar $1 in
|
||||
let start = pattern_to_region pattern in
|
||||
let stop = expr_to_region $4 in
|
||||
let stop = expr_to_region $4 in
|
||||
let region = cover start stop in
|
||||
({binders = pattern, []; lhs_type=$2; eq=$3; let_rhs=$4}, region)
|
||||
}
|
||||
@ -359,11 +359,11 @@ type_annotation:
|
||||
(* Patterns *)
|
||||
|
||||
irrefutable:
|
||||
tuple(sub_irrefutable) {
|
||||
let h, t = $1 in
|
||||
tuple(sub_irrefutable) {
|
||||
let h, t = $1 in
|
||||
let start = pattern_to_region h in
|
||||
let stop = last (fun (region, _) -> region) t in
|
||||
let region = cover start stop in
|
||||
let region = cover start stop in
|
||||
PTuple { value = $1; region }
|
||||
}
|
||||
| sub_irrefutable { $1 }
|
||||
@ -381,14 +381,14 @@ closed_irrefutable:
|
||||
| typed_pattern { PTyped $1 }
|
||||
|
||||
typed_pattern:
|
||||
irrefutable COLON type_expr {
|
||||
let start = pattern_to_region $1 in
|
||||
irrefutable COLON type_expr {
|
||||
let start = pattern_to_region $1 in
|
||||
let stop = type_expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
{
|
||||
value = {
|
||||
pattern = $1;
|
||||
colon = $2;
|
||||
pattern = $1;
|
||||
colon = $2;
|
||||
type_expr = $3
|
||||
};
|
||||
region
|
||||
@ -396,18 +396,18 @@ typed_pattern:
|
||||
}
|
||||
|
||||
pattern:
|
||||
LBRACKET sub_pattern COMMA DOTDOTDOT sub_pattern RBRACKET {
|
||||
LBRACKET sub_pattern COMMA DOTDOTDOT sub_pattern RBRACKET {
|
||||
let start = pattern_to_region $2 in
|
||||
let stop = pattern_to_region $5 in
|
||||
let stop = pattern_to_region $5 in
|
||||
let region = cover start stop in
|
||||
let val_ = {value = $2, $3, $5; region} in
|
||||
PList (PCons val_)
|
||||
PList (PCons val_)
|
||||
}
|
||||
| tuple(sub_pattern) {
|
||||
let h, t = $1 in
|
||||
| tuple(sub_pattern) {
|
||||
let h, t = $1 in
|
||||
let start = pattern_to_region h in
|
||||
let stop = last (fun (region, _) -> region) t in
|
||||
let region = cover start stop in
|
||||
let region = cover start stop in
|
||||
PTuple { value = $1; region }
|
||||
}
|
||||
| core_pattern { $1 }
|
||||
@ -425,7 +425,7 @@ core_pattern:
|
||||
| False { PFalse $1 }
|
||||
| Str { PString $1 }
|
||||
| par(ptuple) { PPar $1 }
|
||||
| list(sub_pattern) { PList (PListComp $1) }
|
||||
| list(sub_pattern) { PList (PListComp $1) }
|
||||
| constr_pattern { PConstr $1 }
|
||||
| record_pattern { PRecord $1 }
|
||||
|
||||
@ -439,7 +439,7 @@ record_pattern:
|
||||
terminator;
|
||||
}
|
||||
in
|
||||
{region; value}
|
||||
{region; value}
|
||||
}
|
||||
|
||||
field_pattern:
|
||||
@ -458,25 +458,25 @@ constr_pattern:
|
||||
and value = $1, $2
|
||||
in PSomeApp {value; region}
|
||||
}
|
||||
| Constr {
|
||||
| Constr {
|
||||
PConstrApp { value = $1, None; region = $1.region } }
|
||||
| Constr sub_pattern {
|
||||
| Constr sub_pattern {
|
||||
let region = cover $1.region (pattern_to_region $2) in
|
||||
PConstrApp { value = $1, Some $2; region }
|
||||
PConstrApp { value = $1, Some $2; region }
|
||||
}
|
||||
|
||||
|
||||
ptuple:
|
||||
tuple(sub_pattern) {
|
||||
let h, t = $1 in
|
||||
tuple(sub_pattern) {
|
||||
let h, t = $1 in
|
||||
let start = pattern_to_region h in
|
||||
let stop = last (fun (region, _) -> region) t in
|
||||
let region = cover start stop in
|
||||
PTuple { value = $1; region }
|
||||
let region = cover start stop in
|
||||
PTuple { value = $1; region }
|
||||
}
|
||||
|
||||
unit:
|
||||
LPAR RPAR {
|
||||
LPAR RPAR {
|
||||
let the_unit = ghost, ghost in
|
||||
let region = cover $1 $2 in
|
||||
{ value = the_unit; region }
|
||||
@ -500,57 +500,57 @@ base_cond:
|
||||
base_cond__open(base_cond) { $1 }
|
||||
|
||||
type_expr_simple_args:
|
||||
LPAR nsepseq(type_expr_simple, COMMA) RPAR {
|
||||
LPAR nsepseq(type_expr_simple, COMMA) RPAR {
|
||||
$1, $2, $3
|
||||
}
|
||||
}
|
||||
|
||||
type_expr_simple:
|
||||
core_expr_2 type_expr_simple_args? {
|
||||
type_expr_simple:
|
||||
core_expr_2 type_expr_simple_args? {
|
||||
let args = $2 in
|
||||
let constr = match $1 with
|
||||
let constr = match $1 with
|
||||
| EVar i -> i
|
||||
| EProj {value = {struct_name; field_path; _}; region} ->
|
||||
let path =
|
||||
(Utils.nsepseq_foldl
|
||||
(fun a e ->
|
||||
match e with
|
||||
| EProj {value = {struct_name; field_path; _}; region} ->
|
||||
let path =
|
||||
(Utils.nsepseq_foldl
|
||||
(fun a e ->
|
||||
match e with
|
||||
| FieldName v -> a ^ "." ^ v.value
|
||||
| Component {value = c, _; _} -> a ^ "." ^ c
|
||||
)
|
||||
)
|
||||
struct_name.value
|
||||
field_path
|
||||
)
|
||||
)
|
||||
in
|
||||
{value = path; region }
|
||||
| EArith (Mutez {value = s, _; region })
|
||||
| EArith (Int {value = s, _; region })
|
||||
| EArith (Nat {value = s, _; region }) -> { value = s; region }
|
||||
| EString (StrLit {value = s; region}) -> { value = s; region }
|
||||
| EString (String {value = s; region}) -> { value = s; region }
|
||||
| ELogic (BoolExpr (True t)) -> { value = "true"; region = t }
|
||||
| ELogic (BoolExpr (False f)) -> { value = "false"; region = f }
|
||||
| _ -> failwith "Not supported"
|
||||
in
|
||||
match args with
|
||||
match args with
|
||||
Some (lpar, args, rpar) -> (
|
||||
let start = expr_to_region $1 in
|
||||
let stop = rpar in
|
||||
let region = cover start stop in
|
||||
let region = cover start stop in
|
||||
TApp {
|
||||
value = constr, {
|
||||
value = {
|
||||
inside = args;
|
||||
lpar;
|
||||
inside = args;
|
||||
lpar;
|
||||
rpar
|
||||
};
|
||||
region};
|
||||
};
|
||||
region};
|
||||
region}
|
||||
)
|
||||
| None -> TVar constr
|
||||
}
|
||||
| LPAR nsepseq(type_expr_simple, COMMA) RPAR {
|
||||
| LPAR nsepseq(type_expr_simple, COMMA) RPAR {
|
||||
TProd {value = $2; region = cover $1 $3}
|
||||
}
|
||||
| LPAR type_expr_simple ARROW type_expr_simple RPAR {
|
||||
| LPAR type_expr_simple ARROW type_expr_simple RPAR {
|
||||
TFun {value = $2, $3, $4; region = cover $1 $5}
|
||||
}
|
||||
|
||||
@ -562,32 +562,32 @@ fun_expr:
|
||||
let arrow, body = $2 in
|
||||
let kwd_fun = Region.ghost in
|
||||
let start = expr_to_region $1 in
|
||||
let stop = expr_to_region body in
|
||||
let stop = expr_to_region body in
|
||||
let region = cover start stop in
|
||||
let rec arg_to_pattern = (function
|
||||
| EVar val_ -> PVar val_
|
||||
| EVar val_ -> PVar val_
|
||||
| EAnnot {value = (EVar v, typ); region} ->
|
||||
PTyped {value = {
|
||||
pattern = PVar v;
|
||||
pattern = PVar v;
|
||||
colon = Region.ghost;
|
||||
type_expr = typ;
|
||||
} ; region}
|
||||
| EPar {value = {inside; lpar; rpar}; region} ->
|
||||
| EPar {value = {inside; lpar; rpar}; region} ->
|
||||
PPar {value = {inside = arg_to_pattern inside; lpar; rpar}; region}
|
||||
| EUnit u -> PUnit u
|
||||
| _ -> failwith "Not supported"
|
||||
)
|
||||
in
|
||||
in
|
||||
let fun_args_to_pattern = (function
|
||||
| EAnnot {value = (ETuple {value = fun_args; _}, _); _} -> (* ((foo:x, bar) : type) *)
|
||||
let bindings = List.map (fun arg -> arg_to_pattern (snd arg)) (snd fun_args) in
|
||||
(arg_to_pattern (fst fun_args), bindings)
|
||||
| EAnnot {value = (EPar {value = {inside = fun_arg ; _}; _}, _); _} -> (* ((foo:x, bar) : type) *)
|
||||
(arg_to_pattern fun_arg, [])
|
||||
| EPar {value = {inside = fun_arg; _ }; _} ->
|
||||
| EPar {value = {inside = fun_arg; _ }; _} ->
|
||||
(arg_to_pattern fun_arg, [])
|
||||
| EAnnot e -> (arg_to_pattern (EAnnot e), [])
|
||||
| ETuple {value = fun_args; _} ->
|
||||
| ETuple {value = fun_args; _} ->
|
||||
let bindings = List.map (fun arg -> arg_to_pattern (snd arg)) (snd fun_args) in
|
||||
(arg_to_pattern (fst fun_args), bindings)
|
||||
| EUnit e ->
|
||||
@ -624,13 +624,13 @@ if_then(right_expr):
|
||||
let the_unit = ghost, ghost in
|
||||
let ifnot = EUnit {region=ghost; value=the_unit} in
|
||||
let region = cover $1 $5 in
|
||||
{
|
||||
{
|
||||
value = {
|
||||
kwd_if = $1;
|
||||
test = $2;
|
||||
kwd_then = $3;
|
||||
kwd_if = $1;
|
||||
test = $2;
|
||||
kwd_then = $3;
|
||||
ifso = $4;
|
||||
kwd_else = Region.ghost;
|
||||
kwd_else = Region.ghost;
|
||||
ifnot;
|
||||
};
|
||||
region
|
||||
@ -640,13 +640,13 @@ if_then(right_expr):
|
||||
if_then_else(right_expr):
|
||||
If parenthesized_expr LBRACE closed_if SEMI RBRACE Else LBRACE right_expr SEMI RBRACE {
|
||||
let region = cover $1 $11 in
|
||||
{
|
||||
{
|
||||
value = {
|
||||
kwd_if = $1;
|
||||
test = $2;
|
||||
kwd_then = $3;
|
||||
kwd_if = $1;
|
||||
test = $2;
|
||||
kwd_then = $3;
|
||||
ifso = $4;
|
||||
kwd_else = $6;
|
||||
kwd_else = $6;
|
||||
ifnot = $9
|
||||
};
|
||||
region
|
||||
@ -671,20 +671,20 @@ switch_expr(right_expr):
|
||||
let stop = $5 in
|
||||
let region = cover start stop in
|
||||
{ value = {
|
||||
kwd_match = $1;
|
||||
expr = $2;
|
||||
lead_vbar = None;
|
||||
kwd_match = $1;
|
||||
expr = $2;
|
||||
lead_vbar = None;
|
||||
kwd_with = Region.ghost;
|
||||
cases = {
|
||||
value = cases;
|
||||
region = nsepseq_to_region (fun {region; _} -> region) $4
|
||||
};
|
||||
};
|
||||
region
|
||||
};
|
||||
region
|
||||
}
|
||||
}
|
||||
|
||||
switch_expr_:
|
||||
switch_expr_:
|
||||
| par(expr) {
|
||||
$1.value.inside
|
||||
}
|
||||
@ -693,7 +693,7 @@ switch_expr_:
|
||||
}
|
||||
|
||||
cases(right_expr):
|
||||
nseq(case_clause(right_expr)) {
|
||||
nseq(case_clause(right_expr)) {
|
||||
let (hd, tl) = $1 in
|
||||
hd, (List.map (fun f -> expr_to_region f.value.rhs, f) tl)
|
||||
}
|
||||
@ -701,11 +701,11 @@ cases(right_expr):
|
||||
case_clause(right_expr):
|
||||
VBAR pattern ARROW right_expr SEMI? {
|
||||
let region = cover (pattern_to_region $2) (expr_to_region $4) in
|
||||
{value =
|
||||
{value =
|
||||
{
|
||||
pattern = $2;
|
||||
arrow = $3;
|
||||
rhs=$4
|
||||
pattern = $2;
|
||||
arrow = $3;
|
||||
rhs=$4
|
||||
};
|
||||
region
|
||||
}
|
||||
@ -713,11 +713,11 @@ case_clause(right_expr):
|
||||
|
||||
let_expr(right_expr):
|
||||
Let let_binding SEMI right_expr {
|
||||
let kwd_let = $1 in
|
||||
let (binding: let_binding), _ = $2 in
|
||||
let kwd_let = $1 in
|
||||
let (binding: let_binding), _ = $2 in
|
||||
let kwd_in = $3 in
|
||||
let body = $4 in
|
||||
let stop = expr_to_region $4 in
|
||||
let stop = expr_to_region $4 in
|
||||
let region = cover $1 stop in
|
||||
let let_in = {kwd_let; binding; kwd_in; body}
|
||||
in ELetIn {region; value=let_in} }
|
||||
@ -726,19 +726,19 @@ disj_expr_level:
|
||||
disj_expr { ELogic (BoolExpr (Or $1)) }
|
||||
| conj_expr_level { $1 }
|
||||
| par(tuple(disj_expr_level)) type_annotation_simple? {
|
||||
let region = $1.region in
|
||||
let region = $1.region in
|
||||
let tuple = ETuple {value=$1.value.inside; region} in
|
||||
let region = match $2 with
|
||||
let region = match $2 with
|
||||
| Some s -> cover $1.region (type_expr_to_region s)
|
||||
| None -> region
|
||||
in
|
||||
match $2 with
|
||||
in
|
||||
match $2 with
|
||||
| Some typ -> EAnnot({value = tuple, typ; region})
|
||||
| None -> tuple
|
||||
}
|
||||
|
||||
bin_op(arg1,op,arg2):
|
||||
arg1 op arg2 {
|
||||
arg1 op arg2 {
|
||||
let start = expr_to_region $1 in
|
||||
let stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
@ -821,29 +821,29 @@ unary_expr_level:
|
||||
let start = $1 in
|
||||
let end_ = expr_to_region $2 in
|
||||
let region = cover start end_
|
||||
and value = {op = $1; arg = $2}
|
||||
in EArith (Neg {region; value})
|
||||
and value = {op = $1; arg = $2}
|
||||
in EArith (Neg {region; value})
|
||||
}
|
||||
| NOT call_expr_level {
|
||||
let start = $1 in
|
||||
let end_ = expr_to_region $2 in
|
||||
let region = cover start end_
|
||||
and value = {op = $1; arg = $2} in
|
||||
and value = {op = $1; arg = $2} in
|
||||
ELogic (BoolExpr (Not ({region; value})))
|
||||
}
|
||||
}
|
||||
| call_expr_level {
|
||||
$1
|
||||
$1
|
||||
}
|
||||
|
||||
call_expr_level:
|
||||
call_expr_level:
|
||||
call_expr_level_in type_annotation_simple? {
|
||||
let region = match $2 with
|
||||
let region = match $2 with
|
||||
| Some s -> cover (expr_to_region $1) (type_expr_to_region s)
|
||||
| None -> expr_to_region $1
|
||||
in
|
||||
in
|
||||
match $2 with
|
||||
| Some t ->
|
||||
EAnnot { value = $1, t; region }
|
||||
| Some t ->
|
||||
EAnnot { value = $1, t; region }
|
||||
| None -> $1
|
||||
}
|
||||
|
||||
@ -859,12 +859,12 @@ constr_expr:
|
||||
| C_Some core_expr {
|
||||
let region = cover $1 (expr_to_region $2)
|
||||
in EConstr (ESomeApp {value = $1,$2; region})
|
||||
}
|
||||
| Constr core_expr? {
|
||||
}
|
||||
| Constr core_expr? {
|
||||
let start = $1.region in
|
||||
let stop = match $2 with
|
||||
let stop = match $2 with
|
||||
| Some c -> expr_to_region c
|
||||
| None -> start
|
||||
| None -> start
|
||||
in
|
||||
let region = cover start stop in
|
||||
EConstr (EConstrApp { value = $1,$2; region})
|
||||
@ -892,7 +892,7 @@ core_expr_2:
|
||||
| Nat { EArith (Nat $1) }
|
||||
| Ident | module_field { EVar $1 }
|
||||
| projection { EProj $1 }
|
||||
| Str { EString (StrLit $1) }
|
||||
| Str { EString (String $1) }
|
||||
| unit { EUnit $1 }
|
||||
| False { ELogic (BoolExpr (False $1)) }
|
||||
| True { ELogic (BoolExpr (True $1)) }
|
||||
@ -940,7 +940,7 @@ core_expr:
|
||||
| Nat { EArith (Nat $1) }
|
||||
| Ident | module_field { EVar $1 }
|
||||
| projection { EProj $1 }
|
||||
| Str { EString (StrLit $1) }
|
||||
| Str { EString (String $1) }
|
||||
| unit { EUnit $1 }
|
||||
| False { ELogic (BoolExpr (False $1)) }
|
||||
| True { ELogic (BoolExpr (True $1)) }
|
||||
@ -949,9 +949,9 @@ core_expr:
|
||||
| sequence_or_record { $1 }
|
||||
|
||||
module_field:
|
||||
module_name DOT field_name {
|
||||
module_name DOT field_name {
|
||||
let region = cover $1.region $3.region in
|
||||
{ value = $1.value ^ "." ^ $3.value; region }
|
||||
{ value = $1.value ^ "." ^ $3.value; region }
|
||||
}
|
||||
|
||||
selection:
|
||||
@ -962,7 +962,7 @@ selection:
|
||||
}
|
||||
| DOT field_name selection {
|
||||
let r, (h, t) = $3 in
|
||||
let result:((selection, dot) Utils.nsepseq) = (FieldName $2), ($1, h) :: t in
|
||||
let result:((selection, dot) Utils.nsepseq) = (FieldName $2), ($1, h) :: t in
|
||||
r, result
|
||||
}
|
||||
| DOT field_name {
|
||||
@ -974,15 +974,15 @@ selection:
|
||||
|
||||
projection:
|
||||
struct_name selection {
|
||||
let start = $1.region in
|
||||
let stop = nsepseq_to_region (function
|
||||
| FieldName f -> f.region
|
||||
let start = $1.region in
|
||||
let stop = nsepseq_to_region (function
|
||||
| FieldName f -> f.region
|
||||
| Component c -> c.region) (snd $2)
|
||||
in
|
||||
let region = cover start stop in
|
||||
{ value =
|
||||
{ value =
|
||||
{
|
||||
struct_name = $1;
|
||||
struct_name = $1;
|
||||
selector = fst $2;
|
||||
field_path = snd $2
|
||||
};
|
||||
@ -994,33 +994,33 @@ projection:
|
||||
let field_name = $3 in
|
||||
let value = module_name.value ^ "." ^ field_name.value in
|
||||
let struct_name = {$1 with value} in
|
||||
let start = $1.region in
|
||||
let stop = nsepseq_to_region (function
|
||||
| FieldName f -> f.region
|
||||
let start = $1.region in
|
||||
let stop = nsepseq_to_region (function
|
||||
| FieldName f -> f.region
|
||||
| Component c -> c.region) (snd $4)
|
||||
in
|
||||
let region = cover start stop in
|
||||
{ value =
|
||||
{ value =
|
||||
{
|
||||
struct_name;
|
||||
selector = fst $4;
|
||||
struct_name;
|
||||
selector = fst $4;
|
||||
field_path = snd $4
|
||||
};
|
||||
region
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sequence_or_record_in:
|
||||
sequence_or_record_in:
|
||||
expr SEMI sep_or_term_list(expr,SEMI) {
|
||||
let (e, _region) = $3 in
|
||||
let e = Utils.nsepseq_cons $1 $2 e in
|
||||
PaSequence { s_elts = e; s_terminator = None}
|
||||
}
|
||||
| field_assignment COMMA sep_or_term_list(field_assignment,COMMA) {
|
||||
| field_assignment COMMA sep_or_term_list(field_assignment,COMMA) {
|
||||
let (e, _region) = $3 in
|
||||
let e = Utils.nsepseq_cons $1 $2 e in
|
||||
PaRecord { r_elts = e; r_terminator = None}
|
||||
}
|
||||
}
|
||||
| expr SEMI? {
|
||||
PaSingleExpr $1
|
||||
}
|
||||
@ -1029,7 +1029,7 @@ sequence_or_record:
|
||||
LBRACE sequence_or_record_in RBRACE {
|
||||
let compound = Braces($1, $3) in
|
||||
let region = cover $1 $3 in
|
||||
match $2 with
|
||||
match $2 with
|
||||
| PaSequence s -> (
|
||||
let value: expr injection = {
|
||||
compound;
|
||||
@ -1045,33 +1045,33 @@ sequence_or_record:
|
||||
ne_elements = r.r_elts;
|
||||
terminator = r.r_terminator;
|
||||
}
|
||||
in
|
||||
in
|
||||
ERecord {value; region}
|
||||
)
|
||||
| PaSingleExpr e -> e
|
||||
| PaSingleExpr e -> e
|
||||
}
|
||||
|
||||
field_assignment:
|
||||
field_name {
|
||||
{ value =
|
||||
field_name {
|
||||
{ value =
|
||||
{
|
||||
field_name = $1;
|
||||
assignment = Region.ghost;
|
||||
field_name = $1;
|
||||
assignment = Region.ghost;
|
||||
field_expr = EVar $1
|
||||
};
|
||||
region = $1.region
|
||||
}
|
||||
}
|
||||
| field_name COLON expr {
|
||||
let start = $1.region in
|
||||
let stop = expr_to_region $3 in
|
||||
let start = $1.region in
|
||||
let stop = expr_to_region $3 in
|
||||
let region = cover start stop in
|
||||
{ value =
|
||||
{ value =
|
||||
{
|
||||
field_name = $1;
|
||||
assignment = $2;
|
||||
field_name = $1;
|
||||
assignment = $2;
|
||||
field_expr = $3
|
||||
};
|
||||
region
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,22 +1,32 @@
(* Parsing command-line options *)

(* The type [command] denotes some possible behaviours of the
compiler. *)
(** Parsing command-line options *)

(** The type [command] denotes some possible behaviours of the
compiler.
*)
type command = Quiet | Copy | Units | Tokens

(* The type [options] gathers the command-line options. *)

type options = {
(** The type [options] gathers the command-line options.
*)
type options = <
input : string option;
libs : string list;
verbose : Utils.String.Set.t;
offsets : bool;
mode : [`Byte | `Point];
cmd : command
}
>

(* Auxiliary functions *)
let make ~input ~libs ~verbose ~offsets ~mode ~cmd =
object
method input = input
method libs = libs
method verbose = verbose
method offsets = offsets
method mode = mode
method cmd = cmd
end

(** {1 Auxiliary functions} *)

let printf = Printf.printf
let sprintf = Printf.sprintf
@ -25,7 +35,7 @@ let print = print_endline
let abort msg =
Utils.highlight (sprintf "Command-line error: %s\n" msg); exit 1

(* Help *)
(** {1 Help} *)

let help language extension () =
let file = Filename.basename Sys.argv.(0) in
@ -44,11 +54,11 @@ let help language extension () =
print " -h, --help This help";
exit 0

(* Version *)
(** {1 Version} *)

let version () = printf "%s\n" Version.version; exit 0

(* Specifying the command-line options a la GNU *)
(** {1 Specifying the command-line options a la GNU} *)

let copy = ref false
and tokens = ref false
@ -85,8 +95,8 @@ let specs language extension =
]
;;

(* Handler of anonymous arguments *)

(** Handler of anonymous arguments
*)
let anonymous arg =
match !input with
None -> input := Some arg
@ -94,8 +104,8 @@ let anonymous arg =
abort (sprintf "Multiple inputs")
;;

(* Checking options and exporting them as non-mutable values *)

(** Checking options and exporting them as non-mutable values
*)
let string_of convert = function
None -> "None"
| Some s -> sprintf "Some %s" (convert s)
@ -168,9 +178,9 @@ let check extension =
| false, false, false, true -> Tokens
| _ -> abort "Choose one of -q, -c, -u, -t."

in {input; libs; verbose; offsets; mode; cmd}
in make ~input ~libs ~verbose ~offsets ~mode ~cmd

(* Parsing the command-line options *)
(** {1 Parsing the command-line options} *)

let read language extension =
try

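A minimal usage sketch of the refactored interface (the driver code below is hypothetical; only the [EvalOpt] names shown in the hunk above come from the diff): the abstract object type replaces record field access such as [options.input] with method calls.

let () =
  let options = EvalOpt.read "CameLIGO" ".mligo" in
  (* Former record fields are now methods on the abstract object type. *)
  let source = match options#input with
                 Some file -> file
               | None -> "<stdin>" in
  match options#cmd with
    EvalOpt.Quiet  -> ()
  | EvalOpt.Copy   -> Printf.printf "Copying the lexemes of %s.\n" source
  | EvalOpt.Units  -> Printf.printf "Printing the lexical units of %s.\n" source
  | EvalOpt.Tokens -> Printf.printf "Printing the tokens of %s.\n" source
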
@ -1,55 +1,67 @@
(* Parsing the command-line options of PascaLIGO *)
(** Parsing the command-line options of PascaLIGO *)

(* The type [command] denotes some possible behaviours of the
compiler. The constructors are
(** The type [command] denotes some possible behaviours of the
compiler. The constructors are
{ul

* [Quiet], then no output from the lexer and parser should be
expected, safe error messages: this is the default value;
* [Copy], then lexemes of tokens and markup will be printed to
standard output, with the expectation of a perfect match with
the input file;
* [Units], then the tokens and markup will be printed to standard
output, that is, the abstract representation of the concrete
lexical syntax;
* [Tokens], then the tokens only will be printed.
*)
{li [Quiet], then no output from the lexer and parser should be
expected, safe error messages: this is the default value;}

{li [Copy], then lexemes of tokens and markup will be printed to
standard output, with the expectation of a perfect match
with the input file;}

{li [Units], then the tokens and markup will be printed to
standard output, that is, the abstract representation of the
concrete lexical syntax;}

{li [Tokens], then the tokens only will be printed.}
}
*)
type command = Quiet | Copy | Units | Tokens

(* The type [options] gathers the command-line options.
(** The type [options] gathers the command-line options.
{ul

If the field [input] is [Some src], the name of the PascaLIGO
source file, with the extension ".ligo", is [src]. If [input] is
[Some "-"] or [None], the source file is read from standard input.
{li If the field [input] is [Some src], the name of the
PascaLIGO source file, with the extension ".ligo", is
[src]. If [input] is [Some "-"] or [None], the source file
is read from standard input.}

The field [libs] is the paths where to find PascaLIGO files for
inclusion (#include).
{li The field [libs] is the paths where to find PascaLIGO files
for inclusion (#include).}

The field [verbose] is a set of stages of the compiler chain,
about which more information may be displayed.
{li The field [verbose] is a set of stages of the compiler
chain, about which more information may be displayed.}

If the field [offsets] is [true], then the user requested that
messages about source positions and regions be expressed in terms
of horizontal offsets.
{li If the field [offsets] is [true], then the user requested
that messages about source positions and regions be
expressed in terms of horizontal offsets.}

If the value [mode] is [`Byte], then the unit in which source
positions and regions are expressed in messages is the byte. If
[`Point], the unit is unicode points.

*)

type options = {
{li If the value [mode] is [`Byte], then the unit in which
source positions and regions are expressed in messages is
the byte. If [`Point], the unit is unicode points.}
}
*)
type options = <
input : string option;
libs : string list;
verbose : Utils.String.Set.t;
offsets : bool;
mode : [`Byte | `Point];
cmd : command
}
>

(* Parsing the command-line options on stdin. The first parameter is
val make :
input:string option ->
libs:string list ->
verbose:Utils.String.Set.t ->
offsets:bool ->
mode:[`Byte | `Point] ->
cmd:command -> options

(** Parsing the command-line options on stdin. The first parameter is
the name of the concrete syntax, e.g., "pascaligo", and the second
is the file extension, e.g., ".ligo".
*)

val read : string -> string -> options

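Why an object type rather than a record: as the commit description notes, the per-syntax [options] records shared field names, which complicates scoping and type inference at every use site. The standalone sketch below is illustrative only (none of these names come from the diff) and contrasts the two styles.

(* Two records sharing the field name [input]: uses of [r.input] need a
   type annotation or module qualification to disambiguate. *)
type lexer_options  = {input : string option; offsets : bool}
type parser_options = {input : string option; mode : [`Byte | `Point]}

let file_of_record (r : lexer_options) =
  match r.input with Some f -> f | None -> "<stdin>"

(* With an object type, [o#input] is resolved structurally, so values of
   different option types can share method names without clashing. *)
let file_of_object o =
  match o#input with Some f -> f | None -> "<stdin>"
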
@ -1,6 +1,7 @@
(* Lexer specification for LIGO, to be processed by [ocamllex]. *)

{
[@@@warning "-42"]

module Region = Simple_utils.Region
module Pos = Simple_utils.Pos

@ -1,4 +1,4 @@
(* Embedding the lexer of PascaLIGO in a debug module *)
(** Embedding the LIGO lexer in a debug module *)

let sprintf = Printf.sprintf

@ -24,10 +24,9 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
module Lexer = Lexer
module Token = Lexer.Token

(* Pretty-printing in a string the lexemes making up the markup
(** Pretty-printing in a string the lexemes making up the markup
between two tokens, concatenated with the last lexeme
itself. *)

let output_token ?(offsets=true) mode command
channel left_mark token : unit =
let output str = Printf.fprintf channel "%s%!" str in

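The extra [?offsets] and [mode] parameters are how the debug printer now receives what used to live in global references. A hypothetical call site is sketched below: [Log] stands for a module obtained from [LexerLog.Make] over some lexer, and [command], [left_mark] and [token] stand for values supplied by the caller; none of these names appear in the diff.

(* Thread the CLI options into the debug printer instead of reading globals. *)
let dump command options channel left_mark token =
  Log.output_token ~offsets:options#offsets options#mode command
                   channel left_mark token
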
@ -100,8 +100,10 @@ module Errors = struct
let title () = "simplifying expression" in
let message () = "" in
let data = [
("expression" ,
thunk @@ Parser.Ligodity.ParserLog.expr_to_string t)
("expression" ,
(** TODO: The labelled arguments should be flowing from the CLI. *)
thunk @@ Parser.Ligodity.ParserLog.expr_to_string
~offsets:true ~mode:`Point t)
] in
error ~data title message

@ -350,7 +352,7 @@ let rec simpl_expression :
return @@ e_literal ~loc (Literal_mutez n)
)
| EArith (Neg e) -> simpl_unop "NEG" e
| EString (StrLit s) -> (
| EString (String s) -> (
let (s , loc) = r_split s in
let s' =
let s = s in
@ -724,9 +726,11 @@ and simpl_cases : type a . (Raw.pattern * a) list -> (a, unit) matching result =
| lst ->
let error x =
let title () = "Pattern" in
(** TODO: The labelled arguments should be flowing from the CLI. *)
let content () =
Printf.sprintf "Pattern : %s"
(Parser.Ligodity.ParserLog.pattern_to_string x) in
(Parser.Ligodity.ParserLog.pattern_to_string
~offsets:true ~mode:`Point x) in
error title content
in
let as_variant () =

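The TODOs above all hardcode [~offsets:true ~mode:`Point]. A hypothetical helper sketching the intended end state, once the CLI options reach the simplifier (the [options] object is the one defined in [EvalOpt]; the helper name is made up):

let expr_to_string_with options t =
  Parser.Ligodity.ParserLog.expr_to_string
    ~offsets:options#offsets ~mode:options#mode t
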
@ -1,4 +1,4 @@
open Trace
open! Trace
open Ast_simplified

module Raw = Parser.Pascaligo.AST
@ -13,12 +13,12 @@ let pseq_to_list = function
| None -> []
| Some lst -> npseq_to_list lst
let get_value : 'a Raw.reg -> 'a = fun x -> x.value
let is_compiler_generated = fun (name) -> String.contains (Var.to_name name) '#'
let is_compiler_generated name = String.contains (Var.to_name name) '#'

let detect_local_declarations (for_body : expression) =
let%bind aux = Self_ast_simplified.fold_expression
(fun (nlist, cur_loop : expression_variable list * bool) (ass_exp : expression) ->
if cur_loop then
if cur_loop then
match ass_exp.expression with
| E_let_in {binder;rhs = _;result = _} ->
let (name,_) = binder in
@ -45,14 +45,14 @@ let detect_free_variables (for_body : expression) (local_decl_names : expression
when n=C_OR || n=C_AND || n=C_LT || n=C_GT ||
n=C_LE || n=C_GE || n=C_EQ || n=C_NEQ -> (
match (a.expression,b.expression) with
| E_variable na , E_variable nb ->
| E_variable na , E_variable nb ->
let ret = [] in
let ret = if not (is_compiler_generated na) then
na::ret else ret in
let ret = if not (is_compiler_generated nb) then
nb::ret else ret in
ok (ret@prev)
| E_variable n , _
| E_variable n , _
| _ , E_variable n ->
if not (is_compiler_generated n) then
ok (n::prev) else ok prev
@ -140,8 +140,10 @@ module Errors = struct
let data = [
("pattern_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ;
(** TODO: The labelled arguments should be flowing from the CLI. *)
("pattern",
fun () -> Parser.Pascaligo.ParserLog.pattern_to_string p)
fun () -> Parser.Pascaligo.ParserLog.pattern_to_string
~offsets:true ~mode:`Point p)
] in
error ~data title message

@ -189,9 +191,11 @@ module Errors = struct
let simplifying_instruction t =
let title () = "simplifiying instruction" in
let message () = "" in
(** TODO: The labelled arguments should be flowing from the CLI. *)
let data = [
("instruction",
fun () -> Parser.Pascaligo.ParserLog.instruction_to_string t)
fun () -> Parser.Pascaligo.ParserLog.instruction_to_string
~offsets:true ~mode:`Point t)
] in
error ~data title message
end
@ -569,11 +573,6 @@ and simpl_tuple_expression ?loc (lst:Raw.expr list) : expression result =
let%bind lst = bind_list @@ List.map simpl_expression lst in
return @@ e_tuple ?loc lst

and simpl_local_declaration : Raw.local_decl -> _ result = fun t ->
match t with
| LocalData d ->
simpl_data_declaration d

and simpl_data_declaration : Raw.data_decl -> _ result = fun t ->
match t with
| LocalVar x ->
@ -612,10 +611,10 @@ and simpl_fun_expression :
loc:_ -> Raw.fun_expr -> ((expression_variable option * type_expression option) * expression) result =
fun ~loc x ->
let open! Raw in
let {name;param;ret_type;local_decls;block;return} : fun_expr = x in
let {name;param;ret_type;block_with;return} : fun_expr = x in
let statements =
match block with
| Some block -> npseq_to_list block.value.statements
match block_with with
| Some (block,_) -> npseq_to_list block.value.statements
| None -> []
in
(match param.value.inside with
@ -623,14 +622,12 @@ and simpl_fun_expression :
let%bind input = simpl_param a in
let name = Option.map (fun (x : _ reg) -> Var.of_name x.value) name in
let (binder , input_type) = input in
let%bind local_declarations =
bind_map_list simpl_local_declaration local_decls in
let%bind instructions = bind_list
@@ List.map simpl_statement
@@ statements in
let%bind result = simpl_expression return in
let%bind output_type = simpl_type_expression ret_type in
let body = local_declarations @ instructions in
let body = instructions in
let%bind result =
let aux prec cur = cur (Some prec) in
bind_fold_right_list aux result body in
@ -654,14 +651,12 @@ and simpl_fun_expression :
ass
in
bind_list @@ List.mapi aux params in
let%bind local_declarations =
bind_map_list simpl_local_declaration local_decls in
let%bind instructions = bind_list
@@ List.map simpl_statement
@@ statements in
let%bind result = simpl_expression return in
let%bind output_type = simpl_type_expression ret_type in
let body = tpl_declarations @ local_declarations @ instructions in
let body = tpl_declarations @ instructions in
let%bind result =
let aux prec cur = cur (Some prec) in
bind_fold_right_list aux result body in
@ -1002,9 +997,11 @@ and simpl_cases : type a . (Raw.pattern * a) list -> (a, unit) matching result =
let aux (x , y) =
let error =
let title () = "Pattern" in
(** TODO: The labelled arguments should be flowing from the CLI. *)
let content () =
Printf.sprintf "Pattern : %s"
(Parser.Pascaligo.ParserLog.pattern_to_string x) in
(Parser.Pascaligo.ParserLog.pattern_to_string
~offsets:true ~mode:`Point x) in
error title content in
let%bind x' =
trace error @@
@ -1113,7 +1110,7 @@ and simpl_for_int : Raw.for_int -> (_ -> expression result) result = fun fi ->
- references to the iterated value ==> variable `#COMPILER#elt_X`
Note: In the case of an inner loop capturing variable from an outer loop
the free variable name can be `#COMPILER#acc.Y` and because we do not
capture the accumulator record in the inner loop, we don't want to
capture the accumulator record in the inner loop, we don't want to
generate `#COMPILER#acc.#COMPILER#acc.Y` but `#COMPILER#acc.Y`

5) Append the return value to the body
@ -1145,7 +1142,7 @@ and simpl_for_collect : Raw.for_collect -> (_ -> expression result) result = fun
let elt_v_name = match fc.bind_to with
| Some v -> "#COMPILER#elt_"^(snd v).value
| None -> "#COMPILER#elt_unused" in
let element_names = ok @@ match fc.bind_to with
let element_names = ok @@ match fc.bind_to with
| Some v -> [Var.of_name fc.var.value;Var.of_name (snd v).value]
| None -> [Var.of_name fc.var.value] in
(* STEP 1 *)