Additional refactoring to bring the local and global builds closer together.
I removed the last top-level effect (running cpp). The idea is that ParserUnit.ml and each ParserMain.ml move closer to pascaligo.ml, cameligo.ml and reasonligo.ml, respectively.
Parent: c5b5ffe51a
Commit: cb8aaa7b9b
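For readers skimming the diff below, the shape of the change is the usual one for removing a top-level effect: the cpp invocation moves from module initialisation into the parse wrapper, and a failed command becomes a Stdlib.Error value instead of an exit. A minimal OCaml sketch of that pattern, simplified from the diff and not the actual ParserUnit code (run_parser stands in for the real parser callback):

(* Before: running cpp was a side effect of merely instantiating the module. *)
(* let () =
     if Sys.command cpp_cmd <> 0 then
       (prerr_endline ("the command \"" ^ cpp_cmd ^ "\" failed."); exit 1) *)

(* After: the effect happens inside [parse] and failures are returned as data. *)
let parse (run_parser : unit -> ('a, string) Stdlib.result) (cpp_cmd : string)
    : ('a, string) Stdlib.result =
  if Sys.command cpp_cmd <> 0
  then Stdlib.Error (Printf.sprintf "External error: the command %S failed." cpp_cmd)
  else run_parser ()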
@@ -30,9 +30,9 @@ module Unit =
 let issue_error point =
 let error = Unit.format_error ~offsets:IO.options#offsets
 IO.options#mode point
-in (Unit.close_all (); Stdlib.Error error)
+in Stdlib.Error error
 
-let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
+let parse parser : ('a,string) Stdlib.result =
 try parser () with
 (* Scoping errors *)
@@ -18,5 +18,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/ParserAPI.mli
 ../shared/ParserAPI.ml
 ../shared/LexerUnit.ml
+../shared/ParserUnit.mli
 ../shared/ParserUnit.ml
 Stubs/Simple_utils.ml
@@ -30,9 +30,9 @@ module Unit =
 let issue_error point =
 let error = Unit.format_error ~offsets:IO.options#offsets
 IO.options#mode point
-in (Unit.close_all (); Stdlib.Error error)
+in Stdlib.Error error
 
-let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
+let parse parser : ('a,string) Stdlib.result =
 try parser () with
 (* Scoping errors *)
@@ -30,9 +30,9 @@ module Unit =
 let issue_error point =
 let error = Unit.format_error ~offsets:IO.options#offsets
 IO.options#mode point
-in (Unit.close_all (); Stdlib.Error error)
+in Stdlib.Error error
 
-let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
+let parse parser : ('a,string) Stdlib.result =
 try parser () with
 (* Ad hoc errors from the parser *)
@@ -46,7 +46,7 @@ let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
 and reg = AST.expr_to_region expr in
 let error = Unit.short_error ~offsets:IO.options#offsets
 IO.options#mode msg reg
-in (Unit.close_all (); Stdlib.Error error)
+in Stdlib.Error error
 
 (* Scoping errors *)
@@ -90,7 +90,8 @@ let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
 reserved name for the lexer. *)
 Stdlib.Error _ -> assert false
 | Ok invalid ->
-let point = "Duplicate field name in this record declaration.\n\
+let point =
+"Duplicate field name in this record declaration.\n\
 Hint: Change the name.\n",
 None, invalid
 in issue_error point)
@@ -105,6 +105,8 @@ module type TOKEN =
 * a function [get_pos] that returns the current position, and
 * a function [get_last] that returns the region of the last
 recognised token.
+* a function [get_file] that returns the name of the file being scanned
+(empty string if [stdin]).
 
 Note that a module [Token] is exported too, because the signature
 of the exported functions depend on it.
@@ -137,6 +139,7 @@ module type S =
 get_win : unit -> window;
 get_pos : unit -> Pos.t;
 get_last : unit -> Region.t;
+get_file : unit -> file_path;
 close : unit -> unit
 }
@@ -161,6 +161,7 @@ module type S =
 get_win : unit -> window;
 get_pos : unit -> Pos.t;
 get_last : unit -> Region.t;
+get_file : unit -> file_path;
 close : unit -> unit
 }
@@ -840,6 +841,7 @@ type instance = {
 get_win : unit -> window;
 get_pos : unit -> Pos.t;
 get_last : unit -> Region.t;
+get_file : unit -> file_path;
 close : unit -> unit
 }
@@ -862,7 +864,8 @@ let open_token_stream file_path_opt =
 
 let get_pos () = !state.pos
 and get_last () = !state.last
-and get_win () = !state.window in
+and get_win () = !state.window
+and get_file () = file_path in
 
 let patch_buffer (start, stop) buffer =
 let open Lexing in
@@ -934,7 +937,7 @@ let open_token_stream file_path_opt =
 None | Some "-" -> ()
 | Some file_path -> reset ~file:file_path buffer
 and close () = close_in cin in
-{read = read_token; buffer; get_win; get_pos; get_last; close}
+{read = read_token; buffer; get_win; get_pos; get_last; get_file; close}
 
 end (* of functor [Make] in HEADER *)
 (* END TRAILER *)
@@ -1,5 +1,7 @@
 (* Functor to build a standalone LIGO parser *)
 
+module Region = Simple_utils.Region
+
 module type IO =
 sig
 val ext : string (* LIGO file extension *)
@@ -33,21 +35,12 @@ module Make (Lexer: Lexer.S)
 (IO: IO) =
 struct
 open Printf
+module SSet = Utils.String.Set
 
 (* Error printing and exception tracing *)
 
 let () = Printexc.record_backtrace true
 
-let external_ text =
-Utils.highlight (sprintf "External error: %s" text); exit 1
-
-(* Extracting the input file *)
-
-let file =
-match IO.options#input with
-None | Some "-" -> false
-| Some _ -> true
-
 (* Preprocessing the input source and opening the input channels *)
 
 (* Path for CPP inclusions (#include) *)
@@ -65,8 +58,6 @@ module Make (Lexer: Lexer.S)
 
 let suffix = ".pp" ^ IO.ext
 
-module SSet = Utils.String.Set
-
 let pp_input =
 if SSet.mem "cpp" IO.options#verbose
 then prefix ^ suffix
@@ -83,51 +74,33 @@ module Make (Lexer: Lexer.S)
 sprintf "cpp -traditional-cpp%s %s > %s"
 lib_path file pp_input
 
-let () =
-if SSet.mem "cpp" IO.options#verbose
-then eprintf "%s\n%!" cpp_cmd;
-if Sys.command cpp_cmd <> 0 then
-external_ (sprintf "the command \"%s\" failed." cpp_cmd)
-
-(* Instanciating the lexer *)
-
-module ParserFront = ParserAPI.Make (Lexer)(Parser)(ParErr)
-
-let format_error = ParserFront.format_error
-let short_error = ParserFront.short_error
-
-let lexer_inst = Lexer.open_token_stream (Some pp_input)
-let Lexer.{read; buffer; get_win; close; _} = lexer_inst
-and cout = stdout
-
-let close_all () = close (); close_out cout
-
-(* Tokeniser *)
-
-module Log = LexerLog.Make (Lexer)
-
-let log = Log.output_token ~offsets:IO.options#offsets
-IO.options#mode IO.options#cmd cout
-
-let tokeniser = read ~log
-
-(* Main *)
-
-let output = Buffer.create 131
-let state = ParserLog.mk_state
-~offsets:IO.options#offsets
-~mode:IO.options#mode
-~buffer:output
+(* Error handling (reexported from [ParserAPI]) *)
+
+type message = string
+type valid = Parser.token
+type invalid = Parser.token
+type error = message * valid option * invalid
+
+exception Point of error
+
+(* Instantiating the parser *)
+
+module Front = ParserAPI.Make (Lexer)(Parser)(ParErr)
+
+let format_error = Front.format_error
+let short_error = Front.short_error
 
 (* Parsing an expression *)
 
-let parse_expr () : AST.expr =
+let parse_expr lexer_inst tokeniser output state :
+(AST.expr, string) Stdlib.result =
+let close_all () =
+lexer_inst.Lexer.close (); close_out stdout in
 let expr =
 if IO.options#mono then
-ParserFront.mono_expr tokeniser buffer
+Front.mono_expr tokeniser lexer_inst.Lexer.buffer
 else
-ParserFront.incr_expr lexer_inst in
+Front.incr_expr lexer_inst in
 let () =
 if SSet.mem "ast-tokens" IO.options#verbose
 then begin
@@ -142,16 +115,21 @@ module Make (Lexer: Lexer.S)
 ParserLog.pp_expr state expr;
 Buffer.output_buffer stdout output
 end
-in expr (* Or more CLI options handled before *)
+in close_all (); Ok expr
 
 (* Parsing a contract *)
 
-let parse_contract () : AST.t =
+let parse_contract lexer_inst tokeniser output state
+: (AST.t, string) Stdlib.result =
+let close_all () =
+lexer_inst.Lexer.close (); close_out stdout in
 let ast =
+try
 if IO.options#mono then
-ParserFront.mono_contract tokeniser buffer
+Front.mono_contract tokeniser lexer_inst.Lexer.buffer
 else
-ParserFront.incr_contract lexer_inst in
+Front.incr_contract lexer_inst
+with exn -> close_all (); raise exn in
 let () =
 if SSet.mem "ast" IO.options#verbose
 then begin
@@ -166,44 +144,85 @@ module Make (Lexer: Lexer.S)
 ParserLog.print_tokens state ast;
 Buffer.output_buffer stdout output
 end
-in ast (* Or more CLI options handled before. *)
+in close_all (); Ok ast
 
-let parse (parser: unit -> 'a) : ('a,string) Stdlib.result =
-try
-let node = parser () in (close_all (); Ok node)
-with
+(* Wrapper for the parsers above *)
+
+let parse parser =
+(* Preprocessing the input *)
+
+if SSet.mem "cpp" IO.options#verbose
+then eprintf "%s\n%!" cpp_cmd
+else ();
+
+if Sys.command cpp_cmd <> 0 then
+let msg =
+sprintf "External error: the command \"%s\" failed." cpp_cmd
+in Stdlib.Error msg
+else
+(* Instantiating the lexer *)
+
+let lexer_inst = Lexer.open_token_stream (Some pp_input) in
+
+(* Making the tokeniser *)
+
+let module Log = LexerLog.Make (Lexer) in
+
+let log =
+Log.output_token ~offsets:IO.options#offsets
+IO.options#mode IO.options#cmd stdout in
+
+let tokeniser = lexer_inst.Lexer.read ~log in
+
+let output = Buffer.create 131 in
+let state = ParserLog.mk_state
+~offsets:IO.options#offsets
+~mode:IO.options#mode
+~buffer:output in
+
+(* Calling the specific parser (that is, the parameter) *)
+
+match parser lexer_inst tokeniser output state with
+Stdlib.Error _ as error -> error
+| Stdlib.Ok _ as node -> node
+
 (* Lexing errors *)
 
-Lexer.Error err ->
+| exception Lexer.Error err ->
+let file =
+match IO.options#input with
+None | Some "-" -> false
+| Some _ -> true in
 let error =
 Lexer.format_error ~offsets:IO.options#offsets
 IO.options#mode err ~file
-in close_all (); Stdlib.Error error
+in Stdlib.Error error
 
 (* Incremental API of Menhir *)
 
-| ParserFront.Point point ->
+| exception Front.Point point ->
 let error =
-ParserFront.format_error ~offsets:IO.options#offsets
+Front.format_error ~offsets:IO.options#offsets
 IO.options#mode point
-in close_all (); Stdlib.Error error
+in Stdlib.Error error
 
 (* Monolithic API of Menhir *)
 
-| Parser.Error ->
+| exception Parser.Error ->
 let invalid, valid_opt =
-match get_win () with
+match lexer_inst.Lexer.get_win () with
 Lexer.Nil ->
 assert false (* Safe: There is always at least EOF. *)
 | Lexer.One invalid -> invalid, None
 | Lexer.Two (invalid, valid) -> invalid, Some valid in
 let point = "", valid_opt, invalid in
 let error =
-ParserFront.format_error ~offsets:IO.options#offsets
+Front.format_error ~offsets:IO.options#offsets
 IO.options#mode point
-in close_all (); Stdlib.Error error
+in Stdlib.Error error
 
 (* I/O errors *)
 
-| Sys_error error -> Stdlib.Error error
+| exception Sys_error error -> Stdlib.Error error
 
 end
src/passes/1-parser/shared/ParserUnit.mli (new file, 71 lines)
@@ -0,0 +1,71 @@
+(* Functor to build a standalone LIGO parser *)
+
+module Region = Simple_utils.Region
+
+module type IO =
+sig
+val ext : string (* LIGO file extension *)
+val options : EvalOpt.options (* CLI options *)
+end
+
+module type Pretty =
+sig
+type state
+type ast
+type expr
+
+val mk_state :
+offsets:bool -> mode:[`Point|`Byte] -> buffer:Buffer.t -> state
+
+val pp_ast : state -> ast -> unit
+val pp_expr : state -> expr -> unit
+val print_tokens : state -> ast -> unit
+val print_expr : state -> expr -> unit
+end
+
+module Make (Lexer: Lexer.S)
+(AST: sig type t type expr end)
+(Parser: ParserAPI.PARSER
+with type ast = AST.t
+and type expr = AST.expr
+and type token = Lexer.token)
+(ParErr: sig val message : int -> string end)
+(ParserLog: Pretty with type ast = AST.t
+and type expr = AST.expr)
+(IO: IO) :
+sig
+(* Error handling (reexported from [ParserAPI]) *)
+
+type message = string
+type valid = Parser.token
+type invalid = Parser.token
+type error = message * valid option * invalid
+
+exception Point of error
+
+val format_error :
+?offsets:bool -> [`Byte | `Point] -> error -> string
+
+val short_error :
+?offsets:bool -> [`Byte | `Point] -> message -> Region.t -> string
+
+(* Parsers *)
+
+val parse :
+(Lexer.instance ->
+(Lexing.lexbuf -> Lexer.token) ->
+Buffer.t -> ParserLog.state -> ('a, string) result) ->
+('a, string) result
+
+val parse_contract :
+Lexer.instance ->
+(Lexing.lexbuf -> Lexer.token) ->
+Buffer.t -> ParserLog.state ->
+(AST.t, string) Stdlib.result
+
+val parse_expr :
+Lexer.instance ->
+(Lexing.lexbuf -> Lexer.token) ->
+Buffer.t -> ParserLog.state -> (AST.expr, string) Stdlib.result
+
+end