Additional refactoring to get local and global builds closer.
I removed the last top-level effect (the execution of cpp). The idea is that ParserUnit.ml and each ParserMain.ml get closer to pascaligo.ml, cameligo.ml and reasonligo.ml, respectively.
parent c5b5ffe51a
commit cb8aaa7b9b
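To illustrate the intent of the refactoring: after this change a ParserMain.ml should reduce to instantiating the functor and calling the [parse] wrapper, with no effect performed at module initialisation. The sketch below only illustrates that shape; the concrete functor arguments (Lexer, AST, Parser, ParErr, ParserLog, IO) and the error printing are assumptions, not code from this commit.

(* Hypothetical ParserMain.ml after the refactoring: preprocessing,
   lexing and parsing all run inside [Unit.parse], which returns a
   [Stdlib.result] instead of exiting the process. *)

module Unit =
  ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)(IO)

let () =
  match Unit.parse Unit.parse_contract with
    Stdlib.Ok _ast -> ()
  | Stdlib.Error msg -> prerr_endline msg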
@@ -30,9 +30,9 @@ module Unit =
 let issue_error point =
   let error = Unit.format_error ~offsets:IO.options#offsets
                                 IO.options#mode point
-  in (Unit.close_all (); Stdlib.Error error)
+  in Stdlib.Error error

-let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
+let parse parser : ('a,string) Stdlib.result =
   try parser () with
     (* Scoping errors *)

@@ -18,5 +18,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/ParserAPI.mli
 ../shared/ParserAPI.ml
 ../shared/LexerUnit.ml
+../shared/ParserUnit.mli
 ../shared/ParserUnit.ml
 Stubs/Simple_utils.ml
@@ -30,9 +30,9 @@ module Unit =
 let issue_error point =
   let error = Unit.format_error ~offsets:IO.options#offsets
                                 IO.options#mode point
-  in (Unit.close_all (); Stdlib.Error error)
+  in Stdlib.Error error

-let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
+let parse parser : ('a,string) Stdlib.result =
   try parser () with
     (* Scoping errors *)

@@ -30,9 +30,9 @@ module Unit =
 let issue_error point =
   let error = Unit.format_error ~offsets:IO.options#offsets
                                 IO.options#mode point
-  in (Unit.close_all (); Stdlib.Error error)
+  in Stdlib.Error error

-let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
+let parse parser : ('a,string) Stdlib.result =
   try parser () with
     (* Ad hoc errors from the parser *)

@@ -46,7 +46,7 @@ let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
       and reg = AST.expr_to_region expr in
       let error = Unit.short_error ~offsets:IO.options#offsets
                                    IO.options#mode msg reg
-      in (Unit.close_all (); Stdlib.Error error)
+      in Stdlib.Error error

   (* Scoping errors *)

@@ -90,7 +90,8 @@ let parse (parser: unit -> ('a,string) Stdlib.result) : ('a,string) Stdlib.result =
            reserved name for the lexer. *)
          Stdlib.Error _ -> assert false
        | Ok invalid ->
-          let point = "Duplicate field name in this record declaration.\n\
+          let point =
+            "Duplicate field name in this record declaration.\n\
             Hint: Change the name.\n",
            None, invalid
        in issue_error point)
@@ -105,6 +105,8 @@ module type TOKEN =
        * a function [get_pos] that returns the current position, and
        * a function [get_last] that returns the region of the last
          recognised token.
+       * a function [get_file] that returns the name of the file being scanned
+         (empty string if [stdin]).

      Note that a module [Token] is exported too, because the signature
      of the exported functions depend on it.
@@ -137,6 +139,7 @@ module type S =
       get_win : unit -> window;
       get_pos : unit -> Pos.t;
       get_last : unit -> Region.t;
+      get_file : unit -> file_path;
       close : unit -> unit
     }

@@ -161,6 +161,7 @@ module type S =
       get_win : unit -> window;
       get_pos : unit -> Pos.t;
       get_last : unit -> Region.t;
+      get_file : unit -> file_path;
       close : unit -> unit
     }

@@ -840,6 +841,7 @@ type instance = {
   get_win : unit -> window;
   get_pos : unit -> Pos.t;
   get_last : unit -> Region.t;
+  get_file : unit -> file_path;
   close : unit -> unit
 }

@@ -862,7 +864,8 @@ let open_token_stream file_path_opt =

   let get_pos () = !state.pos
   and get_last () = !state.last
-  and get_win () = !state.window in
+  and get_win () = !state.window
+  and get_file () = file_path in

   let patch_buffer (start, stop) buffer =
     let open Lexing in
@@ -934,7 +937,7 @@ let open_token_stream file_path_opt =
       None | Some "-" -> ()
     | Some file_path -> reset ~file:file_path buffer
   and close () = close_in cin in
-  {read = read_token; buffer; get_win; get_pos; get_last; close}
+  {read = read_token; buffer; get_win; get_pos; get_last; get_file; close}

 end (* of functor [Make] in HEADER *)
 (* END TRAILER *)

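The [get_file] accessor added above exposes the name of the file being scanned (the empty string for stdin) alongside [get_pos], [get_last] and [get_win]. A minimal sketch of a client reading it, assuming a lexer module [Lexer] produced by the functor, an instance obtained from [open_token_stream], and [file_path] being a synonym for [string] as the documentation comment suggests:

(* Hypothetical helper using the new field of [Lexer.instance]. *)
let current_file (inst : Lexer.instance) =
  match inst.Lexer.get_file () with
    "" -> "<stdin>"   (* [get_file] returns "" when reading from stdin *)
  | path -> path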
@@ -1,5 +1,7 @@
 (* Functor to build a standalone LIGO parser *)

+module Region = Simple_utils.Region
+
 module type IO =
   sig
     val ext : string (* LIGO file extension *)
@@ -33,21 +35,12 @@ module Make (Lexer: Lexer.S)
            (IO: IO) =
   struct
     open Printf
+    module SSet = Utils.String.Set

     (* Error printing and exception tracing *)

     let () = Printexc.record_backtrace true

-    let external_ text =
-      Utils.highlight (sprintf "External error: %s" text); exit 1
-
-    (* Extracting the input file *)
-
-    let file =
-      match IO.options#input with
-        None | Some "-" -> false
-      | Some _ -> true
-
     (* Preprocessing the input source and opening the input channels *)

     (* Path for CPP inclusions (#include) *)
@@ -65,8 +58,6 @@ module Make (Lexer: Lexer.S)

     let suffix = ".pp" ^ IO.ext

-    module SSet = Utils.String.Set
-
     let pp_input =
       if SSet.mem "cpp" IO.options#verbose
       then prefix ^ suffix
@@ -83,51 +74,33 @@ module Make (Lexer: Lexer.S)
       sprintf "cpp -traditional-cpp%s %s > %s"
               lib_path file pp_input

-    let () =
-      if SSet.mem "cpp" IO.options#verbose
-      then eprintf "%s\n%!" cpp_cmd;
-      if Sys.command cpp_cmd <> 0 then
-        external_ (sprintf "the command \"%s\" failed." cpp_cmd)
+    (* Error handling (reexported from [ParserAPI]) *)

-    (* Instanciating the lexer *)
+    type message = string
+    type valid = Parser.token
+    type invalid = Parser.token
+    type error = message * valid option * invalid

-    module ParserFront = ParserAPI.Make (Lexer)(Parser)(ParErr)
+    exception Point of error

-    let format_error = ParserFront.format_error
-    let short_error = ParserFront.short_error
+    (* Instantiating the parser *)

-    let lexer_inst = Lexer.open_token_stream (Some pp_input)
-    let Lexer.{read; buffer; get_win; close; _} = lexer_inst
+    module Front = ParserAPI.Make (Lexer)(Parser)(ParErr)

-    and cout = stdout
-
-    let close_all () = close (); close_out cout
-
-    (* Tokeniser *)
-
-    module Log = LexerLog.Make (Lexer)
-
-    let log = Log.output_token ~offsets:IO.options#offsets
-                               IO.options#mode IO.options#cmd cout
-
-    let tokeniser = read ~log
-
-    (* Main *)
-
-    let output = Buffer.create 131
-    let state = ParserLog.mk_state
-                  ~offsets:IO.options#offsets
-                  ~mode:IO.options#mode
-                  ~buffer:output
+    let format_error = Front.format_error
+    let short_error = Front.short_error

     (* Parsing an expression *)

-    let parse_expr () : AST.expr =
+    let parse_expr lexer_inst tokeniser output state :
+          (AST.expr, string) Stdlib.result =
+      let close_all () =
+        lexer_inst.Lexer.close (); close_out stdout in
       let expr =
         if IO.options#mono then
-          ParserFront.mono_expr tokeniser buffer
+          Front.mono_expr tokeniser lexer_inst.Lexer.buffer
         else
-          ParserFront.incr_expr lexer_inst in
+          Front.incr_expr lexer_inst in
       let () =
         if SSet.mem "ast-tokens" IO.options#verbose
         then begin
@@ -142,16 +115,21 @@ module Make (Lexer: Lexer.S)
             ParserLog.pp_expr state expr;
             Buffer.output_buffer stdout output
           end
-      in expr (* Or more CLI options handled before *)
+      in close_all (); Ok expr

     (* Parsing a contract *)

-    let parse_contract () : AST.t =
+    let parse_contract lexer_inst tokeniser output state
+        : (AST.t, string) Stdlib.result =
+      let close_all () =
+        lexer_inst.Lexer.close (); close_out stdout in
       let ast =
+        try
           if IO.options#mono then
-            ParserFront.mono_contract tokeniser buffer
+            Front.mono_contract tokeniser lexer_inst.Lexer.buffer
           else
-            ParserFront.incr_contract lexer_inst in
+            Front.incr_contract lexer_inst
+        with exn -> close_all (); raise exn in
       let () =
         if SSet.mem "ast" IO.options#verbose
         then begin
@@ -166,44 +144,85 @@ module Make (Lexer: Lexer.S)
             ParserLog.print_tokens state ast;
             Buffer.output_buffer stdout output
           end
-      in ast (* Or more CLI options handled before. *)
+      in close_all (); Ok ast

     (* Wrapper for the parsers above *)

-    let parse (parser: unit -> 'a) : ('a,string) Stdlib.result =
-      try
-        let node = parser () in (close_all (); Ok node)
-      with
+    let parse parser =
+      (* Preprocessing the input *)
+
+      if SSet.mem "cpp" IO.options#verbose
+      then eprintf "%s\n%!" cpp_cmd
+      else ();
+
+      if Sys.command cpp_cmd <> 0 then
+        let msg =
+          sprintf "External error: the command \"%s\" failed." cpp_cmd
+        in Stdlib.Error msg
+      else
+        (* Instantiating the lexer *)
+
+        let lexer_inst = Lexer.open_token_stream (Some pp_input) in
+
+        (* Making the tokeniser *)
+
+        let module Log = LexerLog.Make (Lexer) in
+
+        let log =
+          Log.output_token ~offsets:IO.options#offsets
+                           IO.options#mode IO.options#cmd stdout in
+
+        let tokeniser = lexer_inst.Lexer.read ~log in
+
+        let output = Buffer.create 131 in
+        let state = ParserLog.mk_state
+                      ~offsets:IO.options#offsets
+                      ~mode:IO.options#mode
+                      ~buffer:output in
+
+        (* Calling the specific parser (that is, the parameter) *)
+
+        match parser lexer_inst tokeniser output state with
+          Stdlib.Error _ as error -> error
+        | Stdlib.Ok _ as node -> node
+
         (* Lexing errors *)

-        Lexer.Error err ->
+        | exception Lexer.Error err ->
+            let file =
+              match IO.options#input with
+                None | Some "-" -> false
+              | Some _ -> true in
            let error =
              Lexer.format_error ~offsets:IO.options#offsets
                                 IO.options#mode err ~file
-           in close_all (); Stdlib.Error error
+           in Stdlib.Error error

        (* Incremental API of Menhir *)

-        | ParserFront.Point point ->
+        | exception Front.Point point ->
            let error =
-             ParserFront.format_error ~offsets:IO.options#offsets
+             Front.format_error ~offsets:IO.options#offsets
                                 IO.options#mode point
-           in close_all (); Stdlib.Error error
+           in Stdlib.Error error

        (* Monolithic API of Menhir *)

-        | Parser.Error ->
+        | exception Parser.Error ->
            let invalid, valid_opt =
-             match get_win () with
+             match lexer_inst.Lexer.get_win () with
                Lexer.Nil ->
                  assert false (* Safe: There is always at least EOF. *)
              | Lexer.One invalid -> invalid, None
              | Lexer.Two (invalid, valid) -> invalid, Some valid in
            let point = "", valid_opt, invalid in
            let error =
-             ParserFront.format_error ~offsets:IO.options#offsets
+             Front.format_error ~offsets:IO.options#offsets
                                 IO.options#mode point
-           in close_all (); Stdlib.Error error
+           in Stdlib.Error error

        (* I/O errors *)

-        | Sys_error error -> Stdlib.Error error
+        | exception Sys_error error -> Stdlib.Error error

     end
src/passes/1-parser/shared/ParserUnit.mli (new normal file, 71 lines)
@@ -0,0 +1,71 @@
+(* Functor to build a standalone LIGO parser *)
+
+module Region = Simple_utils.Region
+
+module type IO =
+  sig
+    val ext : string (* LIGO file extension *)
+    val options : EvalOpt.options (* CLI options *)
+  end
+
+module type Pretty =
+  sig
+    type state
+    type ast
+    type expr
+
+    val mk_state :
+      offsets:bool -> mode:[`Point|`Byte] -> buffer:Buffer.t -> state
+
+    val pp_ast : state -> ast -> unit
+    val pp_expr : state -> expr -> unit
+    val print_tokens : state -> ast -> unit
+    val print_expr : state -> expr -> unit
+  end
+
+module Make (Lexer: Lexer.S)
+            (AST: sig type t type expr end)
+            (Parser: ParserAPI.PARSER
+                       with type ast = AST.t
+                        and type expr = AST.expr
+                        and type token = Lexer.token)
+            (ParErr: sig val message : int -> string end)
+            (ParserLog: Pretty with type ast = AST.t
+                                and type expr = AST.expr)
+            (IO: IO) :
+  sig
+    (* Error handling (reexported from [ParserAPI]) *)
+
+    type message = string
+    type valid = Parser.token
+    type invalid = Parser.token
+    type error = message * valid option * invalid
+
+    exception Point of error
+
+    val format_error :
+      ?offsets:bool -> [`Byte | `Point] -> error -> string
+
+    val short_error :
+      ?offsets:bool -> [`Byte | `Point] -> message -> Region.t -> string
+
+    (* Parsers *)
+
+    val parse :
+      (Lexer.instance ->
+       (Lexing.lexbuf -> Lexer.token) ->
+       Buffer.t -> ParserLog.state -> ('a, string) result) ->
+      ('a, string) result
+
+    val parse_contract :
+      Lexer.instance ->
+      (Lexing.lexbuf -> Lexer.token) ->
+      Buffer.t -> ParserLog.state ->
+      (AST.t, string) Stdlib.result
+
+    val parse_expr :
+      Lexer.instance ->
+      (Lexing.lexbuf -> Lexer.token) ->
+      Buffer.t -> ParserLog.state -> (AST.expr, string) Stdlib.result
+
+  end
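As a usage note, any function with the argument type shown for [parse] can serve as the callback, so a client can wrap the provided parsers. The following is only a hedged sketch, assuming [Unit] is the module produced by [Make], that [AST] and [Lexer] are the corresponding functor arguments in scope, and that [Lexer.instance] exposes [get_file] returning a string as in the lexer change above:

(* Hypothetical wrapper: report which file is being parsed, then
   delegate to [parse_contract]; its type matches the parameter
   expected by [parse]. *)
let parse_with_notice lexer_inst tokeniser output state =
  Printf.eprintf "Parsing %s.\n%!" (lexer_inst.Lexer.get_file ());
  Unit.parse_contract lexer_inst tokeniser output state

let result : (AST.t, string) Stdlib.result =
  Unit.parse parse_with_notice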