Sharing standalone lexers and parsers, and parser error API.
This commit is contained in:
parent e23350071f
commit 51ccc28e3c
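
In outline, this commit strips the hand-written driver code out of each syntax's lexer and parser mains and instantiates shared LexerUnit and ParserUnit functors instead, so CameLIGO, PascaLIGO and ReasonLIGO keep only a small syntax-specific stub. Below is a condensed sketch of the resulting drivers, pieced together from the CameLIGO diffs that follow (the other syntaxes differ only in the extension and language name); the split between a lexer driver and a parser driver mirrors the diffs, it is not one file:

    (* Lexer driver: the whole file reduces to an IO module plus one functor application. *)
    module IO =
      struct
        let ext = ".mligo"
        let options = EvalOpt.read "CameLIGO" ext
      end

    module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))

    (* Parser driver (ParserMain.ml): the syntax-specific parser and its
       pretty-printer are wrapped so they match what ParserUnit expects. *)
    module ExtParser =
      struct
        type ast  = AST.t
        type expr = AST.expr
        include Parser
      end

    module ExtParserLog =
      struct
        type ast = AST.t
        include ParserLog
      end

    module M = ParserUnit.Make (IO)
                               (Lexer.Make (LexToken))
                               (AST)
                               (ExtParser)
                               (ParErr)
                               (ExtParserLog)
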
@@ -16,4 +16,8 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
|
||||
../shared/Markup.mli
|
||||
../shared/Utils.mli
|
||||
../shared/Utils.ml
|
||||
../shared/ParserAPI.mli
|
||||
../shared/ParserAPI.ml
|
||||
../shared/LexerUnit.ml
|
||||
../shared/ParserUnit.ml
|
||||
Stubs/Simple_utils.ml
|
||||
|
@@ -1,56 +1,9 @@
|
||||
(** Driver for the LIGO lexer *)
|
||||
(** Driver for the CameLIGO lexer *)
|
||||
|
||||
let extension = ".mligo"
|
||||
let options = EvalOpt.read "CameLIGO" extension
|
||||
module IO =
|
||||
struct
|
||||
let ext = ".mligo"
|
||||
let options = EvalOpt.read "CameLIGO" ext
|
||||
end
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(** {1 Running the lexer on the input file} *)
|
||||
|
||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
||||
|
||||
let () = Log.trace ~offsets:options#offsets
|
||||
options#mode (Some pp_input) options#cmd
|
||||
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
|
||||
|
@@ -1,22 +0,0 @@
|
||||
(** Generic parser API for LIGO *)
|
||||
|
||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(Parser: module type of Parser)
|
||||
(ParErr: sig val message: int -> string end) :
|
||||
sig
|
||||
(* Monolithic and incremental APIs of Menhir for parsing *)
|
||||
|
||||
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
|
||||
val incr_contract : Lexer.instance -> AST.t
|
||||
|
||||
(* Error handling *)
|
||||
|
||||
type message = string
|
||||
type valid = Lexer.token
|
||||
type invalid = Lexer.token
|
||||
type error = message * valid option * invalid
|
||||
|
||||
exception Point of error
|
||||
|
||||
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
|
||||
end
|
@@ -1,145 +1,27 @@
|
||||
(** Driver for the CameLIGO parser *)
|
||||
|
||||
let extension = ".mligo"
|
||||
let options = EvalOpt.read "CameLIGO" extension
|
||||
|
||||
open Printf
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(** Extracting the input file
|
||||
*)
|
||||
let file =
|
||||
match options#input with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true
|
||||
|
||||
(** {1 Error printing and exception tracing} *)
|
||||
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(** {1 Instanciating the lexer} *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
module Log = LexerLog.Make (Lexer)
|
||||
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
|
||||
|
||||
let lexer_inst = Lexer.open_token_stream (Some pp_input)
|
||||
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
|
||||
|
||||
and cout = stdout
|
||||
|
||||
let log = Log.output_token ~offsets:options#offsets
|
||||
options#mode options#cmd cout
|
||||
|
||||
and close_all () = close (); close_out cout
|
||||
|
||||
(** {1 Tokeniser} *)
|
||||
|
||||
let tokeniser = read ~log
|
||||
|
||||
(** {1 Main} *)
|
||||
|
||||
let () =
|
||||
try
|
||||
let ast =
|
||||
if options#mono
|
||||
then ParserFront.mono_contract tokeniser buffer
|
||||
else ParserFront.incr_contract lexer_inst in
|
||||
if Utils.String.Set.mem "ast" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.pp_ast state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
module IO =
|
||||
struct
|
||||
let ext = ".mligo"
|
||||
let options = EvalOpt.read "CameLIGO" ext
|
||||
end
|
||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.print_tokens state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
|
||||
module ExtParser =
|
||||
struct
|
||||
type ast = AST.t
|
||||
type expr = AST.expr
|
||||
include Parser
|
||||
end
|
||||
with
|
||||
(* Lexing errors *)
|
||||
Lexer.Error err ->
|
||||
close_all ();
|
||||
let msg =
|
||||
Lexer.format_error ~offsets:options#offsets
|
||||
options#mode err ~file
|
||||
in prerr_string msg
|
||||
|
||||
(* Incremental API of Menhir *)
|
||||
| ParserFront.Point point ->
|
||||
let () = close_all () in
|
||||
let error =
|
||||
ParserFront.format_error ~offsets:options#offsets
|
||||
options#mode point
|
||||
in eprintf "\027[31m%s\027[0m%!" error
|
||||
module ExtParserLog =
|
||||
struct
|
||||
type ast = AST.t
|
||||
include ParserLog
|
||||
end
|
||||
|
||||
(* Monolithic API of Menhir *)
|
||||
| Parser.Error ->
|
||||
let () = close_all () in
|
||||
let invalid, valid_opt =
|
||||
match get_win () with
|
||||
Lexer.Nil ->
|
||||
assert false (* Safe: There is always at least EOF. *)
|
||||
| Lexer.One invalid -> invalid, None
|
||||
| Lexer.Two (invalid, valid) -> invalid, Some valid in
|
||||
let point = "", valid_opt, invalid in
|
||||
let error =
|
||||
ParserFront.format_error ~offsets:options#offsets
|
||||
options#mode point
|
||||
in eprintf "\027[31m%s\027[0m%!" error
|
||||
|
||||
(* I/O errors *)
|
||||
| Sys_error msg -> Utils.highlight msg
|
||||
module M = ParserUnit.Make (IO)
|
||||
(Lexer.Make (LexToken))
|
||||
(AST)
|
||||
(ExtParser)
|
||||
(ParErr)
|
||||
(ExtParserLog)
|
||||
|
@@ -32,7 +32,7 @@
|
||||
(name ParserMain)
|
||||
(libraries parser_cameligo)
|
||||
(modules
|
||||
ParErr ParserAPI ParserMain)
|
||||
ParErr ParserMain)
|
||||
(preprocess
|
||||
(pps bisect_ppx --conditional))
|
||||
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
|
||||
|
@@ -16,4 +16,8 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
|
||||
../shared/Markup.mli
|
||||
../shared/Utils.mli
|
||||
../shared/Utils.ml
|
||||
../shared/ParserAPI.mli
|
||||
../shared/ParserAPI.ml
|
||||
../shared/LexerUnit.ml
|
||||
../shared/ParserUnit.ml
|
||||
Stubs/Simple_utils.ml
|
||||
|
@@ -1,56 +1,9 @@
|
||||
(** Driver for the LIGO lexer *)
|
||||
(** Driver for the PascaLIGO lexer *)
|
||||
|
||||
let extension = ".ligo"
|
||||
let options = EvalOpt.read "PascaLIGO" extension
|
||||
module IO =
|
||||
struct
|
||||
let ext = ".ligo"
|
||||
let options = EvalOpt.read "PascaLIGO" ext
|
||||
end
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(** {1 Running the lexer on the input file} *)
|
||||
|
||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
||||
|
||||
let () = Log.trace ~offsets:options#offsets
|
||||
options#mode (Some pp_input) options#cmd
|
||||
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
|
||||
|
@@ -1,82 +0,0 @@
|
||||
(* Generic parser for LIGO *)
|
||||
|
||||
(* Main functor *)
|
||||
|
||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(Parser: module type of Parser)
|
||||
(ParErr: sig val message : int -> string end) =
|
||||
struct
|
||||
module I = Parser.MenhirInterpreter
|
||||
module S = MenhirLib.General (* Streams *)
|
||||
|
||||
(* The call [stack checkpoint] extracts the parser's stack out of
|
||||
a checkpoint. *)
|
||||
|
||||
let stack = function
|
||||
I.HandlingError env -> I.stack env
|
||||
| _ -> assert false
|
||||
|
||||
(* The call [state checkpoint] extracts the number of the current
|
||||
state out of a parser checkpoint. *)
|
||||
|
||||
let state checkpoint : int =
|
||||
match Lazy.force (stack checkpoint) with
|
||||
S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *)
|
||||
| S.Cons (I.Element (s,_,_,_),_) -> I.number s
|
||||
|
||||
(* The parser has successfully produced a semantic value. *)
|
||||
|
||||
let success v = v
|
||||
|
||||
(* The parser has suspended itself because of a syntax error. Stop. *)
|
||||
|
||||
type message = string
|
||||
type valid = Lexer.token
|
||||
type invalid = Lexer.token
|
||||
type error = message * valid option * invalid
|
||||
|
||||
exception Point of error
|
||||
|
||||
let failure get_win checkpoint =
|
||||
let message = ParErr.message (state checkpoint) in
|
||||
match get_win () with
|
||||
Lexer.Nil -> assert false
|
||||
| Lexer.One invalid ->
|
||||
raise (Point (message, None, invalid))
|
||||
| Lexer.Two (invalid, valid) ->
|
||||
raise (Point (message, Some valid, invalid))
|
||||
|
||||
(* The two Menhir APIs are called from the following two functions. *)
|
||||
|
||||
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
|
||||
let supplier = I.lexer_lexbuf_to_supplier read buffer
|
||||
and failure = failure get_win in
|
||||
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
|
||||
let ast = I.loop_handle success failure supplier parser
|
||||
in close (); ast
|
||||
|
||||
let mono_contract = Parser.contract
|
||||
|
||||
(* Errors *)
|
||||
|
||||
let format_error ?(offsets=true) mode (msg, valid_opt, invalid) =
|
||||
let invalid_region = LexToken.to_region invalid in
|
||||
let header =
|
||||
"Parse error " ^ invalid_region#to_string ~offsets mode in
|
||||
let trailer =
|
||||
match valid_opt with
|
||||
None ->
|
||||
if LexToken.is_eof invalid then ""
|
||||
else let invalid_lexeme = LexToken.to_lexeme invalid in
|
||||
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
||||
| Some valid ->
|
||||
let valid_lexeme = LexToken.to_lexeme valid in
|
||||
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
||||
if LexToken.is_eof invalid then s
|
||||
else
|
||||
let invalid_lexeme = LexToken.to_lexeme invalid in
|
||||
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
||||
let header = header ^ trailer in
|
||||
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
|
||||
|
||||
end
|
@@ -1,22 +0,0 @@
|
||||
(** Generic parser API for LIGO *)
|
||||
|
||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(Parser: module type of Parser)
|
||||
(ParErr: module type of ParErr) :
|
||||
sig
|
||||
(* Monolithic and incremental APIs of Menhir for parsing *)
|
||||
|
||||
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
|
||||
val incr_contract : Lexer.instance -> AST.t
|
||||
|
||||
(* Error handling *)
|
||||
|
||||
type message = string
|
||||
type valid = Lexer.token
|
||||
type invalid = Lexer.token
|
||||
type error = message * valid option * invalid
|
||||
|
||||
exception Point of error
|
||||
|
||||
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
|
||||
end
|
@@ -1,145 +1,27 @@
|
||||
(** Driver for the PascaLIGO parser *)
|
||||
|
||||
let extension = ".ligo"
|
||||
let options = EvalOpt.read "PascaLIGO" extension
|
||||
|
||||
open Printf
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(** Extracting the input file
|
||||
*)
|
||||
let file =
|
||||
match options#input with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true
|
||||
|
||||
(** {1 Error printing and exception tracing} *)
|
||||
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(** {1 Instanciating the lexer} *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
module Log = LexerLog.Make (Lexer)
|
||||
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
|
||||
|
||||
let lexer_inst = Lexer.open_token_stream (Some pp_input)
|
||||
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
|
||||
|
||||
and cout = stdout
|
||||
|
||||
let log = Log.output_token ~offsets:options#offsets
|
||||
options#mode options#cmd cout
|
||||
|
||||
and close_all () = close (); close_out cout
|
||||
|
||||
(** {1 Tokeniser} *)
|
||||
|
||||
let tokeniser = read ~log
|
||||
|
||||
(** {1 Main} *)
|
||||
|
||||
let () =
|
||||
try
|
||||
let ast =
|
||||
if options#mono
|
||||
then ParserFront.mono_contract tokeniser buffer
|
||||
else ParserFront.incr_contract lexer_inst in
|
||||
if Utils.String.Set.mem "ast" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.pp_ast state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
module IO =
|
||||
struct
|
||||
let ext = ".ligo"
|
||||
let options = EvalOpt.read "PascaLIGO" ext
|
||||
end
|
||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.print_tokens state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
|
||||
module ExtParser =
|
||||
struct
|
||||
type ast = AST.t
|
||||
type expr = AST.expr
|
||||
include Parser
|
||||
end
|
||||
with
|
||||
(* Lexing errors *)
|
||||
Lexer.Error err ->
|
||||
close_all ();
|
||||
let msg =
|
||||
Lexer.format_error ~offsets:options#offsets
|
||||
options#mode err ~file
|
||||
in prerr_string msg
|
||||
|
||||
(* Incremental API of Menhir *)
|
||||
| ParserFront.Point point ->
|
||||
let () = close_all () in
|
||||
let error =
|
||||
ParserFront.format_error ~offsets:options#offsets
|
||||
options#mode point
|
||||
in eprintf "\027[31m%s\027[0m%!" error
|
||||
module ExtParserLog =
|
||||
struct
|
||||
type ast = AST.t
|
||||
include ParserLog
|
||||
end
|
||||
|
||||
(* Monolithic API of Menhir *)
|
||||
| Parser.Error ->
|
||||
let () = close_all () in
|
||||
let invalid, valid_opt =
|
||||
match get_win () with
|
||||
Lexer.Nil ->
|
||||
assert false (* Safe: There is always at least EOF. *)
|
||||
| Lexer.One invalid -> invalid, None
|
||||
| Lexer.Two (invalid, valid) -> invalid, Some valid in
|
||||
let point = "", valid_opt, invalid in
|
||||
let error =
|
||||
ParserFront.format_error ~offsets:options#offsets
|
||||
options#mode point
|
||||
in eprintf "\027[31m%s\027[0m%!" error
|
||||
|
||||
(* I/O errors *)
|
||||
| Sys_error msg -> Utils.highlight msg
|
||||
module M = ParserUnit.Make (IO)
|
||||
(Lexer.Make (LexToken))
|
||||
(AST)
|
||||
(ExtParser)
|
||||
(ParErr)
|
||||
(ExtParserLog)
|
||||
|
@@ -14,8 +14,7 @@
|
||||
parser_shared
|
||||
hex
|
||||
simple-utils
|
||||
tezos-utils
|
||||
)
|
||||
tezos-utils)
|
||||
(preprocess
|
||||
(pps bisect_ppx --conditional))
|
||||
(flags (:standard -open Parser_shared -open Simple_utils)))
|
||||
@@ -33,7 +32,7 @@
|
||||
(name ParserMain)
|
||||
(libraries parser_pascaligo)
|
||||
(modules
|
||||
ParErr ParserAPI ParserMain)
|
||||
ParErr ParserMain)
|
||||
(preprocess
|
||||
(pps bisect_ppx --conditional))
|
||||
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))
|
||||
|
@@ -16,6 +16,10 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
|
||||
../shared/Markup.mli
|
||||
../shared/Utils.mli
|
||||
../shared/Utils.ml
|
||||
../shared/ParserAPI.mli
|
||||
../shared/ParserAPI.ml
|
||||
../shared/LexerUnit.ml
|
||||
../shared/ParserUnit.ml
|
||||
Stubs/Simple_utils.ml
|
||||
Stubs/Parser_cameligo.ml
|
||||
../cameligo/AST.mli
|
||||
|
@@ -1,56 +1,9 @@
|
||||
(** Driver for the LIGO lexer *)
|
||||
(** Driver for the ReasonLIGO lexer *)
|
||||
|
||||
let extension = ".religo"
|
||||
let options = EvalOpt.read "ReasonLIGO" extension
|
||||
module IO =
|
||||
struct
|
||||
let ext = ".religo"
|
||||
let options = EvalOpt.read "ReasonLIGO" ext
|
||||
end
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(** {1 Running the lexer on the input file} *)
|
||||
|
||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
||||
|
||||
let () = Log.trace ~offsets:options#offsets
|
||||
options#mode (Some pp_input) options#cmd
|
||||
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
|
||||
|
@@ -1,82 +0,0 @@
|
||||
(* Generic parser for LIGO *)
|
||||
|
||||
(* Main functor *)
|
||||
|
||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(Parser: module type of Parser)
|
||||
(ParErr: sig val message : int -> string end) =
|
||||
struct
|
||||
module I = Parser.MenhirInterpreter
|
||||
module S = MenhirLib.General (* Streams *)
|
||||
|
||||
(* The call [stack checkpoint] extracts the parser's stack out of
|
||||
a checkpoint. *)
|
||||
|
||||
let stack = function
|
||||
I.HandlingError env -> I.stack env
|
||||
| _ -> assert false
|
||||
|
||||
(* The call [state checkpoint] extracts the number of the current
|
||||
state out of a parser checkpoint. *)
|
||||
|
||||
let state checkpoint : int =
|
||||
match Lazy.force (stack checkpoint) with
|
||||
S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *)
|
||||
| S.Cons (I.Element (s,_,_,_),_) -> I.number s
|
||||
|
||||
(* The parser has successfully produced a semantic value. *)
|
||||
|
||||
let success v = v
|
||||
|
||||
(* The parser has suspended itself because of a syntax error. Stop. *)
|
||||
|
||||
type message = string
|
||||
type valid = Lexer.token
|
||||
type invalid = Lexer.token
|
||||
type error = message * valid option * invalid
|
||||
|
||||
exception Point of error
|
||||
|
||||
let failure get_win checkpoint =
|
||||
let message = ParErr.message (state checkpoint) in
|
||||
match get_win () with
|
||||
Lexer.Nil -> assert false
|
||||
| Lexer.One invalid ->
|
||||
raise (Point (message, None, invalid))
|
||||
| Lexer.Two (invalid, valid) ->
|
||||
raise (Point (message, Some valid, invalid))
|
||||
|
||||
(* The two Menhir APIs are called from the following two functions. *)
|
||||
|
||||
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
|
||||
let supplier = I.lexer_lexbuf_to_supplier read buffer
|
||||
and failure = failure get_win in
|
||||
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
|
||||
let ast = I.loop_handle success failure supplier parser
|
||||
in close (); ast
|
||||
|
||||
let mono_contract = Parser.contract
|
||||
|
||||
(* Errors *)
|
||||
|
||||
let format_error ?(offsets=true) mode (msg, valid_opt, invalid) =
|
||||
let invalid_region = LexToken.to_region invalid in
|
||||
let header =
|
||||
"Parse error " ^ invalid_region#to_string ~offsets mode in
|
||||
let trailer =
|
||||
match valid_opt with
|
||||
None ->
|
||||
if LexToken.is_eof invalid then ""
|
||||
else let invalid_lexeme = LexToken.to_lexeme invalid in
|
||||
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
||||
| Some valid ->
|
||||
let valid_lexeme = LexToken.to_lexeme valid in
|
||||
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
||||
if LexToken.is_eof invalid then s
|
||||
else
|
||||
let invalid_lexeme = LexToken.to_lexeme invalid in
|
||||
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
||||
let header = header ^ trailer in
|
||||
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
|
||||
|
||||
end
|
@@ -1,22 +0,0 @@
|
||||
(** Generic parser API for LIGO *)
|
||||
|
||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(Parser: module type of Parser)
|
||||
(ParErr: sig val message: int -> string end) :
|
||||
sig
|
||||
(* Monolithic and incremental APIs of Menhir for parsing *)
|
||||
|
||||
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
|
||||
val incr_contract : Lexer.instance -> AST.t
|
||||
|
||||
(* Error handling *)
|
||||
|
||||
type message = string
|
||||
type valid = Lexer.token
|
||||
type invalid = Lexer.token
|
||||
type error = message * valid option * invalid
|
||||
|
||||
exception Point of error
|
||||
|
||||
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
|
||||
end
|
@@ -1,145 +1,27 @@
|
||||
(** Driver for the ReasonLIGO parser *)
|
||||
|
||||
let extension = ".religo"
|
||||
let options = EvalOpt.read "ReasonLIGO" extension
|
||||
|
||||
open Printf
|
||||
|
||||
(** Error printing and exception tracing
|
||||
*)
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
(** Extracting the input file
|
||||
*)
|
||||
let file =
|
||||
match options#input with
|
||||
None | Some "-" -> false
|
||||
| Some _ -> true
|
||||
|
||||
(** {1 Error printing and exception tracing} *)
|
||||
|
||||
let () = Printexc.record_backtrace true
|
||||
|
||||
let external_ text =
|
||||
Utils.highlight (sprintf "External error: %s" text); exit 1;;
|
||||
|
||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
||||
|
||||
(** Path for CPP inclusions (#include)
|
||||
*)
|
||||
let lib_path =
|
||||
match options#libs with
|
||||
[] -> ""
|
||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
||||
in List.fold_right mk_I libs ""
|
||||
|
||||
let prefix =
|
||||
match options#input with
|
||||
None | Some "-" -> "temp"
|
||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
||||
|
||||
let suffix = ".pp" ^ extension
|
||||
|
||||
let pp_input =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then prefix ^ suffix
|
||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
||||
in close_out pp_out; pp_input
|
||||
|
||||
let cpp_cmd =
|
||||
match options#input with
|
||||
None | Some "-" ->
|
||||
sprintf "cpp -traditional-cpp%s - > %s"
|
||||
lib_path pp_input
|
||||
| Some file ->
|
||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
||||
lib_path file pp_input
|
||||
|
||||
let () =
|
||||
if Utils.String.Set.mem "cpp" options#verbose
|
||||
then eprintf "%s\n%!" cpp_cmd;
|
||||
if Sys.command cpp_cmd <> 0 then
|
||||
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
|
||||
|
||||
(** {1 Instanciating the lexer} *)
|
||||
|
||||
module Lexer = Lexer.Make (LexToken)
|
||||
module Log = LexerLog.Make (Lexer)
|
||||
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
|
||||
|
||||
let lexer_inst = Lexer.open_token_stream (Some pp_input)
|
||||
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
|
||||
|
||||
and cout = stdout
|
||||
|
||||
let log = Log.output_token ~offsets:options#offsets
|
||||
options#mode options#cmd cout
|
||||
|
||||
and close_all () = close (); close_out cout
|
||||
|
||||
(** {1 Tokeniser} *)
|
||||
|
||||
let tokeniser = read ~log
|
||||
|
||||
(** {1 Main} *)
|
||||
|
||||
let () =
|
||||
try
|
||||
let ast =
|
||||
if options#mono
|
||||
then ParserFront.mono_contract tokeniser buffer
|
||||
else ParserFront.incr_contract lexer_inst in
|
||||
if Utils.String.Set.mem "ast" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.pp_ast state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
module IO =
|
||||
struct
|
||||
let ext = ".religo"
|
||||
let options = EvalOpt.read "ReasonLIGO" ext
|
||||
end
|
||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
||||
then let buffer = Buffer.create 131 in
|
||||
let state = ParserLog.mk_state
|
||||
~offsets:options#offsets
|
||||
~mode:options#mode
|
||||
~buffer in
|
||||
begin
|
||||
ParserLog.print_tokens state ast;
|
||||
Buffer.output_buffer stdout buffer
|
||||
|
||||
module ExtParser =
|
||||
struct
|
||||
type ast = AST.t
|
||||
type expr = AST.expr
|
||||
include Parser
|
||||
end
|
||||
with
|
||||
(* Lexing errors *)
|
||||
Lexer.Error err ->
|
||||
close_all ();
|
||||
let msg =
|
||||
Lexer.format_error ~offsets:options#offsets
|
||||
options#mode err ~file
|
||||
in prerr_string msg
|
||||
|
||||
(* Incremental API of Menhir *)
|
||||
| ParserFront.Point point ->
|
||||
let () = close_all () in
|
||||
let error =
|
||||
ParserFront.format_error ~offsets:options#offsets
|
||||
options#mode point
|
||||
in eprintf "\027[31m%s\027[0m%!" error
|
||||
module ExtParserLog =
|
||||
struct
|
||||
type ast = AST.t
|
||||
include ParserLog
|
||||
end
|
||||
|
||||
(* Monolithic API of Menhir *)
|
||||
| Parser.Error ->
|
||||
let () = close_all () in
|
||||
let invalid, valid_opt =
|
||||
match get_win () with
|
||||
Lexer.Nil ->
|
||||
assert false (* Safe: There is always at least EOF. *)
|
||||
| Lexer.One invalid -> invalid, None
|
||||
| Lexer.Two (invalid, valid) -> invalid, Some valid in
|
||||
let point = "", valid_opt, invalid in
|
||||
let error =
|
||||
ParserFront.format_error ~offsets:options#offsets
|
||||
options#mode point
|
||||
in eprintf "\027[31m%s\027[0m%!" error
|
||||
|
||||
(* I/O errors *)
|
||||
| Sys_error msg -> Utils.highlight msg
|
||||
module M = ParserUnit.Make (IO)
|
||||
(Lexer.Make (LexToken))
|
||||
(AST)
|
||||
(ExtParser)
|
||||
(ParErr)
|
||||
(ExtParserLog)
|
||||
|
@@ -36,7 +36,7 @@
|
||||
parser_reasonligo
|
||||
parser_cameligo)
|
||||
(modules
|
||||
ParErr ParserAPI ParserMain)
|
||||
ParErr ParserMain)
|
||||
(preprocess
|
||||
(pps bisect_ppx --conditional))
|
||||
(flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo)))
|
||||
|
@@ -1,9 +1,47 @@
|
||||
(* Generic parser for LIGO *)
|
||||
|
||||
module type PARSER =
|
||||
sig
|
||||
(* The type of tokens, abstract syntax trees and expressions *)
|
||||
|
||||
type token
|
||||
type ast
|
||||
type expr
|
||||
|
||||
(* This exception is raised by the monolithic API functions. *)
|
||||
|
||||
exception Error
|
||||
|
||||
(* The monolithic API. *)
|
||||
|
||||
val interactive_expr :
|
||||
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr
|
||||
val contract :
|
||||
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast
|
||||
|
||||
module MenhirInterpreter :
|
||||
sig
|
||||
(* The incremental API. *)
|
||||
|
||||
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
|
||||
with type token = token
|
||||
end
|
||||
|
||||
(* The entry point(s) to the incremental API. *)
|
||||
|
||||
module Incremental :
|
||||
sig
|
||||
val interactive_expr :
|
||||
Lexing.position -> expr MenhirInterpreter.checkpoint
|
||||
val contract :
|
||||
Lexing.position -> ast MenhirInterpreter.checkpoint
|
||||
end
|
||||
end
|
||||
|
||||
(* Main functor *)
|
||||
|
||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(Parser: module type of Parser)
|
||||
module Make (Lexer: Lexer.S)
|
||||
(Parser: PARSER with type token = Lexer.Token.token)
|
||||
(ParErr: sig val message : int -> string end) =
|
||||
struct
|
||||
module I = Parser.MenhirInterpreter
|
||||
@@ -31,8 +69,8 @@ module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(* The parser has suspended itself because of a syntax error. Stop. *)
|
||||
|
||||
type message = string
|
||||
type valid = Lexer.token
|
||||
type invalid = Lexer.token
|
||||
type valid = Parser.token
|
||||
type invalid = Parser.token
|
||||
type error = message * valid option * invalid
|
||||
|
||||
exception Point of error
|
||||
@@ -48,7 +86,7 @@ module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
|
||||
(* The two Menhir APIs are called from the following two functions. *)
|
||||
|
||||
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
|
||||
let incr_contract Lexer.{read; buffer; get_win; close; _} : Parser.ast =
|
||||
let supplier = I.lexer_lexbuf_to_supplier read buffer
|
||||
and failure = failure get_win in
|
||||
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
|
||||
@@ -60,21 +98,21 @@ module Make (Lexer: Lexer.S with module Token := LexToken)
|
||||
(* Errors *)
|
||||
|
||||
let format_error ?(offsets=true) mode (msg, valid_opt, invalid) =
|
||||
let invalid_region = LexToken.to_region invalid in
|
||||
let invalid_region = Lexer.Token.to_region invalid in
|
||||
let header =
|
||||
"Parse error " ^ invalid_region#to_string ~offsets mode in
|
||||
let trailer =
|
||||
match valid_opt with
|
||||
None ->
|
||||
if LexToken.is_eof invalid then ""
|
||||
else let invalid_lexeme = LexToken.to_lexeme invalid in
|
||||
if Lexer.Token.is_eof invalid then ""
|
||||
else let invalid_lexeme = Lexer.Token.to_lexeme invalid in
|
||||
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
||||
| Some valid ->
|
||||
let valid_lexeme = LexToken.to_lexeme valid in
|
||||
let valid_lexeme = Lexer.Token.to_lexeme valid in
|
||||
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
||||
if LexToken.is_eof invalid then s
|
||||
if Lexer.Token.is_eof invalid then s
|
||||
else
|
||||
let invalid_lexeme = LexToken.to_lexeme invalid in
|
||||
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
|
||||
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
||||
let header = header ^ trailer in
|
||||
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
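Note on ParErr: the functor above only requires val message : int -> string, and failure feeds it the Menhir error-state number extracted by state checkpoint. A hypothetical sketch of such a module (the state numbers and wording here are invented for illustration; in practice these modules are typically generated from a Menhir .messages file with menhir --compile-errors):

    module ParErr =
      struct
        (* Hypothetical mapping from Menhir error states to diagnostics. *)
        let message = function
           0 -> "Ill-formed contract.\n"
        | 42 -> "Ill-formed expression after the equal sign.\n"
        |  _ -> "Syntax error.\n"
      end
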
|
src/passes/1-parser/shared/ParserAPI.mli (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
(* Generic parser API for LIGO *)
|
||||
|
||||
module type PARSER =
|
||||
sig
|
||||
(* The type of tokens. *)
|
||||
|
||||
type token
|
||||
type ast
|
||||
type expr
|
||||
|
||||
(* This exception is raised by the monolithic API functions. *)
|
||||
|
||||
exception Error
|
||||
|
||||
(* The monolithic API. *)
|
||||
|
||||
val interactive_expr : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr
|
||||
val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast
|
||||
|
||||
(* The incremental API. *)
|
||||
|
||||
module MenhirInterpreter :
|
||||
sig
|
||||
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
|
||||
with type token = token
|
||||
end
|
||||
|
||||
(* The entry point(s) to the incremental API. *)
|
||||
|
||||
module Incremental :
|
||||
sig
|
||||
val interactive_expr :
|
||||
Lexing.position -> expr MenhirInterpreter.checkpoint
|
||||
val contract :
|
||||
Lexing.position -> ast MenhirInterpreter.checkpoint
|
||||
end
|
||||
end
|
||||
|
||||
module Make (Lexer: Lexer.S)
|
||||
(Parser: PARSER with type token = Lexer.Token.token)
|
||||
(ParErr: sig val message : int -> string end) :
|
||||
sig
|
||||
(* Monolithic and incremental APIs of Menhir for parsing *)
|
||||
|
||||
val mono_contract :
|
||||
(Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> Parser.ast
|
||||
val incr_contract :
|
||||
Lexer.instance -> Parser.ast
|
||||
|
||||
(* Error handling *)
|
||||
|
||||
type message = string
|
||||
type valid = Parser.token
|
||||
type invalid = Parser.token
|
||||
type error = message * valid option * invalid
|
||||
|
||||
exception Point of error
|
||||
|
||||
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
|
||||
end
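
For reference, a client of this interface follows the pattern of the per-syntax ParserMain code removed above; roughly as sketched here, where Lexer, Parser, ParErr and lexer_inst stand for one concrete syntax's modules and open token stream, and the Ok/Error wrapping exists only for the sketch:

    module Front = ParserAPI.Make (Lexer) (Parser) (ParErr)

    let parse (lexer_inst : Lexer.instance) =
      try Ok (Front.incr_contract lexer_inst) with
        Front.Point point ->
          (* Syntax error: report it at the offending token's region. *)
          Error (Front.format_error ~offsets:true `Point point)

The monolithic entry point mono_contract instead takes a tokeniser and a lexing buffer, as in the old drivers' options#mono branch.
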
|
@@ -4,15 +4,17 @@
|
||||
(name parser_shared)
|
||||
(public_name ligo.parser.shared)
|
||||
(libraries
|
||||
menhirLib
|
||||
simple-utils
|
||||
uutf
|
||||
getopt
|
||||
zarith
|
||||
)
|
||||
zarith)
|
||||
(preprocess
|
||||
(pps bisect_ppx --conditional)
|
||||
)
|
||||
(pps bisect_ppx --conditional))
|
||||
(modules
|
||||
LexerUnit
|
||||
ParserUnit
|
||||
ParserAPI
|
||||
Lexer
|
||||
LexerLog
|
||||
Utils
|
||||
|