Sharing standalone lexers and parsers, and parser error API.

Commit: 51ccc28e3c
Parent: e23350071f
@@ -16,4 +16,8 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/Markup.mli
 ../shared/Utils.mli
 ../shared/Utils.ml
+../shared/ParserAPI.mli
+../shared/ParserAPI.ml
+../shared/LexerUnit.ml
+../shared/ParserUnit.ml
 Stubs/Simple_utils.ml
@ -1,56 +1,9 @@
|
|||||||
(** Driver for the LIGO lexer *)
|
(** Driver for the CameLIGO lexer *)
|
||||||
|
|
||||||
let extension = ".mligo"
|
module IO =
|
||||||
let options = EvalOpt.read "CameLIGO" extension
|
struct
|
||||||
|
let ext = ".mligo"
|
||||||
|
let options = EvalOpt.read "CameLIGO" ext
|
||||||
|
end
|
||||||
|
|
||||||
(** Error printing and exception tracing
|
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
|
||||||
*)
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
let external_ text =
|
|
||||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
|
||||||
|
|
||||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
|
||||||
|
|
||||||
(** Path for CPP inclusions (#include)
|
|
||||||
*)
|
|
||||||
let lib_path =
|
|
||||||
match options#libs with
|
|
||||||
[] -> ""
|
|
||||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
|
||||||
in List.fold_right mk_I libs ""
|
|
||||||
|
|
||||||
let prefix =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> "temp"
|
|
||||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
|
||||||
|
|
||||||
let suffix = ".pp" ^ extension
|
|
||||||
|
|
||||||
let pp_input =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then prefix ^ suffix
|
|
||||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
|
||||||
in close_out pp_out; pp_input
|
|
||||||
|
|
||||||
let cpp_cmd =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" ->
|
|
||||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
|
||||||
lib_path pp_input
|
|
||||||
| Some file ->
|
|
||||||
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
|
|
||||||
lib_path file pp_input
|
|
||||||
|
|
||||||
let () =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
|
||||||
if Sys.command cpp_cmd <> 0 then
|
|
||||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
|
||||||
|
|
||||||
(** {1 Running the lexer on the input file} *)
|
|
||||||
|
|
||||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
|
||||||
|
|
||||||
let () = Log.trace ~offsets:options#offsets
|
|
||||||
options#mode (Some pp_input) options#cmd
|
|
||||||
|
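Taken together, the LexerMain.ml rewrites in this commit (CameLIGO here, PascaLIGO and ReasonLIGO below) reduce every standalone lexer driver to the same two steps: a small IO module that fixes the file extension and the command-line options, followed by one application of the shared LexerUnit.Make functor. A minimal sketch of that pattern, using a hypothetical ".xligo" syntax; the shape expected by LexerUnit.Make is assumed from this diff, since the shared LexerUnit.ml itself is not shown here.

(* Sketch only: "XLIGO"/".xligo" are hypothetical names, and the
   signature LexerUnit.Make requires is assumed from this diff. *)

module IO =
  struct
    let ext     = ".xligo"                  (* file extension of the syntax *)
    let options = EvalOpt.read "XLIGO" ext  (* CLI options for that syntax  *)
  end

(* The shared driver presumably now performs the CPP preprocessing and
   the token trace that each per-syntax LexerMain.ml used to duplicate. *)
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))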
@ -1,22 +0,0 @@
|
|||||||
(** Generic parser API for LIGO *)
|
|
||||||
|
|
||||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
|
||||||
(Parser: module type of Parser)
|
|
||||||
(ParErr: sig val message: int -> string end) :
|
|
||||||
sig
|
|
||||||
(* Monolithic and incremental APIs of Menhir for parsing *)
|
|
||||||
|
|
||||||
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
|
|
||||||
val incr_contract : Lexer.instance -> AST.t
|
|
||||||
|
|
||||||
(* Error handling *)
|
|
||||||
|
|
||||||
type message = string
|
|
||||||
type valid = Lexer.token
|
|
||||||
type invalid = Lexer.token
|
|
||||||
type error = message * valid option * invalid
|
|
||||||
|
|
||||||
exception Point of error
|
|
||||||
|
|
||||||
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
|
|
||||||
end
|
|
@ -1,145 +1,27 @@
|
|||||||
(** Driver for the CameLIGO parser *)
|
(** Driver for the CameLIGO parser *)
|
||||||
|
|
||||||
let extension = ".mligo"
|
module IO =
|
||||||
let options = EvalOpt.read "CameLIGO" extension
|
struct
|
||||||
|
let ext = ".mligo"
|
||||||
open Printf
|
let options = EvalOpt.read "CameLIGO" ext
|
||||||
|
|
||||||
(** Error printing and exception tracing
|
|
||||||
*)
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
(** Extracting the input file
|
|
||||||
*)
|
|
||||||
let file =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> false
|
|
||||||
| Some _ -> true
|
|
||||||
|
|
||||||
(** {1 Error printing and exception tracing} *)
|
|
||||||
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
let external_ text =
|
|
||||||
Utils.highlight (sprintf "External error: %s" text); exit 1;;
|
|
||||||
|
|
||||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
|
||||||
|
|
||||||
(** Path for CPP inclusions (#include)
|
|
||||||
*)
|
|
||||||
let lib_path =
|
|
||||||
match options#libs with
|
|
||||||
[] -> ""
|
|
||||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
|
||||||
in List.fold_right mk_I libs ""
|
|
||||||
|
|
||||||
let prefix =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> "temp"
|
|
||||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
|
||||||
|
|
||||||
let suffix = ".pp" ^ extension
|
|
||||||
|
|
||||||
let pp_input =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then prefix ^ suffix
|
|
||||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
|
||||||
in close_out pp_out; pp_input
|
|
||||||
|
|
||||||
let cpp_cmd =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" ->
|
|
||||||
sprintf "cpp -traditional-cpp%s - > %s"
|
|
||||||
lib_path pp_input
|
|
||||||
| Some file ->
|
|
||||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
|
||||||
lib_path file pp_input
|
|
||||||
|
|
||||||
let () =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then eprintf "%s\n%!" cpp_cmd;
|
|
||||||
if Sys.command cpp_cmd <> 0 then
|
|
||||||
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
|
|
||||||
|
|
||||||
(** {1 Instanciating the lexer} *)
|
|
||||||
|
|
||||||
module Lexer = Lexer.Make (LexToken)
|
|
||||||
module Log = LexerLog.Make (Lexer)
|
|
||||||
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
|
|
||||||
|
|
||||||
let lexer_inst = Lexer.open_token_stream (Some pp_input)
|
|
||||||
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
|
|
||||||
|
|
||||||
and cout = stdout
|
|
||||||
|
|
||||||
let log = Log.output_token ~offsets:options#offsets
|
|
||||||
options#mode options#cmd cout
|
|
||||||
|
|
||||||
and close_all () = close (); close_out cout
|
|
||||||
|
|
||||||
(** {1 Tokeniser} *)
|
|
||||||
|
|
||||||
let tokeniser = read ~log
|
|
||||||
|
|
||||||
(** {1 Main} *)
|
|
||||||
|
|
||||||
let () =
|
|
||||||
try
|
|
||||||
let ast =
|
|
||||||
if options#mono
|
|
||||||
then ParserFront.mono_contract tokeniser buffer
|
|
||||||
else ParserFront.incr_contract lexer_inst in
|
|
||||||
if Utils.String.Set.mem "ast" options#verbose
|
|
||||||
then let buffer = Buffer.create 131 in
|
|
||||||
let state = ParserLog.mk_state
|
|
||||||
~offsets:options#offsets
|
|
||||||
~mode:options#mode
|
|
||||||
~buffer in
|
|
||||||
begin
|
|
||||||
ParserLog.pp_ast state ast;
|
|
||||||
Buffer.output_buffer stdout buffer
|
|
||||||
end
|
end
|
||||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
|
||||||
then let buffer = Buffer.create 131 in
|
module ExtParser =
|
||||||
let state = ParserLog.mk_state
|
struct
|
||||||
~offsets:options#offsets
|
type ast = AST.t
|
||||||
~mode:options#mode
|
type expr = AST.expr
|
||||||
~buffer in
|
include Parser
|
||||||
begin
|
|
||||||
ParserLog.print_tokens state ast;
|
|
||||||
Buffer.output_buffer stdout buffer
|
|
||||||
end
|
end
|
||||||
with
|
|
||||||
(* Lexing errors *)
|
|
||||||
Lexer.Error err ->
|
|
||||||
close_all ();
|
|
||||||
let msg =
|
|
||||||
Lexer.format_error ~offsets:options#offsets
|
|
||||||
options#mode err ~file
|
|
||||||
in prerr_string msg
|
|
||||||
|
|
||||||
(* Incremental API of Menhir *)
|
module ExtParserLog =
|
||||||
| ParserFront.Point point ->
|
struct
|
||||||
let () = close_all () in
|
type ast = AST.t
|
||||||
let error =
|
include ParserLog
|
||||||
ParserFront.format_error ~offsets:options#offsets
|
end
|
||||||
options#mode point
|
|
||||||
in eprintf "\027[31m%s\027[0m%!" error
|
|
||||||
|
|
||||||
(* Monolithic API of Menhir *)
|
module M = ParserUnit.Make (IO)
|
||||||
| Parser.Error ->
|
(Lexer.Make (LexToken))
|
||||||
let () = close_all () in
|
(AST)
|
||||||
let invalid, valid_opt =
|
(ExtParser)
|
||||||
match get_win () with
|
(ParErr)
|
||||||
Lexer.Nil ->
|
(ExtParserLog)
|
||||||
assert false (* Safe: There is always at least EOF. *)
|
|
||||||
| Lexer.One invalid -> invalid, None
|
|
||||||
| Lexer.Two (invalid, valid) -> invalid, Some valid in
|
|
||||||
let point = "", valid_opt, invalid in
|
|
||||||
let error =
|
|
||||||
ParserFront.format_error ~offsets:options#offsets
|
|
||||||
options#mode point
|
|
||||||
in eprintf "\027[31m%s\027[0m%!" error
|
|
||||||
|
|
||||||
(* I/O errors *)
|
|
||||||
| Sys_error msg -> Utils.highlight msg
|
|
||||||
|
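The ParserMain.ml rewrites follow the same scheme: two small adapter modules bridge the Menhir-generated Parser and the ParserLog printer to the shared ParserUnit.Make functor, which now carries the preprocessing, lexing and error reporting that each 145-line driver used to duplicate. A sketch of the new driver shape as it reads in this hunk; the exact signatures ParserUnit.Make requires live in the shared ParserUnit.ml, which is not part of this diff.

(* Sketch of the new per-syntax parser driver; the module names are the
   ones visible in this hunk, their expected signatures are assumed. *)

module IO =
  struct
    let ext     = ".mligo"
    let options = EvalOpt.read "CameLIGO" ext
  end

(* Adapter exposing the Menhir-generated parser with explicit
   [ast] and [expr] types. *)
module ExtParser =
  struct
    type ast  = AST.t
    type expr = AST.expr
    include Parser
  end

(* Adapter doing the same for the AST pretty-printer. *)
module ExtParserLog =
  struct
    type ast = AST.t
    include ParserLog
  end

module M = ParserUnit.Make (IO)
                           (Lexer.Make (LexToken))
                           (AST)
                           (ExtParser)
                           (ParErr)
                           (ExtParserLog)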
@@ -32,7 +32,7 @@
 (name ParserMain)
 (libraries parser_cameligo)
 (modules
-  ParErr ParserAPI ParserMain)
+  ParErr ParserMain)
 (preprocess
   (pps bisect_ppx --conditional))
 (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
@@ -16,4 +16,8 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/Markup.mli
 ../shared/Utils.mli
 ../shared/Utils.ml
+../shared/ParserAPI.mli
+../shared/ParserAPI.ml
+../shared/LexerUnit.ml
+../shared/ParserUnit.ml
 Stubs/Simple_utils.ml
@ -1,56 +1,9 @@
|
|||||||
(** Driver for the LIGO lexer *)
|
(** Driver for the PascaLIGO lexer *)
|
||||||
|
|
||||||
let extension = ".ligo"
|
module IO =
|
||||||
let options = EvalOpt.read "PascaLIGO" extension
|
struct
|
||||||
|
let ext = ".ligo"
|
||||||
|
let options = EvalOpt.read "PascaLIGO" ext
|
||||||
|
end
|
||||||
|
|
||||||
(** Error printing and exception tracing
|
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
|
||||||
*)
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
let external_ text =
|
|
||||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
|
||||||
|
|
||||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
|
||||||
|
|
||||||
(** Path for CPP inclusions (#include)
|
|
||||||
*)
|
|
||||||
let lib_path =
|
|
||||||
match options#libs with
|
|
||||||
[] -> ""
|
|
||||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
|
||||||
in List.fold_right mk_I libs ""
|
|
||||||
|
|
||||||
let prefix =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> "temp"
|
|
||||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
|
||||||
|
|
||||||
let suffix = ".pp" ^ extension
|
|
||||||
|
|
||||||
let pp_input =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then prefix ^ suffix
|
|
||||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
|
||||||
in close_out pp_out; pp_input
|
|
||||||
|
|
||||||
let cpp_cmd =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" ->
|
|
||||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
|
||||||
lib_path pp_input
|
|
||||||
| Some file ->
|
|
||||||
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
|
|
||||||
lib_path file pp_input
|
|
||||||
|
|
||||||
let () =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
|
||||||
if Sys.command cpp_cmd <> 0 then
|
|
||||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
|
||||||
|
|
||||||
(** {1 Running the lexer on the input file} *)
|
|
||||||
|
|
||||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
|
||||||
|
|
||||||
let () = Log.trace ~offsets:options#offsets
|
|
||||||
options#mode (Some pp_input) options#cmd
|
|
||||||
|
@ -1,82 +0,0 @@
|
|||||||
(* Generic parser for LIGO *)
|
|
||||||
|
|
||||||
(* Main functor *)
|
|
||||||
|
|
||||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
|
||||||
(Parser: module type of Parser)
|
|
||||||
(ParErr: sig val message : int -> string end) =
|
|
||||||
struct
|
|
||||||
module I = Parser.MenhirInterpreter
|
|
||||||
module S = MenhirLib.General (* Streams *)
|
|
||||||
|
|
||||||
(* The call [stack checkpoint] extracts the parser's stack out of
|
|
||||||
a checkpoint. *)
|
|
||||||
|
|
||||||
let stack = function
|
|
||||||
I.HandlingError env -> I.stack env
|
|
||||||
| _ -> assert false
|
|
||||||
|
|
||||||
(* The call [state checkpoint] extracts the number of the current
|
|
||||||
state out of a parser checkpoint. *)
|
|
||||||
|
|
||||||
let state checkpoint : int =
|
|
||||||
match Lazy.force (stack checkpoint) with
|
|
||||||
S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *)
|
|
||||||
| S.Cons (I.Element (s,_,_,_),_) -> I.number s
|
|
||||||
|
|
||||||
(* The parser has successfully produced a semantic value. *)
|
|
||||||
|
|
||||||
let success v = v
|
|
||||||
|
|
||||||
(* The parser has suspended itself because of a syntax error. Stop. *)
|
|
||||||
|
|
||||||
type message = string
|
|
||||||
type valid = Lexer.token
|
|
||||||
type invalid = Lexer.token
|
|
||||||
type error = message * valid option * invalid
|
|
||||||
|
|
||||||
exception Point of error
|
|
||||||
|
|
||||||
let failure get_win checkpoint =
|
|
||||||
let message = ParErr.message (state checkpoint) in
|
|
||||||
match get_win () with
|
|
||||||
Lexer.Nil -> assert false
|
|
||||||
| Lexer.One invalid ->
|
|
||||||
raise (Point (message, None, invalid))
|
|
||||||
| Lexer.Two (invalid, valid) ->
|
|
||||||
raise (Point (message, Some valid, invalid))
|
|
||||||
|
|
||||||
(* The two Menhir APIs are called from the following two functions. *)
|
|
||||||
|
|
||||||
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
|
|
||||||
let supplier = I.lexer_lexbuf_to_supplier read buffer
|
|
||||||
and failure = failure get_win in
|
|
||||||
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
|
|
||||||
let ast = I.loop_handle success failure supplier parser
|
|
||||||
in close (); ast
|
|
||||||
|
|
||||||
let mono_contract = Parser.contract
|
|
||||||
|
|
||||||
(* Errors *)
|
|
||||||
|
|
||||||
let format_error ?(offsets=true) mode (msg, valid_opt, invalid) =
|
|
||||||
let invalid_region = LexToken.to_region invalid in
|
|
||||||
let header =
|
|
||||||
"Parse error " ^ invalid_region#to_string ~offsets mode in
|
|
||||||
let trailer =
|
|
||||||
match valid_opt with
|
|
||||||
None ->
|
|
||||||
if LexToken.is_eof invalid then ""
|
|
||||||
else let invalid_lexeme = LexToken.to_lexeme invalid in
|
|
||||||
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
|
||||||
| Some valid ->
|
|
||||||
let valid_lexeme = LexToken.to_lexeme valid in
|
|
||||||
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
|
||||||
if LexToken.is_eof invalid then s
|
|
||||||
else
|
|
||||||
let invalid_lexeme = LexToken.to_lexeme invalid in
|
|
||||||
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
|
||||||
let header = header ^ trailer in
|
|
||||||
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
|
|
||||||
|
|
||||||
end
|
|
@ -1,22 +0,0 @@
|
|||||||
(** Generic parser API for LIGO *)
|
|
||||||
|
|
||||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
|
||||||
(Parser: module type of Parser)
|
|
||||||
(ParErr: module type of ParErr) :
|
|
||||||
sig
|
|
||||||
(* Monolithic and incremental APIs of Menhir for parsing *)
|
|
||||||
|
|
||||||
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
|
|
||||||
val incr_contract : Lexer.instance -> AST.t
|
|
||||||
|
|
||||||
(* Error handling *)
|
|
||||||
|
|
||||||
type message = string
|
|
||||||
type valid = Lexer.token
|
|
||||||
type invalid = Lexer.token
|
|
||||||
type error = message * valid option * invalid
|
|
||||||
|
|
||||||
exception Point of error
|
|
||||||
|
|
||||||
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
|
|
||||||
end
|
|
@ -1,145 +1,27 @@
|
|||||||
(** Driver for the PascaLIGO parser *)
|
(** Driver for the PascaLIGO parser *)
|
||||||
|
|
||||||
let extension = ".ligo"
|
module IO =
|
||||||
let options = EvalOpt.read "PascaLIGO" extension
|
struct
|
||||||
|
let ext = ".ligo"
|
||||||
open Printf
|
let options = EvalOpt.read "PascaLIGO" ext
|
||||||
|
|
||||||
(** Error printing and exception tracing
|
|
||||||
*)
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
(** Extracting the input file
|
|
||||||
*)
|
|
||||||
let file =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> false
|
|
||||||
| Some _ -> true
|
|
||||||
|
|
||||||
(** {1 Error printing and exception tracing} *)
|
|
||||||
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
let external_ text =
|
|
||||||
Utils.highlight (sprintf "External error: %s" text); exit 1;;
|
|
||||||
|
|
||||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
|
||||||
|
|
||||||
(** Path for CPP inclusions (#include)
|
|
||||||
*)
|
|
||||||
let lib_path =
|
|
||||||
match options#libs with
|
|
||||||
[] -> ""
|
|
||||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
|
||||||
in List.fold_right mk_I libs ""
|
|
||||||
|
|
||||||
let prefix =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> "temp"
|
|
||||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
|
||||||
|
|
||||||
let suffix = ".pp" ^ extension
|
|
||||||
|
|
||||||
let pp_input =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then prefix ^ suffix
|
|
||||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
|
||||||
in close_out pp_out; pp_input
|
|
||||||
|
|
||||||
let cpp_cmd =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" ->
|
|
||||||
sprintf "cpp -traditional-cpp%s - > %s"
|
|
||||||
lib_path pp_input
|
|
||||||
| Some file ->
|
|
||||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
|
||||||
lib_path file pp_input
|
|
||||||
|
|
||||||
let () =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then eprintf "%s\n%!" cpp_cmd;
|
|
||||||
if Sys.command cpp_cmd <> 0 then
|
|
||||||
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
|
|
||||||
|
|
||||||
(** {1 Instanciating the lexer} *)
|
|
||||||
|
|
||||||
module Lexer = Lexer.Make (LexToken)
|
|
||||||
module Log = LexerLog.Make (Lexer)
|
|
||||||
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
|
|
||||||
|
|
||||||
let lexer_inst = Lexer.open_token_stream (Some pp_input)
|
|
||||||
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
|
|
||||||
|
|
||||||
and cout = stdout
|
|
||||||
|
|
||||||
let log = Log.output_token ~offsets:options#offsets
|
|
||||||
options#mode options#cmd cout
|
|
||||||
|
|
||||||
and close_all () = close (); close_out cout
|
|
||||||
|
|
||||||
(** {1 Tokeniser} *)
|
|
||||||
|
|
||||||
let tokeniser = read ~log
|
|
||||||
|
|
||||||
(** {1 Main} *)
|
|
||||||
|
|
||||||
let () =
|
|
||||||
try
|
|
||||||
let ast =
|
|
||||||
if options#mono
|
|
||||||
then ParserFront.mono_contract tokeniser buffer
|
|
||||||
else ParserFront.incr_contract lexer_inst in
|
|
||||||
if Utils.String.Set.mem "ast" options#verbose
|
|
||||||
then let buffer = Buffer.create 131 in
|
|
||||||
let state = ParserLog.mk_state
|
|
||||||
~offsets:options#offsets
|
|
||||||
~mode:options#mode
|
|
||||||
~buffer in
|
|
||||||
begin
|
|
||||||
ParserLog.pp_ast state ast;
|
|
||||||
Buffer.output_buffer stdout buffer
|
|
||||||
end
|
end
|
||||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
|
||||||
then let buffer = Buffer.create 131 in
|
module ExtParser =
|
||||||
let state = ParserLog.mk_state
|
struct
|
||||||
~offsets:options#offsets
|
type ast = AST.t
|
||||||
~mode:options#mode
|
type expr = AST.expr
|
||||||
~buffer in
|
include Parser
|
||||||
begin
|
|
||||||
ParserLog.print_tokens state ast;
|
|
||||||
Buffer.output_buffer stdout buffer
|
|
||||||
end
|
end
|
||||||
with
|
|
||||||
(* Lexing errors *)
|
|
||||||
Lexer.Error err ->
|
|
||||||
close_all ();
|
|
||||||
let msg =
|
|
||||||
Lexer.format_error ~offsets:options#offsets
|
|
||||||
options#mode err ~file
|
|
||||||
in prerr_string msg
|
|
||||||
|
|
||||||
(* Incremental API of Menhir *)
|
module ExtParserLog =
|
||||||
| ParserFront.Point point ->
|
struct
|
||||||
let () = close_all () in
|
type ast = AST.t
|
||||||
let error =
|
include ParserLog
|
||||||
ParserFront.format_error ~offsets:options#offsets
|
end
|
||||||
options#mode point
|
|
||||||
in eprintf "\027[31m%s\027[0m%!" error
|
|
||||||
|
|
||||||
(* Monolithic API of Menhir *)
|
module M = ParserUnit.Make (IO)
|
||||||
| Parser.Error ->
|
(Lexer.Make (LexToken))
|
||||||
let () = close_all () in
|
(AST)
|
||||||
let invalid, valid_opt =
|
(ExtParser)
|
||||||
match get_win () with
|
(ParErr)
|
||||||
Lexer.Nil ->
|
(ExtParserLog)
|
||||||
assert false (* Safe: There is always at least EOF. *)
|
|
||||||
| Lexer.One invalid -> invalid, None
|
|
||||||
| Lexer.Two (invalid, valid) -> invalid, Some valid in
|
|
||||||
let point = "", valid_opt, invalid in
|
|
||||||
let error =
|
|
||||||
ParserFront.format_error ~offsets:options#offsets
|
|
||||||
options#mode point
|
|
||||||
in eprintf "\027[31m%s\027[0m%!" error
|
|
||||||
|
|
||||||
(* I/O errors *)
|
|
||||||
| Sys_error msg -> Utils.highlight msg
|
|
||||||
|
@@ -14,8 +14,7 @@
   parser_shared
   hex
   simple-utils
-  tezos-utils
-)
+  tezos-utils)
 (preprocess
   (pps bisect_ppx --conditional))
 (flags (:standard -open Parser_shared -open Simple_utils)))
@@ -33,7 +32,7 @@
 (name ParserMain)
 (libraries parser_pascaligo)
 (modules
-  ParErr ParserAPI ParserMain)
+  ParErr ParserMain)
 (preprocess
   (pps bisect_ppx --conditional))
 (flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))
@@ -16,6 +16,10 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
 ../shared/Markup.mli
 ../shared/Utils.mli
 ../shared/Utils.ml
+../shared/ParserAPI.mli
+../shared/ParserAPI.ml
+../shared/LexerUnit.ml
+../shared/ParserUnit.ml
 Stubs/Simple_utils.ml
 Stubs/Parser_cameligo.ml
 ../cameligo/AST.mli
@ -1,56 +1,9 @@
|
|||||||
(** Driver for the LIGO lexer *)
|
(** Driver for the ReasonLIGO lexer *)
|
||||||
|
|
||||||
let extension = ".religo"
|
module IO =
|
||||||
let options = EvalOpt.read "ReasonLIGO" extension
|
struct
|
||||||
|
let ext = ".religo"
|
||||||
|
let options = EvalOpt.read "ReasonLIGO" ext
|
||||||
|
end
|
||||||
|
|
||||||
(** Error printing and exception tracing
|
module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
|
||||||
*)
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
let external_ text =
|
|
||||||
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
|
|
||||||
|
|
||||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
|
||||||
|
|
||||||
(** Path for CPP inclusions (#include)
|
|
||||||
*)
|
|
||||||
let lib_path =
|
|
||||||
match options#libs with
|
|
||||||
[] -> ""
|
|
||||||
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
|
|
||||||
in List.fold_right mk_I libs ""
|
|
||||||
|
|
||||||
let prefix =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> "temp"
|
|
||||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
|
||||||
|
|
||||||
let suffix = ".pp" ^ extension
|
|
||||||
|
|
||||||
let pp_input =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then prefix ^ suffix
|
|
||||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
|
||||||
in close_out pp_out; pp_input
|
|
||||||
|
|
||||||
let cpp_cmd =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" ->
|
|
||||||
Printf.sprintf "cpp -traditional-cpp%s - > %s"
|
|
||||||
lib_path pp_input
|
|
||||||
| Some file ->
|
|
||||||
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
|
|
||||||
lib_path file pp_input
|
|
||||||
|
|
||||||
let () =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then Printf.eprintf "%s\n%!" cpp_cmd;
|
|
||||||
if Sys.command cpp_cmd <> 0 then
|
|
||||||
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
|
|
||||||
|
|
||||||
(** {1 Running the lexer on the input file} *)
|
|
||||||
|
|
||||||
module Log = LexerLog.Make (Lexer.Make (LexToken))
|
|
||||||
|
|
||||||
let () = Log.trace ~offsets:options#offsets
|
|
||||||
options#mode (Some pp_input) options#cmd
|
|
||||||
|
@ -1,82 +0,0 @@
|
|||||||
(* Generic parser for LIGO *)
|
|
||||||
|
|
||||||
(* Main functor *)
|
|
||||||
|
|
||||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
|
||||||
(Parser: module type of Parser)
|
|
||||||
(ParErr: sig val message : int -> string end) =
|
|
||||||
struct
|
|
||||||
module I = Parser.MenhirInterpreter
|
|
||||||
module S = MenhirLib.General (* Streams *)
|
|
||||||
|
|
||||||
(* The call [stack checkpoint] extracts the parser's stack out of
|
|
||||||
a checkpoint. *)
|
|
||||||
|
|
||||||
let stack = function
|
|
||||||
I.HandlingError env -> I.stack env
|
|
||||||
| _ -> assert false
|
|
||||||
|
|
||||||
(* The call [state checkpoint] extracts the number of the current
|
|
||||||
state out of a parser checkpoint. *)
|
|
||||||
|
|
||||||
let state checkpoint : int =
|
|
||||||
match Lazy.force (stack checkpoint) with
|
|
||||||
S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *)
|
|
||||||
| S.Cons (I.Element (s,_,_,_),_) -> I.number s
|
|
||||||
|
|
||||||
(* The parser has successfully produced a semantic value. *)
|
|
||||||
|
|
||||||
let success v = v
|
|
||||||
|
|
||||||
(* The parser has suspended itself because of a syntax error. Stop. *)
|
|
||||||
|
|
||||||
type message = string
|
|
||||||
type valid = Lexer.token
|
|
||||||
type invalid = Lexer.token
|
|
||||||
type error = message * valid option * invalid
|
|
||||||
|
|
||||||
exception Point of error
|
|
||||||
|
|
||||||
let failure get_win checkpoint =
|
|
||||||
let message = ParErr.message (state checkpoint) in
|
|
||||||
match get_win () with
|
|
||||||
Lexer.Nil -> assert false
|
|
||||||
| Lexer.One invalid ->
|
|
||||||
raise (Point (message, None, invalid))
|
|
||||||
| Lexer.Two (invalid, valid) ->
|
|
||||||
raise (Point (message, Some valid, invalid))
|
|
||||||
|
|
||||||
(* The two Menhir APIs are called from the following two functions. *)
|
|
||||||
|
|
||||||
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
|
|
||||||
let supplier = I.lexer_lexbuf_to_supplier read buffer
|
|
||||||
and failure = failure get_win in
|
|
||||||
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
|
|
||||||
let ast = I.loop_handle success failure supplier parser
|
|
||||||
in close (); ast
|
|
||||||
|
|
||||||
let mono_contract = Parser.contract
|
|
||||||
|
|
||||||
(* Errors *)
|
|
||||||
|
|
||||||
let format_error ?(offsets=true) mode (msg, valid_opt, invalid) =
|
|
||||||
let invalid_region = LexToken.to_region invalid in
|
|
||||||
let header =
|
|
||||||
"Parse error " ^ invalid_region#to_string ~offsets mode in
|
|
||||||
let trailer =
|
|
||||||
match valid_opt with
|
|
||||||
None ->
|
|
||||||
if LexToken.is_eof invalid then ""
|
|
||||||
else let invalid_lexeme = LexToken.to_lexeme invalid in
|
|
||||||
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
|
||||||
| Some valid ->
|
|
||||||
let valid_lexeme = LexToken.to_lexeme valid in
|
|
||||||
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
|
||||||
if LexToken.is_eof invalid then s
|
|
||||||
else
|
|
||||||
let invalid_lexeme = LexToken.to_lexeme invalid in
|
|
||||||
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
|
||||||
let header = header ^ trailer in
|
|
||||||
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
|
|
||||||
|
|
||||||
end
|
|
@ -1,22 +0,0 @@
|
|||||||
(** Generic parser API for LIGO *)
|
|
||||||
|
|
||||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
|
||||||
(Parser: module type of Parser)
|
|
||||||
(ParErr: sig val message: int -> string end) :
|
|
||||||
sig
|
|
||||||
(* Monolithic and incremental APIs of Menhir for parsing *)
|
|
||||||
|
|
||||||
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
|
|
||||||
val incr_contract : Lexer.instance -> AST.t
|
|
||||||
|
|
||||||
(* Error handling *)
|
|
||||||
|
|
||||||
type message = string
|
|
||||||
type valid = Lexer.token
|
|
||||||
type invalid = Lexer.token
|
|
||||||
type error = message * valid option * invalid
|
|
||||||
|
|
||||||
exception Point of error
|
|
||||||
|
|
||||||
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
|
|
||||||
end
|
|
@ -1,145 +1,27 @@
|
|||||||
(** Driver for the ReasonLIGO parser *)
|
(** Driver for the ReasonLIGO parser *)
|
||||||
|
|
||||||
let extension = ".religo"
|
module IO =
|
||||||
let options = EvalOpt.read "ReasonLIGO" extension
|
struct
|
||||||
|
let ext = ".religo"
|
||||||
open Printf
|
let options = EvalOpt.read "ReasonLIGO" ext
|
||||||
|
|
||||||
(** Error printing and exception tracing
|
|
||||||
*)
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
(** Extracting the input file
|
|
||||||
*)
|
|
||||||
let file =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> false
|
|
||||||
| Some _ -> true
|
|
||||||
|
|
||||||
(** {1 Error printing and exception tracing} *)
|
|
||||||
|
|
||||||
let () = Printexc.record_backtrace true
|
|
||||||
|
|
||||||
let external_ text =
|
|
||||||
Utils.highlight (sprintf "External error: %s" text); exit 1;;
|
|
||||||
|
|
||||||
(** {1 Preprocessing the input source and opening the input channels} *)
|
|
||||||
|
|
||||||
(** Path for CPP inclusions (#include)
|
|
||||||
*)
|
|
||||||
let lib_path =
|
|
||||||
match options#libs with
|
|
||||||
[] -> ""
|
|
||||||
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
|
|
||||||
in List.fold_right mk_I libs ""
|
|
||||||
|
|
||||||
let prefix =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" -> "temp"
|
|
||||||
| Some file -> Filename.(file |> basename |> remove_extension)
|
|
||||||
|
|
||||||
let suffix = ".pp" ^ extension
|
|
||||||
|
|
||||||
let pp_input =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then prefix ^ suffix
|
|
||||||
else let pp_input, pp_out = Filename.open_temp_file prefix suffix
|
|
||||||
in close_out pp_out; pp_input
|
|
||||||
|
|
||||||
let cpp_cmd =
|
|
||||||
match options#input with
|
|
||||||
None | Some "-" ->
|
|
||||||
sprintf "cpp -traditional-cpp%s - > %s"
|
|
||||||
lib_path pp_input
|
|
||||||
| Some file ->
|
|
||||||
sprintf "cpp -traditional-cpp%s %s > %s"
|
|
||||||
lib_path file pp_input
|
|
||||||
|
|
||||||
let () =
|
|
||||||
if Utils.String.Set.mem "cpp" options#verbose
|
|
||||||
then eprintf "%s\n%!" cpp_cmd;
|
|
||||||
if Sys.command cpp_cmd <> 0 then
|
|
||||||
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
|
|
||||||
|
|
||||||
(** {1 Instanciating the lexer} *)
|
|
||||||
|
|
||||||
module Lexer = Lexer.Make (LexToken)
|
|
||||||
module Log = LexerLog.Make (Lexer)
|
|
||||||
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
|
|
||||||
|
|
||||||
let lexer_inst = Lexer.open_token_stream (Some pp_input)
|
|
||||||
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
|
|
||||||
|
|
||||||
and cout = stdout
|
|
||||||
|
|
||||||
let log = Log.output_token ~offsets:options#offsets
|
|
||||||
options#mode options#cmd cout
|
|
||||||
|
|
||||||
and close_all () = close (); close_out cout
|
|
||||||
|
|
||||||
(** {1 Tokeniser} *)
|
|
||||||
|
|
||||||
let tokeniser = read ~log
|
|
||||||
|
|
||||||
(** {1 Main} *)
|
|
||||||
|
|
||||||
let () =
|
|
||||||
try
|
|
||||||
let ast =
|
|
||||||
if options#mono
|
|
||||||
then ParserFront.mono_contract tokeniser buffer
|
|
||||||
else ParserFront.incr_contract lexer_inst in
|
|
||||||
if Utils.String.Set.mem "ast" options#verbose
|
|
||||||
then let buffer = Buffer.create 131 in
|
|
||||||
let state = ParserLog.mk_state
|
|
||||||
~offsets:options#offsets
|
|
||||||
~mode:options#mode
|
|
||||||
~buffer in
|
|
||||||
begin
|
|
||||||
ParserLog.pp_ast state ast;
|
|
||||||
Buffer.output_buffer stdout buffer
|
|
||||||
end
|
end
|
||||||
else if Utils.String.Set.mem "ast-tokens" options#verbose
|
|
||||||
then let buffer = Buffer.create 131 in
|
module ExtParser =
|
||||||
let state = ParserLog.mk_state
|
struct
|
||||||
~offsets:options#offsets
|
type ast = AST.t
|
||||||
~mode:options#mode
|
type expr = AST.expr
|
||||||
~buffer in
|
include Parser
|
||||||
begin
|
|
||||||
ParserLog.print_tokens state ast;
|
|
||||||
Buffer.output_buffer stdout buffer
|
|
||||||
end
|
end
|
||||||
with
|
|
||||||
(* Lexing errors *)
|
|
||||||
Lexer.Error err ->
|
|
||||||
close_all ();
|
|
||||||
let msg =
|
|
||||||
Lexer.format_error ~offsets:options#offsets
|
|
||||||
options#mode err ~file
|
|
||||||
in prerr_string msg
|
|
||||||
|
|
||||||
(* Incremental API of Menhir *)
|
module ExtParserLog =
|
||||||
| ParserFront.Point point ->
|
struct
|
||||||
let () = close_all () in
|
type ast = AST.t
|
||||||
let error =
|
include ParserLog
|
||||||
ParserFront.format_error ~offsets:options#offsets
|
end
|
||||||
options#mode point
|
|
||||||
in eprintf "\027[31m%s\027[0m%!" error
|
|
||||||
|
|
||||||
(* Monolithic API of Menhir *)
|
module M = ParserUnit.Make (IO)
|
||||||
| Parser.Error ->
|
(Lexer.Make (LexToken))
|
||||||
let () = close_all () in
|
(AST)
|
||||||
let invalid, valid_opt =
|
(ExtParser)
|
||||||
match get_win () with
|
(ParErr)
|
||||||
Lexer.Nil ->
|
(ExtParserLog)
|
||||||
assert false (* Safe: There is always at least EOF. *)
|
|
||||||
| Lexer.One invalid -> invalid, None
|
|
||||||
| Lexer.Two (invalid, valid) -> invalid, Some valid in
|
|
||||||
let point = "", valid_opt, invalid in
|
|
||||||
let error =
|
|
||||||
ParserFront.format_error ~offsets:options#offsets
|
|
||||||
options#mode point
|
|
||||||
in eprintf "\027[31m%s\027[0m%!" error
|
|
||||||
|
|
||||||
(* I/O errors *)
|
|
||||||
| Sys_error msg -> Utils.highlight msg
|
|
||||||
|
@@ -36,7 +36,7 @@
   parser_reasonligo
   parser_cameligo)
 (modules
-  ParErr ParserAPI ParserMain)
+  ParErr ParserMain)
 (preprocess
   (pps bisect_ppx --conditional))
 (flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo)))
@ -1,9 +1,47 @@
|
|||||||
(* Generic parser for LIGO *)
|
(* Generic parser for LIGO *)
|
||||||
|
|
||||||
|
module type PARSER =
|
||||||
|
sig
|
||||||
|
(* The type of tokens, abstract syntax trees and expressions *)
|
||||||
|
|
||||||
|
type token
|
||||||
|
type ast
|
||||||
|
type expr
|
||||||
|
|
||||||
|
(* This exception is raised by the monolithic API functions. *)
|
||||||
|
|
||||||
|
exception Error
|
||||||
|
|
||||||
|
(* The monolithic API. *)
|
||||||
|
|
||||||
|
val interactive_expr :
|
||||||
|
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr
|
||||||
|
val contract :
|
||||||
|
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast
|
||||||
|
|
||||||
|
module MenhirInterpreter :
|
||||||
|
sig
|
||||||
|
(* The incremental API. *)
|
||||||
|
|
||||||
|
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
|
||||||
|
with type token = token
|
||||||
|
end
|
||||||
|
|
||||||
|
(* The entry point(s) to the incremental API. *)
|
||||||
|
|
||||||
|
module Incremental :
|
||||||
|
sig
|
||||||
|
val interactive_expr :
|
||||||
|
Lexing.position -> expr MenhirInterpreter.checkpoint
|
||||||
|
val contract :
|
||||||
|
Lexing.position -> ast MenhirInterpreter.checkpoint
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
(* Main functor *)
|
(* Main functor *)
|
||||||
|
|
||||||
module Make (Lexer: Lexer.S with module Token := LexToken)
|
module Make (Lexer: Lexer.S)
|
||||||
(Parser: module type of Parser)
|
(Parser: PARSER with type token = Lexer.Token.token)
|
||||||
(ParErr: sig val message : int -> string end) =
|
(ParErr: sig val message : int -> string end) =
|
||||||
struct
|
struct
|
||||||
module I = Parser.MenhirInterpreter
|
module I = Parser.MenhirInterpreter
|
||||||
@ -31,8 +69,8 @@ module Make (Lexer: Lexer.S with module Token := LexToken)
|
|||||||
(* The parser has suspended itself because of a syntax error. Stop. *)
|
(* The parser has suspended itself because of a syntax error. Stop. *)
|
||||||
|
|
||||||
type message = string
|
type message = string
|
||||||
type valid = Lexer.token
|
type valid = Parser.token
|
||||||
type invalid = Lexer.token
|
type invalid = Parser.token
|
||||||
type error = message * valid option * invalid
|
type error = message * valid option * invalid
|
||||||
|
|
||||||
exception Point of error
|
exception Point of error
|
||||||
@ -48,7 +86,7 @@ module Make (Lexer: Lexer.S with module Token := LexToken)
|
|||||||
|
|
||||||
(* The two Menhir APIs are called from the following two functions. *)
|
(* The two Menhir APIs are called from the following two functions. *)
|
||||||
|
|
||||||
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
|
let incr_contract Lexer.{read; buffer; get_win; close; _} : Parser.ast =
|
||||||
let supplier = I.lexer_lexbuf_to_supplier read buffer
|
let supplier = I.lexer_lexbuf_to_supplier read buffer
|
||||||
and failure = failure get_win in
|
and failure = failure get_win in
|
||||||
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
|
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
|
||||||
@ -60,21 +98,21 @@ module Make (Lexer: Lexer.S with module Token := LexToken)
|
|||||||
(* Errors *)
|
(* Errors *)
|
||||||
|
|
||||||
let format_error ?(offsets=true) mode (msg, valid_opt, invalid) =
|
let format_error ?(offsets=true) mode (msg, valid_opt, invalid) =
|
||||||
let invalid_region = LexToken.to_region invalid in
|
let invalid_region = Lexer.Token.to_region invalid in
|
||||||
let header =
|
let header =
|
||||||
"Parse error " ^ invalid_region#to_string ~offsets mode in
|
"Parse error " ^ invalid_region#to_string ~offsets mode in
|
||||||
let trailer =
|
let trailer =
|
||||||
match valid_opt with
|
match valid_opt with
|
||||||
None ->
|
None ->
|
||||||
if LexToken.is_eof invalid then ""
|
if Lexer.Token.is_eof invalid then ""
|
||||||
else let invalid_lexeme = LexToken.to_lexeme invalid in
|
else let invalid_lexeme = Lexer.Token.to_lexeme invalid in
|
||||||
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
Printf.sprintf ", before \"%s\"" invalid_lexeme
|
||||||
| Some valid ->
|
| Some valid ->
|
||||||
let valid_lexeme = LexToken.to_lexeme valid in
|
let valid_lexeme = Lexer.Token.to_lexeme valid in
|
||||||
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
|
||||||
if LexToken.is_eof invalid then s
|
if Lexer.Token.is_eof invalid then s
|
||||||
else
|
else
|
||||||
let invalid_lexeme = LexToken.to_lexeme invalid in
|
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
|
||||||
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
|
||||||
let header = header ^ trailer in
|
let header = header ^ trailer in
|
||||||
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
|
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
|
src/passes/1-parser/shared/ParserAPI.mli (new file, 60 lines)
@@ -0,0 +1,60 @@
(* Generic parser API for LIGO *)

module type PARSER =
  sig
    (* The type of tokens. *)

    type token
    type ast
    type expr

    (* This exception is raised by the monolithic API functions. *)

    exception Error

    (* The monolithic API. *)

    val interactive_expr : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr
    val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast

    (* The incremental API. *)

    module MenhirInterpreter :
      sig
        include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
                with type token = token
      end

    (* The entry point(s) to the incremental API. *)

    module Incremental :
      sig
        val interactive_expr :
          Lexing.position -> expr MenhirInterpreter.checkpoint
        val contract :
          Lexing.position -> ast MenhirInterpreter.checkpoint
      end
  end

module Make (Lexer: Lexer.S)
            (Parser: PARSER with type token = Lexer.Token.token)
            (ParErr: sig val message : int -> string end) :
  sig
    (* Monolithic and incremental APIs of Menhir for parsing *)

    val mono_contract :
      (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> Parser.ast
    val incr_contract :
      Lexer.instance -> Parser.ast

    (* Error handling *)

    type message = string
    type valid   = Parser.token
    type invalid = Parser.token
    type error   = message * valid option * invalid

    exception Point of error

    val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
  end
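As a reading aid, this is roughly how the per-syntax drivers above consume the shared API and surface a syntax error; [Lexer], [Parser], [ParErr] and [lexer_inst] stand for the modules and values of one concrete syntax (for example the CameLIGO ones), and the message wording in the comment paraphrases format_error as defined in ParserAPI.ml, for illustration only.

(* Sketch only: the surrounding modules are assumptions of this example,
   not new definitions introduced by the commit. *)

module Front = ParserAPI.Make (Lexer) (Parser) (ParErr)

let parse lexer_inst : (Parser.ast, string) result =
  try Ok (Front.incr_contract lexer_inst) with
    Front.Point err ->
      (* format_error builds something like:
           Parse error <region>, after "<valid>" and before "<invalid>":
           <hint returned by ParErr.message for the failing state>       *)
      Error (Front.format_error ~offsets:true `Point err)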
@@ -4,15 +4,17 @@
 (name parser_shared)
 (public_name ligo.parser.shared)
 (libraries
+  menhirLib
   simple-utils
   uutf
   getopt
-  zarith
-)
+  zarith)
 (preprocess
-  (pps bisect_ppx --conditional)
-)
+  (pps bisect_ppx --conditional))
 (modules
+  LexerUnit
+  ParserUnit
+  ParserAPI
   Lexer
   LexerLog
   Utils