[WIP] Adding the infrastructure for supporting the incremental API of Menhir.

* I added the CLI option "--mono" to select the monolithic API of Menhir.
* I added a field "win" to the state of the lexer (a two-token
  window for error reporting).
* I escaped LIGO strings before making them OCaml strings (for
  example, for printing).
This commit is contained in:
Christian Rinderknecht 2019-12-20 16:44:03 +01:00
parent c46bf008c8
commit 072dea757c
18 changed files with 1539 additions and 152 deletions

View File

@ -201,7 +201,7 @@ let to_lexeme = function
| Int i
| Nat i
| Mutez i -> fst i.Region.value
| String s -> s.Region.value
| String s -> String.escaped s.Region.value
| Bytes b -> fst b.Region.value
| Begin _ -> "begin"

View File

@ -0,0 +1,416 @@
(* This file was auto-generated based on "Parser.msg". *)
(* Please note that the function [message] can raise [Not_found]. *)

(* [message s] maps an LR error-state number [s] to its diagnostic text.
   Every state known so far still carries the generator's placeholder
   message, so the individual cases are collapsed into one or-pattern;
   any state outside this set raises [Not_found], exactly as before. *)
let message = function
    0 | 1 | 2 | 3 | 4 | 5 | 7 | 49 | 51 | 52 | 53 | 18 | 8 | 9 | 10
  | 42 | 43 | 46 | 47 | 33 | 459 | 27 | 31 | 28 | 35 | 12 | 16 | 6
  | 13 | 61 | 133 | 372 | 374 | 134 | 136 | 137 | 153 | 373 | 63
  | 142 | 143 | 128 | 145 | 72 | 94 | 106 | 95 | 108 | 109 | 110
  | 73 | 91 | 93 | 92 | 90 | 77 | 78 | 65 | 66 | 67 | 120 | 121
  | 124 | 125 | 147 | 148 | 149 | 157 | 156 | 462 | 464 | 216 | 241
  | 218 | 220 | 214 | 225 | 254 | 255 | 242 | 263 | 227 | 256 | 257
  | 265 | 267 | 269 | 271 | 273 | 192 | 258 | 284 | 287 | 244 | 292
  | 261 | 160 | 164 | 428 | 331 | 312 | 430 | 314 | 315 | 316 | 431
  | 444 | 445 | 432 | 433 | 434 | 435 | 436 | 437 | 439 | 327 | 329
  | 333 | 330 | 328 | 339 | 340 | 341 | 342 | 343 | 344 | 366 | 345
  | 347 | 440 | 442 | 446 | 429 | 311 | 427 | 165 | 167 | 168 | 169
  | 163 | 447 | 449 | 450 | 166 | 234 | 235 | 238 | 239 | 424 | 170
  | 171 | 172 | 417 | 418 | 421 | 422 | 174 | 303 | 304 | 404 | 411
  | 403 | 305 | 307 | 319 | 320 | 321 | 322 | 323 | 324 | 325 | 326
  | 377 | 378 | 380 | 334 | 309 | 306 | 394 | 395 | 396 | 397 | 398
  | 399 | 407 | 400 | 402 | 175 | 176 | 179 | 180 | 183 | 301 | 299
  | 185 | 187 | 188 | 189 | 190 | 194 | 213 | 193 | 209 ->
      "<YOUR SYNTAX ERROR MESSAGE HERE>\n"
  | _ -> raise Not_found

View File

@ -28,13 +28,46 @@ module type PARSER =
end
end
(* Errors *)
module type PAR_ERR =
sig
val message : int -> string (* From error states to messages *)
end
let format_error ?(offsets=true) mode Region.{region; value} ~file =
let reg = region#to_string ~file ~offsets mode in
Printf.sprintf "\027[31mParse error %s:\n%s\027[0m%!" reg value
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) =
(Parser: PARSER with type token = Lexer.Token.token)
(ParErr: PAR_ERR) =
struct
type message = string
type valid = Lexer.token
type invalid = Lexer.token
exception Point of message * valid option * invalid
module I = Parser.MenhirInterpreter
module S = MenhirLib.General (* Streams *)
(* The call [stack checkpoint] extracts the parser's stack out of
a checkpoint. *)
let stack = function
I.HandlingError env -> I.stack env
| _ -> assert false
(* The call [state checkpoint] extracts the number of the current
state out of a parser checkpoint. *)
let state checkpoint : int =
match Lazy.force (stack checkpoint) with
S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *)
| S.Cons (I.Element (s,_,_,_),_) -> I.number s
(* The parser has successfully produced a semantic value. *)
@ -42,14 +75,24 @@ module Make (Lexer: Lexer.S)
(* The parser has suspended itself because of a syntax error. Stop. *)
let fail _checkpoint = raise Parser.Error
(* let fail _checkpoint = raise Parser.Error *)
let failure get_win checkpoint =
let message = ParErr.message (state checkpoint) in
match get_win () with
Lexer.Nil -> assert false
| Lexer.One invalid ->
raise (Point (message, None, invalid))
| Lexer.Two (invalid, valid) ->
raise (Point (message, Some valid, invalid))
(* The generic parsing function *)
let incr_contract Lexer.{read; buffer; close; _} : AST.t =
let supplier = I.lexer_lexbuf_to_supplier read buffer in
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
let supplier = I.lexer_lexbuf_to_supplier read buffer
and failure = failure get_win in
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
let ast = I.loop_handle success fail supplier parser
let ast = I.loop_handle success failure supplier parser
in close (); ast
let mono_contract = Parser.contract

View File

@ -29,11 +29,29 @@ module type PARSER =
end
(* Errors *)
module type PAR_ERR =
sig
val message : int -> string (* From error states to messages *)
end
val format_error :
?offsets:bool -> [`Byte | `Point] ->
string Region.reg -> file:bool -> string
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) :
(Parser: PARSER with type token = Lexer.Token.token)
(ParErr: PAR_ERR) :
sig
type message = string
type valid = Lexer.token
type invalid = Lexer.token
exception Point of message * valid option * invalid
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
val incr_contract : Lexer.instance -> AST.t
end

View File

@ -1,16 +1,14 @@
(** Driver for the parser of CameLIGO *)
(** Driver for the CameLIGO parser *)
let extension = ".mligo"
let options = EvalOpt.read "CameLIGO" extension
open Printf
(** Error printing and exception tracing
*)
let () = Printexc.record_backtrace true
(** Auxiliary functions
*)
let sprintf = Printf.sprintf
(** Extracting the input file
*)
let file =
@ -23,17 +21,7 @@ let file =
let () = Printexc.record_backtrace true
let external_ text =
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
type error = SyntaxError
let error_to_string = function
SyntaxError -> "Syntax error.\n"
let print_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value
and reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
Utils.highlight (sprintf "External error: %s" text); exit 1;;
(** {1 Preprocessing the input source and opening the input channels} *)
@ -42,7 +30,7 @@ let print_error ?(offsets=true) mode Region.{region; value} ~file =
let lib_path =
match options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
@ -61,26 +49,26 @@ let pp_input =
let cpp_cmd =
match options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () =
if Utils.String.Set.mem "cpp" options#verbose
then Printf.eprintf "%s\n%!" cpp_cmd;
then eprintf "%s\n%!" cpp_cmd;
if Sys.command cpp_cmd <> 0 then
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
(** {1 Instanciating the lexer} *)
module Lexer = Lexer.Make (LexToken)
module Log = LexerLog.Make (Lexer)
module ParserFront = ParserAPI.Make (Lexer) (Parser)
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
let lexer_inst = Lexer.open_token_stream (Some pp_input)
let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
and cout = stdout
@ -97,10 +85,10 @@ let tokeniser = read ~log
let () =
try
(* The incremental API *)
let ast = ParserFront.incr_contract lexer_inst in
(* The monolithic API *)
(* let ast = ParserFront.mono_contract tokeniser buffer in *)
let ast =
if options#mono
then ParserFront.mono_contract tokeniser buffer
else ParserFront.incr_contract lexer_inst in
if Utils.String.Set.mem "ast" options#verbose
then let buffer = Buffer.create 131 in
let state = ParserLog.mk_state
@ -122,14 +110,52 @@ let () =
Buffer.output_buffer stdout buffer
end
with
(* Lexing errors *)
Lexer.Error err ->
close_all ();
Lexer.print_error ~offsets:options#offsets
let msg =
Lexer.format_error ~offsets:options#offsets
options#mode err ~file
| Parser.Error ->
let region = get_last () in
let error = Region.{region; value=SyntaxError} in
in prerr_string msg
(* Incremental API of Menhir *)
| ParserFront.Point (message, valid_opt, invalid) ->
let () = close_all () in
print_error ~offsets:options#offsets
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
let invalid_region = Lexer.Token.to_region invalid in
let header =
"Parse error " ^
invalid_region#to_string ~offsets:options#offsets
options#mode in
let after =
match valid_opt with
None -> ","
| Some valid ->
let valid_lexeme = Lexer.Token.to_lexeme valid
in sprintf ", after \"%s\" and" valid_lexeme in
let header = header ^ after in
let before = sprintf " before \"%s\"" invalid_lexeme in
let header = header ^ before in
eprintf "\027[31m%s:\n%s\027[0m%!" header message
(* Monolithic API of Menhir *)
| Parser.Error ->
let () = close_all () in
let token =
match get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *)
| Lexer.One token
| Lexer.Two (token, _) -> token in
let lexeme = Lexer.Token.to_lexeme token
and region = Lexer.Token.to_region token in
let msg = sprintf "Syntax error on \"%s\".\n" lexeme in
let error = Region.{region; value=msg} in
let () = close_all () in
let msg =
ParserAPI.format_error ~offsets:options#offsets
options#mode error ~file
in prerr_string msg
(* I/O errors *)
| Sys_error msg -> Utils.highlight msg

View File

@ -224,7 +224,7 @@ let proj_token = function
let to_lexeme = function
(* Literals *)
String s -> s.Region.value
String s -> String.escaped s.Region.value
| Bytes b -> fst b.Region.value
| Int i
| Nat i

View File

@ -0,0 +1,514 @@
(* This file was auto-generated based on "Parser.msg". *)
(* Please note that the function [message] can raise [Not_found]. *)

(* [message s] maps an LR error-state number [s] to its diagnostic text.
   Every state known so far still carries the generator's placeholder
   message, so the individual cases are collapsed into one or-pattern;
   any state outside this set raises [Not_found], exactly as before. *)
let message = function
    0 | 1 | 2 | 3 | 25 | 5 | 7 | 8 | 48 | 9 | 10 | 52 | 53 | 56 | 57
  | 59 | 11 | 12 | 20 | 21 | 13 | 6 | 61 | 34 | 15 | 64 | 517 | 29
  | 32 | 515 | 35 | 26 | 39 | 27 | 18 | 67 | 70 | 71 | 72 | 73 | 80
  | 81 | 76 | 77 | 78 | 85 | 86 | 87 | 88 | 512 | 358 | 359 | 499
  | 362 | 360 | 361 | 363 | 364 | 365 | 366 | 367 | 475 | 476 | 477
  | 478 | 496 | 503 | 502 | 371 | 372 | 373 | 374 | 378 | 380 | 382
  | 383 | 387 | 384 | 385 | 389 | 390 | 391 | 393 | 395 | 399 | 396
  | 397 | 375 | 381 | 404 | 405 | 406 | 492 | 493 | 494 | 407 | 488
  | 408 | 452 | 447 | 453 | 409 | 410 | 416 | 420 | 421 | 411 | 424
  | 425 | 426 | 413 | 415 | 435 | 436 | 437 | 440 | 441 | 469 | 470
  | 473 | 472 | 438 | 467 | 439 | 69 | 428 | 429 | 430 | 431 | 432
  | 508 | 521 | 159 | 523 | 137 | 150 | 166 | 167 | 158 | 173 | 152
  | 168 | 169 | 175 | 177 | 179 | 181 | 183 | 160 | 170 | 157 | 163
  | 187 | 92 | 318 | 319 | 322 | 323 | 356 | 351 | 353 | 93 | 94
  | 338 | 95 | 96 | 342 | 343 | 346 | 347 | 349 | 97 | 136 | 101
  | 195 | 196 | 198 | 199 | 202 | 203 | 334 | 329 | 331 | 102 | 103
  | 326 | 312 | 314 | 104 | 308 | 306 | 309 | 310 | 304 | 134 | 106
  | 296 | 297 | 298 | 299 | 300 | 107 | 108 | 285 | 286 | 132 | 155
  | 288 | 291 | 292 | 128 | 110 | 113 | 208 | 209 | 247 | 271 | 248
  | 250 | 251 | 272 | 278 | 277 | 281 | 280 | 218 | 261 | 262 | 265
  | 266 | 269 | 255 | 257 | 219 | 244 | 245 | 253 | 241 | 210 | 275
  | 211 | 223 | 224 | 240 | 225 | 226 | 234 | 114 | 118 | 206 | 119
  | 125 ->
      "<YOUR SYNTAX ERROR MESSAGE HERE>\n"
  | _ -> raise Not_found

View File

@ -28,13 +28,46 @@ module type PARSER =
end
end
(* Errors *)
module type PAR_ERR =
sig
val message : int -> string (* From error states to messages *)
end
let format_error ?(offsets=true) mode Region.{region; value} ~file =
let reg = region#to_string ~file ~offsets mode in
Printf.sprintf "\027[31mParse error %s:\n%s\027[0m%!" reg value
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) =
(Parser: PARSER with type token = Lexer.Token.token)
(ParErr: PAR_ERR) =
struct
type message = string
type valid = Lexer.token
type invalid = Lexer.token
exception Point of message * valid option * invalid
module I = Parser.MenhirInterpreter
module S = MenhirLib.General (* Streams *)
(* The call [stack checkpoint] extracts the parser's stack out of
a checkpoint. *)
let stack = function
I.HandlingError env -> I.stack env
| _ -> assert false
(* The call [state checkpoint] extracts the number of the current
state out of a parser checkpoint. *)
let state checkpoint : int =
match Lazy.force (stack checkpoint) with
S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *)
| S.Cons (I.Element (s,_,_,_),_) -> I.number s
(* The parser has successfully produced a semantic value. *)
@ -42,14 +75,22 @@ module Make (Lexer: Lexer.S)
(* The parser has suspended itself because of a syntax error. Stop. *)
let fail _checkpoint = raise Parser.Error
let failure get_win checkpoint =
let message = ParErr.message (state checkpoint) in
match get_win () with
Lexer.Nil -> assert false
| Lexer.One invalid ->
raise (Point (message, None, invalid))
| Lexer.Two (invalid, valid) ->
raise (Point (message, Some valid, invalid))
(* The generic parsing function *)
let incr_contract Lexer.{read; buffer; close; _} : AST.t =
let supplier = I.lexer_lexbuf_to_supplier read buffer in
let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t =
let supplier = I.lexer_lexbuf_to_supplier read buffer
and failure = failure get_win in
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
let ast = I.loop_handle success fail supplier parser
let ast = I.loop_handle success failure supplier parser
in close (); ast
let mono_contract = Parser.contract

View File

@ -29,11 +29,29 @@ module type PARSER =
end
(* Errors *)
module type PAR_ERR =
sig
val message : int -> string (* From error states to messages *)
end
val format_error :
?offsets:bool -> [`Byte | `Point] ->
string Region.reg -> file:bool -> string
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) :
(Parser: PARSER with type token = Lexer.Token.token)
(ParErr: PAR_ERR) :
sig
type message = string
type valid = Lexer.token
type invalid = Lexer.token
exception Point of message * valid option * invalid
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
val incr_contract : Lexer.instance -> AST.t
end

View File

@ -1,16 +1,14 @@
(** Driver for the parser of PascaLIGO *)
(** Driver for the PascaLIGO parser *)
let extension = ".ligo"
let options = EvalOpt.read "PascaLIGO" extension
open Printf
(** Error printing and exception tracing
*)
let () = Printexc.record_backtrace true
(** Auxiliary functions
*)
let sprintf = Printf.sprintf
(** Extracting the input file
*)
let file =
@ -23,17 +21,7 @@ let file =
let () = Printexc.record_backtrace true
let external_ text =
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
type error = SyntaxError
let error_to_string = function
SyntaxError -> "Syntax error.\n"
let print_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value
and reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
Utils.highlight (sprintf "External error: %s" text); exit 1;;
(** {1 Preprocessing the input source and opening the input channels} *)
@ -42,7 +30,7 @@ let print_error ?(offsets=true) mode Region.{region; value} ~file =
let lib_path =
match options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
@ -61,26 +49,26 @@ let pp_input =
let cpp_cmd =
match options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () =
if Utils.String.Set.mem "cpp" options#verbose
then Printf.eprintf "%s\n%!" cpp_cmd;
then eprintf "%s\n%!" cpp_cmd;
if Sys.command cpp_cmd <> 0 then
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
(** {1 Instantiating the lexer} *)
module Lexer = Lexer.Make (LexToken)
module Log = LexerLog.Make (Lexer)
module ParserFront = ParserAPI.Make (Lexer) (Parser)
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
let lexer_inst = Lexer.open_token_stream (Some pp_input)
let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
and cout = stdout
@ -97,10 +85,10 @@ let tokeniser = read ~log
let () =
try
(* The incremental API *)
let ast = ParserFront.incr_contract lexer_inst in
(* The monolithic API *)
(* let ast = ParserFront.mono_contract tokeniser buffer in *)
let ast =
if options#mono
then ParserFront.mono_contract tokeniser buffer
else ParserFront.incr_contract lexer_inst in
if Utils.String.Set.mem "ast" options#verbose
then let buffer = Buffer.create 131 in
let state = ParserLog.mk_state
@ -122,14 +110,52 @@ let () =
Buffer.output_buffer stdout buffer
end
with
(* Lexing errors *)
Lexer.Error err ->
close_all ();
Lexer.print_error ~offsets:options#offsets
let msg =
Lexer.format_error ~offsets:options#offsets
options#mode err ~file
| Parser.Error ->
let region = get_last () in
let error = Region.{region; value=SyntaxError} in
in prerr_string msg
(* Incremental API of Menhir *)
| ParserFront.Point (message, valid_opt, invalid) ->
let () = close_all () in
print_error ~offsets:options#offsets
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
let invalid_region = Lexer.Token.to_region invalid in
let header =
"Parse error " ^
invalid_region#to_string ~offsets:options#offsets
options#mode in
let after =
match valid_opt with
None -> ","
| Some valid ->
let valid_lexeme = Lexer.Token.to_lexeme valid
in sprintf ", after \"%s\" and" valid_lexeme in
let header = header ^ after in
let before = sprintf " before \"%s\"" invalid_lexeme in
let header = header ^ before in
eprintf "\027[31m%s:\n%s\027[0m%!" header message
(* Monolithic API of Menhir *)
| Parser.Error ->
let () = close_all () in
let token =
match get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *)
| Lexer.One token
| Lexer.Two (token, _) -> token in
let lexeme = Lexer.Token.to_lexeme token
and region = Lexer.Token.to_region token in
let msg = sprintf "Syntax error on \"%s\".\n" lexeme in
let error = Region.{region; value=msg} in
let () = close_all () in
let msg =
ParserAPI.format_error ~offsets:options#offsets
options#mode error ~file
in prerr_string msg
(* I/O errors *)
| Sys_error msg -> Utils.highlight msg

View File

@ -1,16 +1,14 @@
(** Driver for the LIGO parser *)
(** Driver for the Reason LIGO parser *)
let extension = ".religo"
let options = EvalOpt.read "ReasonLIGO" extension
open Printf
(** Error printing and exception tracing
*)
let () = Printexc.record_backtrace true
(** Auxiliary functions
*)
let sprintf = Printf.sprintf
(** Extracting the input file
*)
let file =
@ -23,17 +21,7 @@ let file =
let () = Printexc.record_backtrace true
let external_ text =
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
type error = SyntaxError
let error_to_string = function
SyntaxError -> "Syntax error.\n"
let print_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value
and reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
Utils.highlight (sprintf "External error: %s" text); exit 1;;
(** {1 Preprocessing the input source and opening the input channels} *)
@ -42,7 +30,7 @@ let print_error ?(offsets=true) mode Region.{region; value} ~file =
let lib_path =
match options#libs with
[] -> ""
| libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
@ -61,26 +49,26 @@ let pp_input =
let cpp_cmd =
match options#input with
None | Some "-" ->
Printf.sprintf "cpp -traditional-cpp%s - > %s"
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
Printf.sprintf "cpp -traditional-cpp%s %s > %s"
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () =
if Utils.String.Set.mem "cpp" options#verbose
then Printf.eprintf "%s\n%!" cpp_cmd;
then eprintf "%s\n%!" cpp_cmd;
if Sys.command cpp_cmd <> 0 then
external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd)
external_ (sprintf "the command \"%s\" failed." cpp_cmd)
(** {1 Instantiating the lexer} *)
module Lexer = Lexer.Make (LexToken)
module Log = LexerLog.Make (Lexer)
module ParserFront = ParserAPI.Make (Lexer) (Parser)
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
let lexer_inst = Lexer.open_token_stream (Some pp_input)
let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
and cout = stdout
@ -97,10 +85,10 @@ let tokeniser = read ~log
let () =
try
(* The incremental API *)
let ast = ParserFront.incr_contract lexer_inst in
(* The monolithic API *)
(* let ast = ParserFront.mono_contract tokeniser buffer in *)
let ast =
if options#mono
then ParserFront.mono_contract tokeniser buffer
else ParserFront.incr_contract lexer_inst in
if Utils.String.Set.mem "ast" options#verbose
then let buffer = Buffer.create 131 in
let state = ParserLog.mk_state
@ -122,14 +110,52 @@ let () =
Buffer.output_buffer stdout buffer
end
with
(* Lexing errors *)
Lexer.Error err ->
close_all ();
Lexer.print_error ~offsets:options#offsets
let msg =
Lexer.format_error ~offsets:options#offsets
options#mode err ~file
| Parser.Error ->
let region = get_last () in
let error = Region.{region; value=SyntaxError} in
in prerr_string msg
(* Incremental API of Menhir *)
| ParserFront.Point (message, valid_opt, invalid) ->
let () = close_all () in
print_error ~offsets:options#offsets
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
let invalid_region = Lexer.Token.to_region invalid in
let header =
"Parse error " ^
invalid_region#to_string ~offsets:options#offsets
options#mode in
let after =
match valid_opt with
None -> ","
| Some valid ->
let valid_lexeme = Lexer.Token.to_lexeme valid
in sprintf ", after \"%s\" and" valid_lexeme in
let header = header ^ after in
let before = sprintf " before \"%s\"" invalid_lexeme in
let header = header ^ before in
eprintf "\027[31m%s:\n%s\027[0m%!" header message
(* Monolithic API of Menhir *)
| Parser.Error ->
let () = close_all () in
let token =
match get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *)
| Lexer.One token
| Lexer.Two (token, _) -> token in
let lexeme = Lexer.Token.to_lexeme token
and region = Lexer.Token.to_region token in
let msg = sprintf "Syntax error on \"%s\".\n" lexeme in
let error = Region.{region; value=msg} in
let () = close_all () in
let msg =
ParserAPI.format_error ~offsets:options#offsets
options#mode error ~file
in prerr_string msg
(* I/O errors *)
| Sys_error msg -> Utils.highlight msg

View File

@ -13,10 +13,11 @@ type options = <
verbose : Utils.String.Set.t;
offsets : bool;
mode : [`Byte | `Point];
cmd : command
cmd : command;
mono : bool
>
let make ~input ~libs ~verbose ~offsets ~mode ~cmd =
let make ~input ~libs ~verbose ~offsets ~mode ~cmd ~mono =
object
method input = input
method libs = libs
@ -24,6 +25,7 @@ let make ~input ~libs ~verbose ~offsets ~mode ~cmd =
method offsets = offsets
method mode = mode
method cmd = cmd
method mono = mono
end
(** {1 Auxiliary functions} *)
@ -49,6 +51,7 @@ let help language extension () =
print " -q, --quiet No output, except errors (default)";
print " --columns Columns for source locations";
print " --bytes Bytes for source locations";
print " --mono Use Menhir monolithic API";
print " --verbose=<stages> cmdline, cpp, ast-tokens, ast (colon-separated)";
print " --version Commit hash on stdout";
print " -h, --help This help";
@ -70,6 +73,7 @@ and verbose = ref Utils.String.Set.empty
and input = ref None
and libs = ref []
and verb_str = ref ""
and mono = ref false
let split_at_colon = Str.(split (regexp ":"))
@ -89,6 +93,7 @@ let specs language extension =
'q', "quiet", set quiet true, None;
noshort, "columns", set columns true, None;
noshort, "bytes", set bytes true, None;
noshort, "mono", set mono true, None;
noshort, "verbose", None, Some add_verbose;
'h', "help", Some (help language extension), None;
noshort, "version", Some version, None
@ -124,6 +129,7 @@ let print_opt () =
printf "quiet = %b\n" !quiet;
printf "columns = %b\n" !columns;
printf "bytes = %b\n" !bytes;
printf "mono = %b\b" !mono;
printf "verbose = %s\n" !verb_str;
printf "input = %s\n" (string_of quote !input);
printf "libs = %s\n" (string_of_path !libs)
@ -151,6 +157,7 @@ let check extension =
and quiet = !quiet
and offsets = not !columns
and mode = if !bytes then `Byte else `Point
and mono = !mono
and verbose = !verbose
and libs = !libs in
@ -164,6 +171,7 @@ let check extension =
printf "quiet = %b\n" quiet;
printf "offsets = %b\n" offsets;
printf "mode = %s\n" (if mode = `Byte then "`Byte" else "`Point");
printf "mono = %b\n" mono;
printf "verbose = %s\n" !verb_str;
printf "input = %s\n" (string_of quote input);
printf "libs = %s\n" (string_of_path libs)
@ -178,7 +186,7 @@ let check extension =
| false, false, false, true -> Tokens
| _ -> abort "Choose one of -q, -c, -u, -t."
in make ~input ~libs ~verbose ~offsets ~mode ~cmd
in make ~input ~libs ~verbose ~offsets ~mode ~cmd ~mono
(** {1 Parsing the command-line options} *)

View File

@ -49,7 +49,8 @@ type options = <
verbose : Utils.String.Set.t;
offsets : bool;
mode : [`Byte | `Point];
cmd : command
cmd : command;
mono : bool
>
val make :
@ -58,7 +59,9 @@ val make :
verbose:Utils.String.Set.t ->
offsets:bool ->
mode:[`Byte | `Point] ->
cmd:command -> options
cmd:command ->
mono:bool ->
options
(** Parsing the command-line options on stdin. The first parameter is
the name of the concrete syntax, e.g., "pascaligo", and the second

View File

@ -124,9 +124,17 @@ module type S =
type file_path = string
type logger = Markup.t list -> token -> unit
type window =
Nil
| One of token
| Two of token * token
val slide : token -> window -> window
type instance = {
read : ?log:logger -> Lexing.lexbuf -> token;
buffer : Lexing.lexbuf;
get_win : unit -> window;
get_pos : unit -> Pos.t;
get_last : unit -> Region.t;
close : unit -> unit
@ -140,9 +148,9 @@ module type S =
exception Error of error Region.reg
val print_error :
val format_error :
?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> unit
error Region.reg -> file:bool -> string
end

View File

@ -140,16 +140,25 @@ module type TOKEN =
(* The module type for lexers is [S]. *)
module type S = sig
module type S =
sig
module Token : TOKEN
type token = Token.token
type file_path = string
type logger = Markup.t list -> token -> unit
type window =
Nil
| One of token
| Two of token * token
val slide : token -> window -> window
type instance = {
read : ?log:logger -> Lexing.lexbuf -> token;
buffer : Lexing.lexbuf;
get_win : unit -> window;
get_pos : unit -> Pos.t;
get_last : unit -> Region.t;
close : unit -> unit
@ -160,11 +169,12 @@ module type S = sig
(* Error reporting *)
type error
exception Error of error Region.reg
val print_error : ?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> unit
end
val format_error : ?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> string
end
(* The functorised interface
@ -209,7 +219,27 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
(* STATE *)
(* Beyond tokens, the result of lexing is a state. The type
(** The type [window] models a two-slot buffer of tokens for
reporting after a parse error.
In [Two(t1,t2)], the token [t2] is the next to be sent to the
parser.
The call [slide token window] pushes the token [token] in the
window [window]. If the window is full, that is, it is
[Two(t1,t2)], then the token [t2] is discarded to make room for
[token].
*)
type window =
  Nil
| One of token
| Two of token * token

(* [slide token win] records [token] as the most recent token of the
   window [win], keeping at most one older token. *)
let slide new_token win =
  match win with
    Nil           -> One new_token
  | One last      -> Two (new_token, last)
  | Two (last, _) -> Two (new_token, last)
(** Beyond tokens, the result of lexing is a state. The type
[state] represents the logical state of the lexing engine, that
is, a value which is threaded during scanning and which denotes
useful, high-level information beyond what the type
@ -235,6 +265,10 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
updated after a single character has been matched: that depends
on the regular expression that matched the lexing buffer.
The field [window] is a two-token window, that is, a buffer that
contains the last recognised token, and the penultimate (if
any).
The fields [decoder] and [supply] offer the support needed
for the lexing of UTF-8 encoded characters in comments (the
only place where they are allowed in LIGO). The former is the
@ -243,10 +277,10 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
it to [decoder]. See the documentation of the third-party
library Uutf.
*)
type state = {
units : (Markup.t list * token) FQueue.t;
markup : Markup.t list;
window : window;
last : Region.t;
pos : Pos.t;
decoder : Uutf.decoder;
@ -398,10 +432,10 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
exception Error of error Region.reg
let print_error ?(offsets=true) mode Region.{region; value} ~file =
let format_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value in
let reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Lexical error %s:\n%s%!" reg msg)
sprintf "\027[31mLexical error %s:\n%s\027[0m%!" reg msg
let fail region value = raise (Error Region.{region; value})
@ -801,6 +835,7 @@ type logger = Markup.t list -> token -> unit
type instance = {
read : ?log:logger -> Lexing.lexbuf -> token;
buffer : Lexing.lexbuf;
get_win : unit -> window;
get_pos : unit -> Pos.t;
get_last : unit -> Region.t;
close : unit -> unit
@ -817,13 +852,15 @@ let open_token_stream file_path_opt =
let supply = Uutf.Manual.src decoder in
let state = ref {units = FQueue.empty;
last = Region.ghost;
window = Nil;
pos;
markup = [];
decoder;
supply} in
let get_pos () = !state.pos
and get_last () = !state.last in
and get_last () = !state.last
and get_win () = !state.window in
let patch_buffer (start, stop) buffer =
let open Lexing in
@ -880,7 +917,9 @@ let open_token_stream file_path_opt =
read_token ~log buffer
| Some (units, (left_mark, token)) ->
log left_mark token;
state := {!state with units; last = Token.to_region token};
state := {!state with units;
last = Token.to_region token;
window = slide token !state.window};
check_right_context token buffer;
patch_buffer (Token.to_region token)#byte_pos buffer;
token in
@ -893,7 +932,7 @@ let open_token_stream file_path_opt =
None | Some "-" -> ()
| Some file_path -> reset ~file:file_path buffer
and close () = close_in cin in
{read = read_token; buffer; get_pos; get_last; close}
{read = read_token; buffer; get_win; get_pos; get_last; close}
end (* of functor [Make] in HEADER *)
(* END TRAILER *)

View File

@ -66,7 +66,9 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
match file_path_opt with
None | Some "-" -> false
| Some _ -> true in
Lexer.print_error ~offsets mode e ~file;
let msg =
Lexer.format_error ~offsets mode e ~file
in prerr_string msg;
close_all ()
in iter ()
with Sys_error msg -> Utils.highlight (sprintf "%s\n" msg)

View File

@ -4,7 +4,7 @@
# Menhir and generates minimal inputs that cover all of them and only
# them.
set -x
# set -x
# ====================================================================
# General Settings and wrappers

199
vendors/ligo-utils/simple-utils/par_err.sh vendored Executable file
View File

@ -0,0 +1,199 @@
#!/bin/sh
# This script calls Menhir with a message file, which generates the
# corresponding OCaml file.
# set -x
# ====================================================================
# General Settings and wrappers
# Base name of this script, used to prefix diagnostics.
script=$(basename $0)
# Conditional printers: silent when $quiet is "yes".
print_nl () { test "$quiet" != "yes" && echo "$1"; }
print () { test "$quiet" != "yes" && printf "$1"; }
# Report a fatal error and abort the script with status 1.
# NOTE(review): the first echo goes to stdout while the second goes
# to stderr — confirm whether both were meant for stderr.
fatal_error () {
echo "$script: fatal error:"
echo "$1" 1>&2
exit 1
}
# Emit a warning (honours $quiet) without aborting.
warn () {
print_nl "$script: warning:"
print_nl "$1"
}
# Print a red "FAILED" marker; $1 is appended, e.g. `failed ":"`.
failed () {
printf "\033[31mFAILED$1\033[0m\n"
}
# Print a message in red.
emphasise () {
printf "\033[31m$1\033[0m\n"
}
# Dump the contents of file $1 in red.
display () {
printf "\033[31m"; cat $1; printf "\033[0m"
}
# ====================================================================
# Parsing loop
#
# Walk the command line. Long options must use the "--opt=value"
# style; a bare "--opt" is recorded in $no_eq and rejected later.
# The single anonymous argument is the Menhir parser specification.
while : ; do
  case "$1" in
    "") break;;
    --par-tokens=*)
      if test -n "$par_tokens"; then
        fatal_error "Repeated option --par-tokens."; fi
      par_tokens=$(expr "$1" : "[^=]*=\(.*\)")
      ;;
    --par-tokens)
      no_eq=$1
      break
      ;;
    --lex-tokens=*)
      if test -n "$lex_tokens"; then
        fatal_error "Repeated option --lex-tokens."; fi
      lex_tokens=$(expr "$1" : "[^=]*=\(.*\)")
      ;;
    --lex-tokens)
      no_eq=$1
      break
      ;;
    --out=*)
      if test -n "$out"; then
        fatal_error "Repeated option --out."; fi
      out=$(expr "$1" : "[^=]*=\(.*\)")
      ;;
    --out)
      no_eq=$1
      break
      ;;
    -h | --help | -help)
      help=yes
      ;;
    # Invalid option
    #
    -*)
      fatal_error "Invalid option \"$1\"."
      ;;
    # Invalid argument
    #
    *)
      # Fix: the guard used to test "$parser_arg", a variable that is
      # never set, so a second specification was silently accepted.
      if test -n "$parser"; then
        fatal_error "Only one Menhir specification allowed."; fi
      parser=$1
  esac
  shift
done
# ====================================================================
# Help
#
# Print the usage on stdout and exit. Triggered by -h/--help.
usage () {
cat <<EOF
Usage: $(basename $0) [-h|--help]
--par-tokens=<par_tokens>.mly
--lex-tokens=<lex_tokens>.mli
--out=<par_err>.ml
<parser>.mly
Generates <par_err>.ml from <parser>.msg and the parser specification
(see messages.sh) in the current directory.
The following options, if given, must be given only once.
Display control:
-h, --help display this help and exit
Mandatory options:
--lex-tokens=<name>.mli the lexical tokens
--par-tokens=<name>.mly the syntactical tokens
--out=<par_err>.ml the generated OCaml source file
EOF
exit 1
}
if test "$help" = "yes"; then usage; fi
# ====================================================================
# Checking the command-line options and arguments and applying some of
# them.
# It is a common mistake to forget the "=" in GNU long-option style.
if test -n "$no_eq"
then
fatal_error "Long option style $no_eq must be followed by \"=\"."
fi
# Checking options
# All three options and the anonymous parser argument are mandatory.
if test -z "$parser"; then
fatal_error "No parser specification."; fi
if test -z "$par_tokens"; then
fatal_error "No syntactical tokens specification (use --par-tokens)."; fi
if test -z "$lex_tokens"; then
fatal_error "No lexical tokens specification (use --lex-tokens)."; fi
# The three input files must exist.
if test ! -e "$parser"; then
fatal_error "Parser specification \"$parser\" not found."; fi
if test ! -e "$lex_tokens"; then
fatal_error "Lexical tokens specification \"$lex_tokens\" not found."; fi
if test ! -e "$par_tokens"; then
fatal_error "Syntactical tokens specification \"$par_tokens\" not found."; fi
# Enforce the expected extensions (expr prints "0" on match failure).
parser_ext=$(expr "$parser" : ".*\.mly$")
if test "$parser_ext" = "0"; then
fatal_error "Parser specification must have extension \".mly\"."; fi
par_tokens_ext=$(expr "$par_tokens" : ".*\.mly$")
if test "$par_tokens_ext" = "0"; then
fatal_error "Syntactical tokens specification must have extension \".mly\"."
fi
lex_tokens_ext=$(expr "$lex_tokens" : ".*\.mli$")
if test "$lex_tokens_ext" = "0"; then
fatal_error "Lexical tokens specification must have extension \".mli\"."
fi
# Base names (without extension) used to build Menhir's flags below.
mly=$parser
parser_base=$(basename $mly .mly)
par_tokens_base=$(basename $par_tokens .mly)
lex_tokens_base=$(basename $lex_tokens .mli)
# Checking the presence of the messages
# The .msg file must sit next to the parser specification.
msg=$parser_base.msg
if test ! -e $msg; then
fatal_error "File $msg not found."; fi
# Checking the output file
if test -z "$out"; then
fatal_error "Output file missing (use --out)."; fi
# ====================================================================
# Menhir's flags
# --table selects the table back-end; --strict turns warnings into
# errors; --external-tokens names the module declaring the token
# type; --base sets the prefix of the generated modules.
flags="--table --strict --external-tokens $lex_tokens_base \
--base $parser_base $par_tokens"
# ===================================================================
# Generating source code from error messages
# Menhir's stderr is captured in a hidden file and displayed (in
# red) only on failure; on success the capture file is removed.
err=.$msg.err
printf "Making $out from $msg... "
menhir --compile-errors $msg $flags $mly > $out 2> $err
if test "$?" = "0"
then printf "done.\n"
rm -f $err
else failed ":"
display "$err"
fi