(* Embedding the LIGO lexer in a debug module *)

module Region = Simple_utils.Region
(** Signature of the lexer debug wrapper.

    It re-exports the underlying lexer and offers two entry points:
    one to print a single token together with the markup preceding
    it, and one to drive a whole lexing run while logging tokens. *)
module type S =
  sig
    module Lexer : Lexer.S

    (** [output_token ?offsets mode command channel markup token]
        prints [token] (and, depending on [command], the [markup]
        collected before it) on [channel]. The [offsets] flag and
        [mode] select how source positions are rendered. *)
    val output_token :
      ?offsets:bool -> [`Byte | `Point] ->
      EvalOpt.command -> out_channel ->
      Markup.t list -> Lexer.token -> unit

    type file_path = string

    (** [trace ?offsets mode file_path_opt command] runs the lexer
        on the given file ([None] means standard input), logging
        tokens on [stdout] according to [command]. Lexing errors are
        returned as a formatted message with its region. *)
    val trace :
      ?offsets:bool -> [`Byte | `Point] ->
      file_path option -> EvalOpt.command ->
      (unit, string Region.reg) Stdlib.result
  end
(** Functor building the debug wrapper from a lexer instance. *)
module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
  struct
    module Lexer = Lexer
    module Token = Lexer.Token

    (** Pretty-printing in a string the lexemes making up the markup
        between two tokens, concatenated with the last lexeme
        itself.

        The behaviour depends on [command]:
        {ul
        {li [Quiet]: print nothing;}
        {li [Tokens]: print the abstract token only;}
        {li [Copy]: print the concrete lexemes (markup then token),
            reproducing the input text;}
        {li [Units]: print the abstract form of each piece of markup
            and of the token, one per line.}} *)
    let output_token ?(offsets=true) mode command
                     channel left_mark token : unit =
      (* [%!] flushes the channel after each write, so output
         interleaves correctly with other writers. *)
      let output    str = Printf.fprintf channel "%s%!" str in
      let output_nl str = output (str ^ "\n") in
      match command with
        EvalOpt.Quiet -> ()
      | EvalOpt.Tokens ->
          Token.to_string token ~offsets mode |> output_nl
      | EvalOpt.Copy ->
          (* [left_mark] is folded left-to-right onto the token's
             lexeme, so the markup ends up *before* the token. *)
          let lexeme = Token.to_lexeme token
          and apply acc markup = Markup.to_lexeme markup :: acc
          in List.fold_left apply [lexeme] left_mark
             |> String.concat "" |> output
      | EvalOpt.Units ->
          let abs_token = Token.to_string token ~offsets mode
          and apply acc markup =
            Markup.to_string markup ~offsets mode :: acc
          in List.fold_left apply [abs_token] left_mark
             |> String.concat "\n" |> output_nl

    type file_path = string

    (** [trace ?offsets mode file_path_opt command] reads tokens
        from the given file (or [stdin] when [None]) until EOF,
        logging each one on [stdout] via {!output_token}. Lexing
        errors are formatted and returned as [Stdlib.Error]; the
        token stream and [stdout] are closed in all cases. *)
    let trace ?(offsets=true) mode file_path_opt command :
          (unit, string Region.reg) Stdlib.result =
      let input =
        match file_path_opt with
          Some file_path -> Lexer.File file_path
        | None -> Lexer.Stdin in
      match Lexer.open_token_stream input with
        Ok Lexer.{read; buffer; close; _} ->
          let log = output_token ~offsets mode command stdout
          and close_all () = close (); close_out stdout in
          (* Pull tokens until EOF; [read] logs each token as a side
             effect through [log]. *)
          let rec iter () =
            match read ~log buffer with
              token ->
                if Token.is_eof token
                then Stdlib.Ok ()
                else iter ()
            | exception Lexer.Error error ->
                (* "-" denotes stdin: no file name in the message. *)
                let file =
                  match file_path_opt with
                    None | Some "-" -> false
                  | Some _ -> true in
                let msg =
                  Lexer.format_error ~offsets mode ~file error
                in Stdlib.Error msg in
          let result = iter ()
          in close_all (); result
      | Stdlib.Error (Lexer.File_opening msg) ->
          close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
  end