Forgot to add those fixes to the previous commit.
parent 729ecd3f12
commit e25bb00961
@@ -1,4 +1,4 @@
-(** Driver for the CameLIGO lexer *)
+(* Driver for the CameLIGO lexer *)
 
 module IO =
   struct
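The only change above is the comment opener. In OCaml, a comment opened with (** is an ocamldoc comment, attached to the item that follows and extracted into generated documentation, while one opened with (* is a plain comment. A minimal illustration of the difference (not taken from the LIGO sources):

  (** Picked up by ocamldoc/odoc and attached to [documented]. *)
  let documented = ()

  (* Ignored by documentation generators. *)
  let undocumented = ()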
@@ -11,4 +11,4 @@ module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
 let () =
   match M.trace () with
     Stdlib.Ok () -> ()
-  | Error msg -> Utils.highlight msg
+  | Error Region.{value; _} -> Utils.highlight value
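The last two hunk lines reflect a new error payload: M.trace no longer returns a bare string on failure but a record in the style of Region.reg, whose value field carries the message (and whose other field its source region). A self-contained sketch of the pattern, with a stand-in Region module that only mirrors the shape assumed here:

  (* Stand-in for the LIGO [Region] module: only the record shape
     matters for this sketch; the real module is richer. *)
  module Region = struct
    type t = {start : int; stop : int}       (* hypothetical positions *)
    type 'a reg = {value : 'a; region : t}   (* payload plus location  *)
  end

  let highlight msg = prerr_endline msg      (* stand-in for Utils.highlight *)

  let report = function
    Stdlib.Ok () -> ()
  | Stdlib.Error Region.{value; _} -> highlight value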
@@ -13,4 +13,4 @@ module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
 let () =
   match M.trace () with
     Stdlib.Ok () -> ()
-  | Error msg -> Utils.highlight msg
+  | Error Region.{value; _} -> Utils.highlight value
@@ -1,4 +1,4 @@
-(** Driver for the ReasonLIGO lexer *)
+(* Driver for the ReasonLIGO lexer *)
 
 module IO =
   struct
@@ -11,4 +11,4 @@ module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
 let () =
   match M.trace () with
     Stdlib.Ok () -> ()
-  | Error msg -> Utils.highlight msg
+  | Error Region.{value; _} -> Utils.highlight value
@@ -823,17 +823,17 @@ and scan_utf8 thread state = parse
    context of a recognised lexeme (to enforce stylistic constraints or
    report special error patterns), we need to keep a hidden reference
    to a queue of recognised lexical units (that is, tokens and markup)
-   that acts as a mutable state between the calls to
-   [read_token]. When [read_token] is called, that queue is examined
-   first and, if it contains at least one token, that token is
-   returned; otherwise, the lexing buffer is scanned for at least one
-   more new token. That is the general principle: we put a high-level
-   buffer (our queue) on top of the low-level lexing buffer.
+   that acts as a mutable state between the calls to [read]. When
+   [read] is called, that queue is examined first and, if it contains
+   at least one token, that token is returned; otherwise, the lexing
+   buffer is scanned for at least one more new token. That is the
+   general principle: we put a high-level buffer (our queue) on top of
+   the low-level lexing buffer.
 
    One tricky and important detail is that we must make any parser
-   generated by Menhir (and calling [read_token]) believe that the
-   last region of the input source that was matched indeed corresponds
-   to the returned token, despite that many tokens and markup may have
+   generated by Menhir (and calling [read]) believe that the last
+   region of the input source that was matched indeed corresponds to
+   the returned token, despite that many tokens and markup may have
    been matched since it was actually read from the input. In other
    words, the parser requests a token that is taken from the
    high-level buffer, but the parser requests the source regions from
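The renamed [read] is the function implementing the principle this comment describes. A minimal sketch of that principle, using the standard mutable Queue instead of the functional FQueue threaded through the lexer's hidden state:

  type token = Int of int | Ident of string | EOF

  (* The high-level buffer: recognised units waiting to be served. *)
  let units : token Queue.t = Queue.create ()

  (* Stand-in for the generated scanner: a real one matches the
     lexing buffer and may enqueue several units per call. *)
  let scan (_lexbuf : Lexing.lexbuf) = Queue.add EOF units

  (* Serve from the queue first; scan only when it is empty. *)
  let rec read (lexbuf : Lexing.lexbuf) : token =
    match Queue.take_opt units with
      Some token -> token
    | None -> scan lexbuf; read lexbuf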
@@ -952,7 +952,7 @@ let open_token_stream input =
         in fail region Missing_break
     | _ -> () in
 
-  let rec read_token ~log buffer =
+  let rec read ~log buffer =
     match FQueue.deq !state.units with
       None ->
         scan buffer;
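The rename from read_token to read is mechanical, but the renamed function still has to uphold the contract spelled out in the comment above: a token served from the queue must look, position-wise, as if it had just been matched. One way to do that with the standard Lexing interface (the field names are the stdlib's; framing the fix this way is an assumption, not a quote of the LIGO code):

  (* Before returning a queued token, overwrite the lexing buffer's
     positions with the token's own region, so a Menhir-generated
     parser calling [read] sees locations for the token it received. *)
  let patch_buffer (start_p, curr_p) (lexbuf : Lexing.lexbuf) =
    lexbuf.Lexing.lex_start_p <- start_p;
    lexbuf.Lexing.lex_curr_p  <- curr_p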