Moved logging from Lexer to LexerLog.

Author: Christian Rinderknecht
Date:   2019-04-13 18:13:05 +02:00
Commit: 4d61ac0a13 (parent df84763eb2)
6 changed files with 105 additions and 73 deletions
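
In short: the standalone debugging code (`output_token` and the `trace` driver) leaves the `Lexer` functor and moves into a new `LexerLog` module, itself a functor over any lexer matching `Lexer.S`; only `print_error` stays in `Lexer`, relocated into the functor body. A minimal sketch of the client-side migration (the input file name is hypothetical; the pattern follows the two client diffs below):

    (* Before this commit: tracing was part of the lexer itself. *)
    module Lexer = Lexer.Make (LexToken)
    let () = Lexer.trace ~offsets:true `Point (Some "input") EvalOpt.Tokens

    (* After this commit: instantiate LexerLog over the lexer first. *)
    module Log = LexerLog.Make (Lexer)
    let () = Log.trace ~offsets:true `Point (Some "input") EvalOpt.Tokens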

File 1 of 6

@@ -117,10 +117,6 @@ module type S =
     type file_path = string
     type logger = Markup.t list -> token -> unit

-    val output_token :
-      ?offsets:bool -> [`Byte | `Point] ->
-      EvalOpt.command -> out_channel -> logger
-
     type instance = {
       read : ?log:logger -> Lexing.lexbuf -> token;
       buffer : Lexing.lexbuf;
@@ -136,13 +132,8 @@ module type S =
     exception Error of Error.t Region.reg

     val print_error : ?offsets:bool -> [`Byte | `Point] ->
                       Error.t Region.reg -> unit

-    (* Standalone tracer *)
-
-    val trace :
-      ?offsets:bool -> [`Byte | `Point] ->
-      file_path option -> EvalOpt.command -> unit
   end

 (* The functorised interface

File 2 of 6

@@ -139,10 +139,6 @@ module type S = sig
   type file_path = string
   type logger = Markup.t list -> token -> unit

-  val output_token :
-    ?offsets:bool -> [`Byte | `Point] ->
-    EvalOpt.command -> out_channel -> logger
-
   type instance = {
     read : ?log:logger -> Lexing.lexbuf -> token;
     buffer : Lexing.lexbuf;
@@ -157,14 +153,8 @@ module type S = sig
   exception Error of Error.t Region.reg

-  val print_error :
-    ?offsets:bool -> [`Byte | `Point] -> Error.t Region.reg -> unit
+  val print_error : ?offsets:bool -> [`Byte | `Point] ->
+                    Error.t Region.reg -> unit

-  (* Standalone tracer *)
-
-  val trace :
-    ?offsets:bool -> [`Byte | `Point] ->
-    file_path option -> EvalOpt.command -> unit
 end

 (* The functorised interface
@@ -392,6 +382,14 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
   exception Error of Error.t Region.reg

+  let print_error ?(offsets=true) mode Region.{region; value} =
+    let msg = error_to_string value in
+    let file = match EvalOpt.input with
+                 None | Some "-" -> false
+               | Some _ -> true in
+    let reg = region#to_string ~file ~offsets mode in
+    Utils.highlight (sprintf "Lexical error %s:\n%s%!" reg msg)
+
   let fail region value = raise (Error Region.{region; value})

 (* TOKENS *)
@@ -829,53 +827,6 @@ let open_token_stream file_path_opt =
   and close () = close_in cin in
   {read = read_token; buffer; get_pos; get_last; close}

-(* Standalone lexer for debugging purposes *)
-
-(* Pretty-printing in a string the lexemes making up the markup
-   between two tokens, concatenated with the last lexeme itself. *)
-
-let output_token ?(offsets=true) mode command
-                 channel left_mark token : unit =
-  let output    str = Printf.fprintf channel "%s%!" str in
-  let output_nl str = output (str ^ "\n") in
-  match command with
-    EvalOpt.Quiet  -> ()
-  | EvalOpt.Tokens -> Token.to_string token ~offsets mode |> output_nl
-  | EvalOpt.Copy ->
-      let lexeme = Token.to_lexeme token
-      and apply acc markup = Markup.to_lexeme markup :: acc
-      in List.fold_left apply [lexeme] left_mark
-         |> String.concat "" |> output
-  | EvalOpt.Units ->
-      let abs_token = Token.to_string token ~offsets mode
-      and apply acc markup =
-        Markup.to_string markup ~offsets mode :: acc
-      in List.fold_left apply [abs_token] left_mark
-         |> String.concat "\n" |> output_nl
-
-let print_error ?(offsets=true) mode Region.{region; value} =
-  let msg = error_to_string value in
-  let file = match EvalOpt.input with
-               None | Some "-" -> false
-             | Some _ -> true in
-  let reg = region#to_string ~file ~offsets mode in
-  Utils.highlight (sprintf "Lexical error %s:\n%s%!" reg msg)
-
-let trace ?(offsets=true) mode file_path_opt command : unit =
-  try
-    let {read; buffer; close; _} = open_token_stream file_path_opt
-    and cout = stdout in
-    let log = output_token ~offsets mode command cout
-    and close_all () = close (); close_out cout in
-    let rec iter () =
-      match read ~log buffer with
-        token ->
-          if Token.is_eof token then close_all ()
-          else iter ()
-      | exception Error e -> print_error ~offsets mode e; close_all ()
-    in iter ()
-  with Sys_error msg -> Utils.highlight (sprintf "%s\n" msg)
-
 end (* of functor [Make] in HEADER *)

 (* END TRAILER *)
 }
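
Note that `print_error` is not dropped: it moves from the deleted standalone section into the body of the `Make` functor (the hunk at 382 above), since the new `LexerLog.trace` still reports lexical errors through `Lexer.print_error`. A minimal sketch of that error path (hypothetical driver, assuming a token module `LexToken`; `None` presumably selects standard input):

    module Lexer = Lexer.Make (LexToken)

    let () =
      let Lexer.{read; buffer; close; _} =
        Lexer.open_token_stream None in
      (try ignore (read buffer)
       with Lexer.Error e -> Lexer.print_error ~offsets:true `Point e);
      close ()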

File 3 of 6 (new)

@@ -0,0 +1,69 @@
+(* Standalone lexer for debugging purposes *)
+
+let sprintf = Printf.sprintf
+
+module type S =
+  sig
+    module Lexer : Lexer.S
+
+    val output_token :
+      ?offsets:bool -> [`Byte | `Point] ->
+      EvalOpt.command -> out_channel ->
+      Markup.t list -> Lexer.token -> unit
+
+    type file_path = string
+
+    val trace :
+      ?offsets:bool -> [`Byte | `Point] ->
+      file_path option -> EvalOpt.command -> unit
+  end
+
+module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
+  struct
+    module Lexer = Lexer
+    module Token = Lexer.Token
+
+    (* Pretty-printing in a string the lexemes making up the markup
+       between two tokens, concatenated with the last lexeme
+       itself. *)
+
+    let output_token ?(offsets=true) mode command
+                     channel left_mark token : unit =
+      let output    str = Printf.fprintf channel "%s%!" str in
+      let output_nl str = output (str ^ "\n") in
+      match command with
+        EvalOpt.Quiet  -> ()
+      | EvalOpt.Tokens -> Token.to_string token ~offsets mode |> output_nl
+      | EvalOpt.Copy ->
+          let lexeme = Token.to_lexeme token
+          and apply acc markup = Markup.to_lexeme markup :: acc
+          in List.fold_left apply [lexeme] left_mark
+             |> String.concat "" |> output
+      | EvalOpt.Units ->
+          let abs_token = Token.to_string token ~offsets mode
+          and apply acc markup =
+            Markup.to_string markup ~offsets mode :: acc
+          in List.fold_left apply [abs_token] left_mark
+             |> String.concat "\n" |> output_nl
+
+    type file_path = string
+
+    let trace ?(offsets=true) mode file_path_opt command : unit =
+      try
+        let Lexer.{read; buffer; close; _} =
+          Lexer.open_token_stream file_path_opt
+        and cout = stdout in
+        let log = output_token ~offsets mode command cout
+        and close_all () = close (); close_out cout in
+        let rec iter () =
+          match read ~log buffer with
+            token ->
+              if Token.is_eof token then close_all ()
+              else iter ()
+          | exception Lexer.Error e ->
+              Lexer.print_error ~offsets mode e; close_all ()
+        in iter ()
+      with Sys_error msg -> Utils.highlight (sprintf "%s\n" msg)
+  end
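
Besides `trace`, a client can pass a partially applied `output_token` as the `~log` callback of `Lexer.read`, which is what the last diff below does. A sketch under the same assumptions as above (hypothetical driver; `EvalOpt.Tokens` prints one token per line):

    module Lexer = Lexer.Make (LexToken)
    module Log = LexerLog.Make (Lexer)

    let () =
      let Lexer.{read; buffer; close; _} =
        Lexer.open_token_stream None in
      (* Partial application yields a logger: Markup.t list -> token -> unit. *)
      let log = Log.output_token ~offsets:true `Point EvalOpt.Tokens stdout in
      let rec loop () =
        if not (Lexer.Token.is_eof (read ~log buffer)) then loop () in
      (try loop () with
         Lexer.Error e -> Lexer.print_error ~offsets:true `Point e);
      close ()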

File 4 of 6 (new)

@@ -0,0 +1,17 @@
+module type S =
+  sig
+    module Lexer : Lexer.S
+
+    val output_token :
+      ?offsets:bool -> [`Byte | `Point] ->
+      EvalOpt.command -> out_channel ->
+      Markup.t list -> Lexer.token -> unit
+
+    type file_path = string
+
+    val trace :
+      ?offsets:bool -> [`Byte | `Point] ->
+      file_path option -> EvalOpt.command -> unit
+  end
+
+module Make (Lexer: Lexer.S) : S with module Lexer = Lexer

File 5 of 6

@@ -51,5 +51,7 @@ let () =
 module Lexer = Lexer.Make (LexToken)
+module Log = LexerLog.Make (Lexer)

-let () = Lexer.trace ~offsets:EvalOpt.offsets
-                     EvalOpt.mode (Some pp_input) EvalOpt.cmd
+let () = Log.trace ~offsets:EvalOpt.offsets
+                   EvalOpt.mode (Some pp_input) EvalOpt.cmd

File 6 of 6

@@ -67,13 +67,15 @@ let () =
 module Lexer = Lexer.Make (LexToken)
+module Log = LexerLog.Make (Lexer)

 let Lexer.{read; buffer; get_pos; get_last; close} =
   Lexer.open_token_stream (Some pp_input)
 and cout = stdout

-let log = Lexer.output_token ~offsets:EvalOpt.offsets
+let log = Log.output_token ~offsets:EvalOpt.offsets
                             EvalOpt.mode EvalOpt.cmd cout
 and close_all () = close (); close_out cout