Marked the errors that should be skipped (because they are caught
by the LIGO lexer later). Added the field [is_file] to the state of
the lexer to record whether the input is a file or not (and hence
whether to insert a first line directive). Fixed ReasonLIGO comments
in entrypoints-contracts.md and website2.religo. WIP on the LIGO
lexer to properly handle comments for all syntaxes.
Christian Rinderknecht 2020-04-08 20:24:34 +02:00
parent 79967be726
commit 46eecb4027
14 changed files with 190 additions and 97 deletions
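
At a glance, the commit threads two pieces of information through the
pipeline: which LIGO syntax is being lexed (so block comments use that
syntax's delimiters) and whether the input is a file (so a first line
directive is only inserted when it can point at one). Below is a
minimal, self-contained OCaml sketch of those two ideas, separate from
the diffs that follow and with purely illustrative names.

(* Sketch only: per-syntax block comment delimiters and the
   file/non-file distinction used for line directives. *)

type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]

type input = File of string | Stdin | String of string

(* Mirrors the [is_file] predicate added to the lexer: only a real
   file name warrants inserting a first line directive. *)
let is_file = function
  File "-" | File "" -> false
| File _ -> true
| Stdin | String _ -> false

(* Which delimiters open and close a block comment depends on the
   syntax: ReasonLIGO uses C-style comments, the other two OCaml-style. *)
let block_delimiters : language -> string * string = function
  `PascaLIGO | `CameLIGO -> "(*", "*)"
| `ReasonLIGO -> "/*", "*/"

let () =
  let opening, closing = block_delimiters `ReasonLIGO in
  Printf.printf "ReasonLIGO block comments: %s ... %s\n" opening closing;
  Printf.printf "First line directive needed? %b\n" (is_file (String "let x = 1"))

The real lexer stores the language in its state record ([lang : language])
and dispatches in the scanner rules, as the hunks below show.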


@@ -466,8 +466,8 @@ let proxy = ((action, store): (parameter, storage)) : return => {
   | Some (contract) => contract;
   | None => (failwith ("Contract not found.") : contract (parameter));
   };
-  (* Reuse the parameter in the subsequent
-     transaction or use another one, `mock_param`. *)
+  /* Reuse the parameter in the subsequent
+     transaction or use another one, `mock_param`. */
   let mock_param : parameter = Increment (5n);
   let op : operation = Tezos.transaction (action, 0tez, counter);
   ([op], store)


@@ -7,7 +7,7 @@ let%expect_test _ =
 let%expect_test _ =
   run_ligo_bad ["interpret" ; "(\"thisisnotasignature\":signature)" ; "--syntax=pascaligo"] ;
   [%expect {|
-    ligo: in file "", line 0, characters 1-32. Badly formatted literal: Signature thisisnotasignature {"location":"in file \"\", line 0, characters 1-32"}
+    ligo: in file ".", line 1, characters 1-32. Badly formatted literal: Signature thisisnotasignature {"location":"in file \".\", line 1, characters 1-32"}
     If you're not sure how to fix this error, you can
@@ -25,7 +25,7 @@ let%expect_test _ =
 let%expect_test _ =
   run_ligo_bad ["interpret" ; "(\"thisisnotapublickey\":key)" ; "--syntax=pascaligo"] ;
   [%expect {|
-    ligo: in file "", line 0, characters 1-26. Badly formatted literal: key thisisnotapublickey {"location":"in file \"\", line 0, characters 1-26"}
+    ligo: in file ".", line 1, characters 1-26. Badly formatted literal: key thisisnotapublickey {"location":"in file \".\", line 1, characters 1-26"}
     If you're not sure how to fix this error, you can


@@ -2,4 +2,4 @@ SHELL := dash
 BFLAGS := -strict-sequence -w +A-48-4 -g
 clean::
-> \rm -f Parser.msg.map Parser.msg.states Version.ml
+> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml


@@ -2,4 +2,4 @@ SHELL := dash
 BFLAGS := -strict-sequence -w +A-48-4 -g
 clean::
-> \rm -f Parser.msg.map Parser.msg.states Version.ml
+> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml


@@ -2,4 +2,4 @@ SHELL := dash
 BFLAGS := -strict-sequence -w +A-48-4 -g
 clean::
-> \rm -f Parser.msg.map Parser.msg.states Version.ml
+> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml


@@ -142,6 +142,8 @@ module type S =
       | Channel of in_channel
       | Buffer of Lexing.lexbuf
 
+    val is_file : input -> bool
+
     type instance = {
       input : input;
       read : log:logger -> Lexing.lexbuf -> token;
@@ -158,7 +160,10 @@ module type S =
     val lexbuf_from_input :
       input -> (Lexing.lexbuf * (unit -> unit), open_err) Stdlib.result
 
-    val open_token_stream : input -> (instance, open_err) Stdlib.result
+    type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
+
+    val open_token_stream :
+      language -> input -> (instance, open_err) Stdlib.result
 
     (* Error reporting *)


@@ -164,6 +164,8 @@ module type S =
       | Channel of in_channel
       | Buffer of Lexing.lexbuf
 
+    val is_file : input -> bool
+
     type instance = {
       input : input;
       read : log:logger -> Lexing.lexbuf -> token;
@@ -180,7 +182,10 @@ module type S =
     val lexbuf_from_input :
       input -> (Lexing.lexbuf * (unit -> unit), open_err) Stdlib.result
 
-    val open_token_stream : input -> (instance, open_err) Stdlib.result
+    type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
+
+    val open_token_stream :
+      language -> input -> (instance, open_err) Stdlib.result
 
     (* Error reporting *)
@@ -297,6 +302,8 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
        library Uutf.
     *)
 
+    type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
+
     type state = {
       units : (Markup.t list * token) FQueue.t;
       markup : Markup.t list;
@@ -304,7 +311,8 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
       last : Region.t;
      pos : Pos.t;
      decoder : Uutf.decoder;
-      supply : Bytes.t -> int -> int -> unit
+      supply : Bytes.t -> int -> int -> unit;
+      lang : language
     }
 
 (* The call [enqueue (token, state)] updates functionally the
@@ -393,7 +401,7 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
     | Unterminated_string
     | Unterminated_integer
     | Odd_lengthed_bytes
-    | Unterminated_comment
+    | Unterminated_comment of string
     | Orphan_minus
     | Non_canonical_zero
     | Negative_byte_sequence
@@ -424,9 +432,9 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
     | Odd_lengthed_bytes ->
         "The length of the byte sequence is an odd number.\n\
          Hint: Add or remove a digit."
-    | Unterminated_comment ->
-        "Unterminated comment.\n\
-         Hint: Close with \"*)\"."
+    | Unterminated_comment ending ->
+        sprintf "Unterminated comment.\n\
+                 Hint: Close with \"%s\"." ending
     | Orphan_minus ->
         "Orphan minus sign.\n\
          Hint: Remove the trailing space."
@@ -643,10 +651,21 @@ and scan state = parse
                let thread = {opening; len=1; acc=['"']} in
                scan_string thread state lexbuf |> mk_string |> enqueue }
 
-| "(*" { let opening, _, state = sync state lexbuf in
-         let thread = {opening; len=2; acc=['*';'(']} in
-         let state = scan_block thread state lexbuf |> push_block
-         in scan state lexbuf }
+| "(*" { if state.lang = `PascaLIGO || state.lang = `CameLIGO then
+           let opening, _, state = sync state lexbuf in
+           let thread = {opening; len=2; acc=['*';'(']} in
+           let state = scan_pascaligo_block thread state lexbuf |> push_block
+           in scan state lexbuf
+         else (rollback lexbuf; scan_two_sym state lexbuf)
+       }
+
+| "/*" { if state.lang = `ReasonLIGO then
+           let opening, _, state = sync state lexbuf in
+           let thread = {opening; len=2; acc=['*';'/']} in
+           let state = scan_reasonligo_block thread state lexbuf |> push_block
+           in scan state lexbuf
+         else (rollback lexbuf; scan_two_sym state lexbuf)
+       }
 
 | "//" { let opening, _, state = sync state lexbuf in
          let thread = {opening; len=2; acc=['/';'/']} in
@@ -720,6 +739,14 @@ and scan state = parse
 | _ as c { let region, _, _ = sync state lexbuf
            in fail region (Unexpected_character c) }
 
+(* Scanning two symbols *)
+
+and scan_two_sym state = parse
+  symbol { scan_one_sym (mk_sym state lexbuf |> enqueue) lexbuf }
+
+and scan_one_sym state = parse
+  symbol { scan (mk_sym state lexbuf |> enqueue) lexbuf }
+
 (* Scanning CPP #include flags *)
 
 and scan_flags state acc = parse
@@ -751,39 +778,70 @@ and scan_string thread state = parse
 (* Finishing a block comment
 
-   (Note for Emacs: ("(*")
-   The lexing of block comments must take care of embedded block
-   comments that may occur within, as well as strings, so no substring
-   "*)" may inadvertently close the block. This is the purpose
-   of the first case of the scanner [scan_block].
+   (For Emacs: ("(*") The lexing of block comments must take care of
+   embedded block comments that may occur within, as well as strings,
+   so no substring "*/" or "*)" may inadvertently close the
+   block. This is the purpose of the first case of the scanners
+   [scan_pascaligo_block] and [scan_reasonligo_block].
 *)
 
-and scan_block thread state = parse
+and scan_pascaligo_block thread state = parse
   '"' | "(*" { let opening = thread.opening in
                let opening', lexeme, state = sync state lexbuf in
                let thread = push_string lexeme thread in
                let thread = {thread with opening=opening'} in
                let next = if lexeme = "\"" then scan_string
-                          else scan_block in
+                          else scan_pascaligo_block in
                let thread, state = next thread state lexbuf in
                let thread = {thread with opening}
-               in scan_block thread state lexbuf }
+               in scan_pascaligo_block thread state lexbuf }
 | "*)" { let _, lexeme, state = sync state lexbuf
          in push_string lexeme thread, state }
 | nl as nl { let () = Lexing.new_line lexbuf
              and state = {state with pos = state.pos#new_line nl}
             and thread = push_string nl thread
-             in scan_block thread state lexbuf }
-| eof { fail thread.opening Unterminated_comment }
+             in scan_pascaligo_block thread state lexbuf }
+| eof { fail thread.opening (Unterminated_comment "*)") }
 | _ { let () = rollback lexbuf in
       let len = thread.len in
       let thread,
-          status = scan_utf8 thread state lexbuf in
+          status = scan_utf8 "*)" thread state lexbuf in
       let delta = thread.len - len in
       let pos = state.pos#shift_one_uchar delta in
       match status with
-        None -> scan_block thread {state with pos} lexbuf
-      | Some error ->
+        Stdlib.Ok () ->
+          scan_pascaligo_block thread {state with pos} lexbuf
+      | Error error ->
+          let region = Region.make ~start:state.pos ~stop:pos
+          in fail region error }
+
+and scan_reasonligo_block thread state = parse
+  '"' | "/*" { let opening = thread.opening in
+               let opening', lexeme, state = sync state lexbuf in
+               let thread = push_string lexeme thread in
+               let thread = {thread with opening=opening'} in
+               let next = if lexeme = "\"" then scan_string
+                          else scan_reasonligo_block in
+               let thread, state = next thread state lexbuf in
+               let thread = {thread with opening}
+               in scan_reasonligo_block thread state lexbuf }
+| "*/" { let _, lexeme, state = sync state lexbuf
+         in push_string lexeme thread, state }
+| nl as nl { let () = Lexing.new_line lexbuf
+             and state = {state with pos = state.pos#new_line nl}
+             and thread = push_string nl thread
+             in scan_reasonligo_block thread state lexbuf }
+| eof { fail thread.opening (Unterminated_comment "*/") }
+| _ { let () = rollback lexbuf in
+      let len = thread.len in
+      let thread,
+          status = scan_utf8 "*/" thread state lexbuf in
+      let delta = thread.len - len in
+      let pos = state.pos#shift_one_uchar delta in
+      match status with
+        Stdlib.Ok () ->
+          scan_reasonligo_block thread {state with pos} lexbuf
+      | Error error ->
          let region = Region.make ~start:state.pos ~stop:pos
          in fail region error }
@@ -798,24 +856,36 @@ and scan_line thread state = parse
 | _ { let () = rollback lexbuf in
       let len = thread.len in
       let thread,
-          status = scan_utf8 thread state lexbuf in
+          status = scan_utf8_inline thread state lexbuf in
       let delta = thread.len - len in
       let pos = state.pos#shift_one_uchar delta in
       match status with
-        None -> scan_line thread {state with pos} lexbuf
-      | Some error ->
+        Stdlib.Ok () ->
+          scan_line thread {state with pos} lexbuf
+      | Error error ->
          let region = Region.make ~start:state.pos ~stop:pos
          in fail region error }
 
-and scan_utf8 thread state = parse
-  eof { fail thread.opening Unterminated_comment }
+and scan_utf8 closing thread state = parse
+  eof { fail thread.opening (Unterminated_comment closing) }
 | _ as c { let thread = push_char c thread in
            let lexeme = Lexing.lexeme lexbuf in
            let () = state.supply (Bytes.of_string lexeme) 0 1 in
           match Uutf.decode state.decoder with
-             `Uchar _ -> thread, None
-           | `Malformed _ -> thread, Some Invalid_utf8_sequence
-           | `Await -> scan_utf8 thread state lexbuf
+             `Uchar _ -> thread, Stdlib.Ok ()
+           | `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
+           | `Await -> scan_utf8 closing thread state lexbuf
+           | `End -> assert false }
+
+and scan_utf8_inline thread state = parse
+  eof { thread, Stdlib.Ok () }
+| _ as c { let thread = push_char c thread in
+           let lexeme = Lexing.lexeme lexbuf in
+           let () = state.supply (Bytes.of_string lexeme) 0 1 in
+           match Uutf.decode state.decoder with
+             `Uchar _ -> thread, Stdlib.Ok ()
+           | `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
+           | `Await -> scan_utf8_inline thread state lexbuf
            | `End -> assert false }
 
 (* END LEXER DEFINITION *)
@@ -876,6 +946,13 @@ type input =
     | Channel of in_channel
     | Buffer of Lexing.lexbuf
 
+    (* Checking if a lexer input is a file *)
+
+    let is_file = function
+      File "-" | File "" -> false
+    | File _ -> true
+    | Stdin | String _ | Channel _ | Buffer _ -> false
+
     type instance = {
       input : input;
       read : log:logger -> Lexing.lexbuf -> token;
@@ -909,7 +986,7 @@ let lexbuf_from_input = function
       Ok (Lexing.from_channel chan, close)
     | Buffer b -> Ok (b, fun () -> ())
 
-    let open_token_stream input =
+    let open_token_stream (lang: language) input =
       let file_path = match input with
         File file_path ->
           if file_path = "-" then "" else file_path
@@ -925,7 +1002,8 @@ let open_token_stream input =
         pos;
         markup = [];
         decoder;
-        supply} in
+        supply;
+        lang} in
       let get_pos () = !state.pos
       and get_last () = !state.last

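The comment kept above [scan_pascaligo_block] and [scan_reasonligo_block]
explains why naive substring search cannot find the end of a block
comment: the comment may contain nested block comments and string
literals holding the closing delimiter. The standalone OCaml sketch
below shows that recursive idea; it is illustrative only (not the
ocamllex rules above) and does not handle escaped quotes.

(* Sketch: skip a block comment that may contain nested comments and
   string literals. [i] points just after the opening delimiter; the
   result is the index just after the matching closing delimiter. *)
let rec skip_block op cl s i =
  if i >= String.length s then failwith "Unterminated comment"
  else if i + 1 < String.length s && String.sub s i 2 = cl then i + 2
  else if i + 1 < String.length s && String.sub s i 2 = op then
    (* recursively skip the nested comment, then resume *)
    skip_block op cl s (skip_block op cl s (i + 2))
  else if s.[i] = '"' then
    (* skip the embedded string so a closing delimiter inside it is
       ignored; escape sequences are not handled in this sketch *)
    let rec skip_str j =
      if j >= String.length s then failwith "Unterminated string"
      else if s.[j] = '"' then j + 1
      else skip_str (j + 1)
    in skip_block op cl s (skip_str (i + 1))
  else skip_block op cl s (i + 1)

let () =
  let src = {|(* a "*)" inside a string (* and nested *) *) let x = 1|} in
  let after = skip_block "(*" "*)" src 2 in
  (* prints " let x = 1": the whole comment was skipped *)
  print_endline (String.sub src after (String.length src - after))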

@@ -7,15 +7,22 @@ module type S =
     module Lexer : Lexer.S
 
     val output_token :
-      ?offsets:bool -> [`Byte | `Point] ->
-      EvalOpt.command -> out_channel ->
-      Markup.t list -> Lexer.token -> unit
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.command ->
+      out_channel ->
+      Markup.t list ->
+      Lexer.token ->
+      unit
 
     type file_path = string
 
     val trace :
-      ?offsets:bool -> [`Byte | `Point] ->
-      Lexer.input -> EvalOpt.command ->
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.language ->
+      Lexer.input ->
+      EvalOpt.command ->
       (unit, string Region.reg) Stdlib.result
   end
@@ -49,9 +56,9 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
     type file_path = string
 
-    let trace ?(offsets=true) mode input command :
+    let trace ?(offsets=true) mode lang input command :
       (unit, string Region.reg) Stdlib.result =
-      match Lexer.open_token_stream input with
+      match Lexer.open_token_stream lang input with
        Ok Lexer.{read; buffer; close; _} ->
          let log = output_token ~offsets mode command stdout
          and close_all () = flush_all (); close () in


@@ -5,15 +5,22 @@ module type S =
     module Lexer : Lexer.S
 
     val output_token :
-      ?offsets:bool -> [`Byte | `Point] ->
-      EvalOpt.command -> out_channel ->
-      Markup.t list -> Lexer.token -> unit
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.command ->
+      out_channel ->
+      Markup.t list ->
+      Lexer.token ->
+      unit
 
     type file_path = string
 
     val trace :
-      ?offsets:bool -> [`Byte | `Point] ->
-      Lexer.input -> EvalOpt.command ->
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.language ->
+      Lexer.input ->
+      EvalOpt.command ->
       (unit, string Region.reg) Stdlib.result
   end


@@ -20,7 +20,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
     let scan () : (Lexer.token list, string Region.reg) Stdlib.result =
       (* Preprocessing the input source *)
 
-      let preproc cin =
+      let preproc ~is_file cin =
        let buffer = Lexing.from_channel cin in
        let open Lexing in
        let () =
@@ -29,7 +29,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
          | Some pos_fname ->
             buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
        let opt = (IO.options :> Preprocessor.EvalOpt.options) in
-       match Preproc.lex opt buffer with
+       match Preproc.lex ~is_file opt buffer with
         Stdlib.Error (pp_buffer, err) ->
           if SSet.mem "preproc" IO.options#verbose then
             Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
@@ -40,7 +40,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
          (* Running the lexer on the preprocessed input *)
 
          let source = Lexer.String (Buffer.contents pp_buffer) in
-         match Lexer.open_token_stream source with
+         match Lexer.open_token_stream IO.options#lang source with
           Ok Lexer.{read; buffer; close; _} ->
             let close_all () = flush_all (); close () in
             let rec read_tokens tokens =
@@ -63,9 +63,9 @@ module Make (IO: IO) (Lexer: Lexer.S) =
         | Stdlib.Error (Lexer.File_opening msg) ->
            flush_all (); Stdlib.Error (Region.wrap_ghost msg) in
      match IO.options#input with
-       Some "-" | None -> preproc stdin
+       Some "-" | None -> preproc ~is_file:false stdin
      | Some file_path ->
-         try open_in file_path |> preproc with
+         try open_in file_path |> preproc ~is_file:true with
          Sys_error msg -> Stdlib.Error (Region.wrap_ghost msg)
 
     (* Tracing the lexing *)
@@ -74,7 +74,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
     let trace () : (unit, string Region.reg) Stdlib.result =
       (* Preprocessing the input *)
 
-      let preproc cin =
+      let preproc ~is_file cin =
        let buffer = Lexing.from_channel cin in
        let open Lexing in
        let () =
@@ -83,7 +83,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
          | Some pos_fname ->
             buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
        let opt = (IO.options :> Preprocessor.EvalOpt.options) in
-       match Preproc.lex opt buffer with
+       match Preproc.lex ~is_file opt buffer with
         Stdlib.Error (pp_buffer, err) ->
           if SSet.mem "preproc" IO.options#verbose then
             Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
@@ -99,11 +99,12 @@ module Make (IO: IO) (Lexer: Lexer.S) =
          end
        else Log.trace ~offsets:IO.options#offsets
                       IO.options#mode
+                      IO.options#lang
                       (Lexer.String preproc_str)
                       IO.options#cmd
      in match IO.options#input with
-          Some "-" | None -> preproc stdin
+          Some "-" | None -> preproc ~is_file:false stdin
        | Some file_path ->
-           try open_in file_path |> preproc with
+           try open_in file_path |> preproc ~is_file:true with
            Sys_error msg -> Stdlib.Error (Region.wrap_ghost msg)
  end


@@ -151,15 +151,6 @@ module Make (Lexer: Lexer.S)
          end
        in flush_all (); close (); Ok ast
 
-    (* Checking if a lexer input is a file *)
-
-    let is_file input =
-      let open Lexer in
-      match input with
-        File "-" | File "" -> false
-      | File _ -> true
-      | Stdin | String _ | Channel _ | Buffer _ -> false
-
     (* Wrapper for the parsers above *)
 
     let apply lexer_inst parser =
@@ -172,7 +163,7 @@ module Make (Lexer: Lexer.S)
       (* Lexing errors *)
 
       | exception Lexer.Error err ->
-          let file = is_file lexer_inst.Lexer.input in
+          let file = Lexer.is_file lexer_inst.Lexer.input in
          let error =
            Lexer.format_error ~offsets:SubIO.options#offsets
                               SubIO.options#mode err ~file
@@ -208,8 +199,8 @@ module Make (Lexer: Lexer.S)
     (* Preprocessing the input source *)
 
-    let preproc options lexbuf =
-      Preproc.lex (options :> Preprocessor.EvalOpt.options) lexbuf
+    let preproc ~is_file options lexbuf =
+      Preproc.lex ~is_file (options :> Preprocessor.EvalOpt.options) lexbuf
 
     (* Parsing a contract *)
@@ -220,13 +211,13 @@ module Make (Lexer: Lexer.S)
      | Ok (lexbuf, close) ->
         (* Preprocessing the input source *)
 
-        match preproc options lexbuf with
+        match preproc ~is_file:(Lexer.is_file input) options lexbuf with
          Stdlib.Error (pp_buffer, err) ->
            if SSet.mem "preproc" options#verbose then
              Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
            let formatted =
              Preproc.format ~offsets:options#offsets
-                            ~file:(is_file input)
+                            ~file:(Lexer.is_file input)
                             err
            in close (); Stdlib.Error formatted
        | Stdlib.Ok buffer ->
@@ -234,7 +225,7 @@ module Make (Lexer: Lexer.S)
           let () = close () in
           let input' = Lexer.String (Buffer.contents buffer) in
-          match Lexer.open_token_stream input' with
+          match Lexer.open_token_stream options#lang input' with
            Ok instance -> apply instance parser
          | Stdlib.Error (Lexer.File_opening msg) ->
              Stdlib.Error (Region.wrap_ghost msg)


@@ -1,4 +1,4 @@
-(* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE *)
+/* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE */
 
 type storage = int;
 
@@ -22,4 +22,4 @@ let main = ((p,storage): (parameter, storage)) => {
   ([]: list (operation), storage);
 };
 
-(* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE *)
+/* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE */


@@ -14,9 +14,9 @@ type error =
 | Invalid_line_indicator of string
 | No_line_indicator
 | End_line_indicator
-| Newline_in_string
-| Open_comment
-| Open_string
+| Newline_in_string (*XXX*)
+| Open_comment      (*XXX*)
+| Open_string       (*XXX*)
 | Dangling_endif
 | Open_region_in_conditional
 | Dangling_endregion
@@ -41,6 +41,7 @@ val format :
 (* Preprocessing a lexing buffer *)
 
 val lex :
+  is_file:bool ->
   EvalOpt.options ->
   Lexing.lexbuf ->
   (Buffer.t, Buffer.t * error Region.reg) Stdlib.result


@@ -97,7 +97,8 @@ type state = {
   out : Buffer.t;
   incl : in_channel list;
   opt : EvalOpt.options;
-  dir : string list
+  dir : string list;
+  is_file : bool
 }
 
 (* Directories *)
@@ -738,6 +739,7 @@ and in_string opening state = parse
 and preproc state = parse
   eof { state }
 | _ { rollback lexbuf;
+      if state.is_file then
        print state (sprintf "# 1 \"%s\"\n"
                       Lexing.(lexbuf.lex_start_p.pos_fname));
       scan state lexbuf }
@@ -749,7 +751,7 @@ and preproc state = parse
    the trace is empty at the end. Note that we discard the state at
    the end. *)
 
-let lex opt buffer =
+let lex ~is_file opt buffer =
   let state = {
     env = Env.empty;
     mode = Copy;
@@ -758,7 +760,8 @@ let lex opt buffer =
     out = Buffer.create 80;
     incl = [];
    opt;
-    dir = []
+    dir = [];
+    is_file;
   } in
   match preproc state buffer with
    state -> List.iter close_in state.incl;
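
The guard added in the last hunk means the initial line directive is
only printed when the input is an actual file; stdin and in-memory
strings have no file name for it to reference. A small sketch of that
behaviour follows, with illustrative names (the file name is invented
for the example).

(* Sketch: emit the initial "# 1 <file>" line directive only when the
   input is a genuine file. *)
let emit_first_line_directive ~is_file out file_name =
  if is_file then
    Buffer.add_string out (Printf.sprintf "# 1 \"%s\"\n" file_name)

let () =
  let out = Buffer.create 80 in
  emit_first_line_directive ~is_file:true out "contract.ligo";
  emit_first_line_directive ~is_file:false out "";  (* nothing emitted *)
  print_string (Buffer.contents out)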