diff --git a/gitlab-pages/docs/advanced/entrypoints-contracts.md b/gitlab-pages/docs/advanced/entrypoints-contracts.md
index 4073cc1f7..98d29ea51 100644
--- a/gitlab-pages/docs/advanced/entrypoints-contracts.md
+++ b/gitlab-pages/docs/advanced/entrypoints-contracts.md
@@ -466,8 +466,8 @@ let proxy = ((action, store): (parameter, storage)) : return => {
   | Some (contract) => contract;
   | None => (failwith ("Contract not found.") : contract (parameter));
   };
-  (* Reuse the parameter in the subsequent
-     transaction or use another one, `mock_param`. *)
+  /* Reuse the parameter in the subsequent
+     transaction or use another one, `mock_param`. */
   let mock_param : parameter = Increment (5n);
   let op : operation = Tezos.transaction (action, 0tez, counter);
   ([op], store)
diff --git a/src/bin/expect_tests/literals.ml b/src/bin/expect_tests/literals.ml
index da8a4333d..88685acc5 100644
--- a/src/bin/expect_tests/literals.ml
+++ b/src/bin/expect_tests/literals.ml
@@ -7,7 +7,7 @@ let%expect_test _ =
 let%expect_test _ =
   run_ligo_bad ["interpret" ; "(\"thisisnotasignature\":signature)" ; "--syntax=pascaligo"] ;
   [%expect {|
-    ligo: in file "", line 0, characters 1-32. Badly formatted literal: Signature thisisnotasignature {"location":"in file \"\", line 0, characters 1-32"}
+    ligo: in file ".", line 1, characters 1-32. Badly formatted literal: Signature thisisnotasignature {"location":"in file \".\", line 1, characters 1-32"}


     If you're not sure how to fix this error, you can
@@ -25,7 +25,7 @@ let%expect_test _ =
 let%expect_test _ =
   run_ligo_bad ["interpret" ; "(\"thisisnotapublickey\":key)" ; "--syntax=pascaligo"] ;
   [%expect {|
-    ligo: in file "", line 0, characters 1-26. Badly formatted literal: key thisisnotapublickey {"location":"in file \"\", line 0, characters 1-26"}
+    ligo: in file ".", line 1, characters 1-26. Badly formatted literal: key thisisnotapublickey {"location":"in file \".\", line 1, characters 1-26"}


     If you're not sure how to fix this error, you can
diff --git a/src/passes/1-parser/cameligo/Makefile.cfg b/src/passes/1-parser/cameligo/Makefile.cfg
index 233407070..5fcac2934 100644
--- a/src/passes/1-parser/cameligo/Makefile.cfg
+++ b/src/passes/1-parser/cameligo/Makefile.cfg
@@ -2,4 +2,4 @@ SHELL := dash
 BFLAGS := -strict-sequence -w +A-48-4 -g

 clean::
-> \rm -f Parser.msg.map Parser.msg.states Version.ml
+> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
diff --git a/src/passes/1-parser/pascaligo/Makefile.cfg b/src/passes/1-parser/pascaligo/Makefile.cfg
index 233407070..5fcac2934 100644
--- a/src/passes/1-parser/pascaligo/Makefile.cfg
+++ b/src/passes/1-parser/pascaligo/Makefile.cfg
@@ -2,4 +2,4 @@ SHELL := dash
 BFLAGS := -strict-sequence -w +A-48-4 -g

 clean::
-> \rm -f Parser.msg.map Parser.msg.states Version.ml
+> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
diff --git a/src/passes/1-parser/reasonligo/Makefile.cfg b/src/passes/1-parser/reasonligo/Makefile.cfg
index 233407070..5fcac2934 100644
--- a/src/passes/1-parser/reasonligo/Makefile.cfg
+++ b/src/passes/1-parser/reasonligo/Makefile.cfg
@@ -2,4 +2,4 @@ SHELL := dash
 BFLAGS := -strict-sequence -w +A-48-4 -g

 clean::
-> \rm -f Parser.msg.map Parser.msg.states Version.ml
+> \rm -f Parser.msg Parser.msg.map Parser.msg.states Version.ml
diff --git a/src/passes/1-parser/shared/Lexer.mli b/src/passes/1-parser/shared/Lexer.mli
index ef2c01ce1..6435d48d2 100644
--- a/src/passes/1-parser/shared/Lexer.mli
+++ b/src/passes/1-parser/shared/Lexer.mli
@@ -142,6 +142,8 @@ module type S =
       | Channel of in_channel
       | Buffer of Lexing.lexbuf

+    val is_file : input -> bool
+
     type instance = {
       input    : input;
       read     : log:logger -> Lexing.lexbuf -> token;
@@ -158,7 +160,10 @@ module type S =
     val lexbuf_from_input :
       input -> (Lexing.lexbuf * (unit -> unit), open_err) Stdlib.result

-    val open_token_stream : input -> (instance, open_err) Stdlib.result
+    type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
+
+    val open_token_stream :
+      language -> input -> (instance, open_err) Stdlib.result

     (* Error reporting *)
diff --git a/src/passes/1-parser/shared/Lexer.mll b/src/passes/1-parser/shared/Lexer.mll
index 8e7ce915f..59ec4a1ba 100644
--- a/src/passes/1-parser/shared/Lexer.mll
+++ b/src/passes/1-parser/shared/Lexer.mll
@@ -164,6 +164,8 @@ module type S =
       | Channel of in_channel
       | Buffer of Lexing.lexbuf

+    val is_file : input -> bool
+
     type instance = {
       input    : input;
       read     : log:logger -> Lexing.lexbuf -> token;
@@ -180,7 +182,10 @@ module type S =
     val lexbuf_from_input :
       input -> (Lexing.lexbuf * (unit -> unit), open_err) Stdlib.result

-    val open_token_stream : input -> (instance, open_err) Stdlib.result
+    type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
+
+    val open_token_stream :
+      language -> input -> (instance, open_err) Stdlib.result

     (* Error reporting *)
@@ -297,6 +302,8 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
        library Uutf.
     *)

+    type language = [`PascaLIGO | `CameLIGO | `ReasonLIGO]
+
     type state = {
       units   : (Markup.t list * token) FQueue.t;
       markup  : Markup.t list;
@@ -304,7 +311,8 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
       last    : Region.t;
       pos     : Pos.t;
       decoder : Uutf.decoder;
-      supply  : Bytes.t -> int -> int -> unit
+      supply  : Bytes.t -> int -> int -> unit;
+      lang    : language
     }

     (* The call [enqueue (token, state)] updates functionally the
@@ -393,7 +401,7 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
     | Unterminated_string
     | Unterminated_integer
     | Odd_lengthed_bytes
-    | Unterminated_comment
+    | Unterminated_comment of string
     | Orphan_minus
     | Non_canonical_zero
     | Negative_byte_sequence
@@ -424,9 +432,9 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
     | Odd_lengthed_bytes ->
         "The length of the byte sequence is an odd number.\n\
          Hint: Add or remove a digit."
-    | Unterminated_comment ->
-        "Unterminated comment.\n\
-         Hint: Close with \"*)\"."
+    | Unterminated_comment ending ->
+        sprintf "Unterminated comment.\n\
+                 Hint: Close with \"%s\"." ending
     | Orphan_minus ->
         "Orphan minus sign.\n\
          Hint: Remove the trailing space."
@@ -623,16 +631,16 @@ rule init state = parse
 and scan state = parse
   nl              { scan (push_newline state lexbuf) lexbuf }
 | ' '+            { scan (push_space state lexbuf) lexbuf }
-| '\t'+           { scan (push_tabs state lexbuf) lexbuf    }
+| '\t'+           { scan (push_tabs state lexbuf) lexbuf }
 | ident           { mk_ident state lexbuf |> enqueue }
 | constr          { mk_constr state lexbuf |> enqueue }
 | bytes           { mk_bytes seq state lexbuf |> enqueue }
 | natural 'n'     { mk_nat state lexbuf |> enqueue }
 | natural "mutez" { mk_mutez state lexbuf |> enqueue }
 | natural "tz"
-| natural "tez"   { mk_tez state lexbuf |> enqueue          }
+| natural "tez"   { mk_tez state lexbuf |> enqueue }
 | decimal "tz"
-| decimal "tez"   { mk_tez_decimal state lexbuf |> enqueue  }
+| decimal "tez"   { mk_tez_decimal state lexbuf |> enqueue }
 | natural         { mk_int state lexbuf |> enqueue }
 | symbol          { mk_sym state lexbuf |> enqueue }
 | eof             { mk_eof state lexbuf |> enqueue }
@@ -643,10 +651,21 @@ and scan state = parse
          let thread = {opening; len=1; acc=['"']} in
          scan_string thread state lexbuf |> mk_string |> enqueue }

-| "(*" { let opening, _, state = sync state lexbuf in
-         let thread = {opening; len=2; acc=['*';'(']} in
-         let state = scan_block thread state lexbuf |> push_block
-         in scan state lexbuf }
+| "(*" { if state.lang = `PascaLIGO || state.lang = `CameLIGO then
+           let opening, _, state = sync state lexbuf in
+           let thread = {opening; len=2; acc=['*';'(']} in
+           let state = scan_pascaligo_block thread state lexbuf |> push_block
+           in scan state lexbuf
+         else (rollback lexbuf; scan_two_sym state lexbuf)
+       }
+
+| "/*" { if state.lang = `ReasonLIGO then
+           let opening, _, state = sync state lexbuf in
+           let thread = {opening; len=2; acc=['*';'/']} in
+           let state = scan_reasonligo_block thread state lexbuf |> push_block
+           in scan state lexbuf
+         else (rollback lexbuf; scan_two_sym state lexbuf)
+       }

 | "//" { let opening, _, state = sync state lexbuf in
          let thread = {opening; len=2; acc=['/';'/']} in
@@ -720,6 +739,14 @@ and scan state = parse
 | _ as c { let region, _, _ = sync state lexbuf
            in fail region (Unexpected_character c) }

+(* Scanning two symbols *)
+
+and scan_two_sym state = parse
+  symbol { scan_one_sym (mk_sym state lexbuf |> enqueue) lexbuf }
+
+and scan_one_sym state = parse
+  symbol { scan (mk_sym state lexbuf |> enqueue) lexbuf }
+
 (* Scanning CPP #include flags *)

 and scan_flags state acc = parse
@@ -751,39 +778,70 @@ and scan_string thread state = parse

 (* Finishing a block comment

-   (Note for Emacs: ("(*")
-
-   The lexing of block comments must take care of embedded block
-   comments that may occur within, as well as strings, so no substring
-   "*)" may inadvertently close the block. This is the purpose
-   of the first case of the scanner [scan_block].
+   (For Emacs: ("(*") The lexing of block comments must take care of
+   embedded block comments that may occur within, as well as strings,
+   so no substring "*/" or "*)" may inadvertently close the
+   block. This is the purpose of the first case of the scanners
+   [scan_pascaligo_block] and [scan_reasonligo_block].
 *)

-and scan_block thread state = parse
+and scan_pascaligo_block thread state = parse
   '"' | "(*" { let opening = thread.opening in
               let opening', lexeme, state = sync state lexbuf in
               let thread = push_string lexeme thread in
               let thread = {thread with opening=opening'} in
               let next = if lexeme = "\"" then scan_string
-                          else scan_block in
+                          else scan_pascaligo_block in
               let thread, state = next thread state lexbuf in
               let thread = {thread with opening}
-              in scan_block thread state lexbuf }
+              in scan_pascaligo_block thread state lexbuf }
 | "*)"      { let _, lexeme, state = sync state lexbuf
               in push_string lexeme thread, state }
 | nl as nl  { let ()     = Lexing.new_line lexbuf
               and state  = {state with pos = state.pos#new_line nl}
               and thread = push_string nl thread
-              in scan_block thread state lexbuf }
-| eof       { fail thread.opening Unterminated_comment }
+              in scan_pascaligo_block thread state lexbuf }
+| eof       { fail thread.opening (Unterminated_comment "*)") }
 | _         { let () = rollback lexbuf in
               let len = thread.len in
               let thread,
-                  status = scan_utf8 thread state lexbuf in
+                  status = scan_utf8 "*)" thread state lexbuf in
               let delta = thread.len - len in
               let pos   = state.pos#shift_one_uchar delta in
               match status with
-                None -> scan_block thread {state with pos} lexbuf
-              | Some error ->
+                Stdlib.Ok () ->
+                  scan_pascaligo_block thread {state with pos} lexbuf
+              | Error error ->
                   let region = Region.make ~start:state.pos ~stop:pos
                   in fail region error }
+
+and scan_reasonligo_block thread state = parse
+  '"' | "/*" { let opening = thread.opening in
+               let opening', lexeme, state = sync state lexbuf in
+               let thread = push_string lexeme thread in
+               let thread = {thread with opening=opening'} in
+               let next = if lexeme = "\"" then scan_string
+                          else scan_reasonligo_block in
+               let thread, state = next thread state lexbuf in
+               let thread = {thread with opening}
+               in scan_reasonligo_block thread state lexbuf }
+| "*/"      { let _, lexeme, state = sync state lexbuf
+              in push_string lexeme thread, state }
+| nl as nl  { let ()     = Lexing.new_line lexbuf
+              and state  = {state with pos = state.pos#new_line nl}
+              and thread = push_string nl thread
+              in scan_reasonligo_block thread state lexbuf }
+| eof       { fail thread.opening (Unterminated_comment "*/") }
+| _         { let () = rollback lexbuf in
+              let len = thread.len in
+              let thread,
+                  status = scan_utf8 "*/" thread state lexbuf in
+              let delta = thread.len - len in
+              let pos   = state.pos#shift_one_uchar delta in
+              match status with
+                Stdlib.Ok () ->
+                  scan_reasonligo_block thread {state with pos} lexbuf
+              | Error error ->
+                  let region = Region.make ~start:state.pos ~stop:pos
+                  in fail region error }
@@ -798,24 +856,36 @@ and scan_line thread state = parse
 | _         { let () = rollback lexbuf in
               let len = thread.len in
               let thread,
-                  status = scan_utf8 thread state lexbuf in
+                  status = scan_utf8_inline thread state lexbuf in
               let delta = thread.len - len in
               let pos   = state.pos#shift_one_uchar delta in
               match status with
-                None -> scan_line thread {state with pos} lexbuf
-              | Some error ->
+                Stdlib.Ok () ->
+                  scan_line thread {state with pos} lexbuf
+              | Error error ->
                   let region = Region.make ~start:state.pos ~stop:pos
                   in fail region error }

-and scan_utf8 thread state = parse
-  eof { fail thread.opening Unterminated_comment }
+and scan_utf8 closing thread state = parse
+  eof { fail thread.opening (Unterminated_comment closing) }
 | _ as c { let thread = push_char c thread in
            let lexeme = Lexing.lexeme lexbuf in
            let () = state.supply (Bytes.of_string lexeme) 0 1 in
            match Uutf.decode state.decoder with
-             `Uchar _     -> thread, None
-           | `Malformed _ -> thread, Some Invalid_utf8_sequence
-           | `Await       -> scan_utf8 thread state lexbuf
+             `Uchar _     -> thread, Stdlib.Ok ()
+           | `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
+           | `Await       -> scan_utf8 closing thread state lexbuf
+           | `End         -> assert false }
+
+and scan_utf8_inline thread state = parse
+  eof { thread, Stdlib.Ok () }
+| _ as c { let thread = push_char c thread in
+           let lexeme = Lexing.lexeme lexbuf in
+           let () = state.supply (Bytes.of_string lexeme) 0 1 in
+           match Uutf.decode state.decoder with
+             `Uchar _     -> thread, Stdlib.Ok ()
+           | `Malformed _ -> thread, Stdlib.Error Invalid_utf8_sequence
+           | `Await       -> scan_utf8_inline thread state lexbuf
            | `End         -> assert false }

 (* END LEXER DEFINITION *)
@@ -876,6 +946,13 @@ type input =
   | Channel of in_channel
   | Buffer of Lexing.lexbuf

+(* Checking if a lexer input is a file *)
+
+let is_file = function
+  File "-" | File "" -> false
+| File _ -> true
+| Stdin | String _ | Channel _ | Buffer _ -> false
+
 type instance = {
   input    : input;
   read     : log:logger -> Lexing.lexbuf -> token;
@@ -909,7 +986,7 @@ let lexbuf_from_input = function
     Ok (Lexing.from_channel chan, close)
 | Buffer b -> Ok (b, fun () -> ())

-let open_token_stream input =
+let open_token_stream (lang: language) input =
   let file_path = match input with
                     File file_path ->
                       if file_path = "-" then "" else file_path
@@ -925,7 +1002,8 @@ let open_token_stream input =
                  pos;
                  markup = [];
                  decoder;
-                 supply} in
+                 supply;
+                 lang} in
   let get_pos  () = !state.pos
   and get_last () = !state.last
diff --git a/src/passes/1-parser/shared/LexerLog.ml b/src/passes/1-parser/shared/LexerLog.ml
index 5d1260f73..c338efe23 100644
--- a/src/passes/1-parser/shared/LexerLog.ml
+++ b/src/passes/1-parser/shared/LexerLog.ml
@@ -7,15 +7,22 @@ module type S =
     module Lexer : Lexer.S

     val output_token :
-      ?offsets:bool -> [`Byte | `Point] ->
-      EvalOpt.command -> out_channel ->
-      Markup.t list -> Lexer.token -> unit
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.command ->
+      out_channel ->
+      Markup.t list ->
+      Lexer.token ->
+      unit

     type file_path = string

     val trace :
-      ?offsets:bool -> [`Byte | `Point] ->
-      Lexer.input -> EvalOpt.command ->
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.language ->
+      Lexer.input ->
+      EvalOpt.command ->
       (unit, string Region.reg) Stdlib.result
   end
@@ -49,9 +56,9 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =

     type file_path = string

-    let trace ?(offsets=true) mode input command :
+    let trace ?(offsets=true) mode lang input command :
           (unit, string Region.reg) Stdlib.result =
-      match Lexer.open_token_stream input with
+      match Lexer.open_token_stream lang input with
         Ok Lexer.{read; buffer; close; _} ->
           let log = output_token ~offsets mode command stdout
           and close_all () = flush_all (); close () in
diff --git a/src/passes/1-parser/shared/LexerLog.mli b/src/passes/1-parser/shared/LexerLog.mli
index 9e2bbce85..e4bd05133 100644
--- a/src/passes/1-parser/shared/LexerLog.mli
+++ b/src/passes/1-parser/shared/LexerLog.mli
@@ -5,15 +5,22 @@ module type S =
     module Lexer : Lexer.S

     val output_token :
-      ?offsets:bool -> [`Byte | `Point] ->
-      EvalOpt.command -> out_channel ->
-      Markup.t list -> Lexer.token -> unit
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.command ->
+      out_channel ->
+      Markup.t list ->
+      Lexer.token ->
+      unit

     type file_path = string

     val trace :
-      ?offsets:bool -> [`Byte | `Point] ->
-      Lexer.input -> EvalOpt.command ->
+      ?offsets:bool ->
+      [`Byte | `Point] ->
+      EvalOpt.language ->
+      Lexer.input ->
+      EvalOpt.command ->
       (unit, string Region.reg) Stdlib.result
   end
diff --git a/src/passes/1-parser/shared/LexerUnit.ml b/src/passes/1-parser/shared/LexerUnit.ml
index 8b6d427e1..04ec04feb 100644
--- a/src/passes/1-parser/shared/LexerUnit.ml
+++ b/src/passes/1-parser/shared/LexerUnit.ml
@@ -20,7 +20,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
     let scan () : (Lexer.token list, string Region.reg) Stdlib.result =
       (* Preprocessing the input source *)

-      let preproc cin =
+      let preproc ~is_file cin =
        let buffer = Lexing.from_channel cin in
        let open Lexing in
        let () =
@@ -29,7 +29,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
          | Some pos_fname ->
              buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
        let opt = (IO.options :> Preprocessor.EvalOpt.options) in
-        match Preproc.lex opt buffer with
+        match Preproc.lex ~is_file opt buffer with
          Stdlib.Error (pp_buffer, err) ->
            if SSet.mem "preproc" IO.options#verbose then
              Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
@@ -40,7 +40,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
            (* Running the lexer on the preprocessed input *)

            let source = Lexer.String (Buffer.contents pp_buffer) in
-            match Lexer.open_token_stream source with
+            match Lexer.open_token_stream IO.options#lang source with
              Ok Lexer.{read; buffer; close; _} ->
                let close_all () = flush_all (); close () in
                let rec read_tokens tokens =
@@ -63,9 +63,9 @@ module Make (IO: IO) (Lexer: Lexer.S) =
          | Stdlib.Error (Lexer.File_opening msg) ->
              flush_all (); Stdlib.Error (Region.wrap_ghost msg) in
      match IO.options#input with
-        Some "-" | None -> preproc stdin
+        Some "-" | None -> preproc ~is_file:false stdin
      | Some file_path ->
-          try open_in file_path |> preproc with
+          try open_in file_path |> preproc ~is_file:true with
            Sys_error msg -> Stdlib.Error (Region.wrap_ghost msg)

    (* Tracing the lexing *)
@@ -74,7 +74,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
    let trace () : (unit, string Region.reg) Stdlib.result =
      (* Preprocessing the input *)

-      let preproc cin =
+      let preproc ~is_file cin =
        let buffer = Lexing.from_channel cin in
        let open Lexing in
        let () =
@@ -83,7 +83,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
          | Some pos_fname ->
              buffer.lex_curr_p <- {buffer.lex_curr_p with pos_fname} in
        let opt = (IO.options :> Preprocessor.EvalOpt.options) in
-        match Preproc.lex opt buffer with
+        match Preproc.lex ~is_file opt buffer with
          Stdlib.Error (pp_buffer, err) ->
            if SSet.mem "preproc" IO.options#verbose then
              Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
@@ -99,11 +99,12 @@ module Make (IO: IO) (Lexer: Lexer.S) =
                end
            else Log.trace ~offsets:IO.options#offsets
                           IO.options#mode
+                           IO.options#lang
                           (Lexer.String preproc_str)
                           IO.options#cmd
      in match IO.options#input with
-           Some "-" | None -> preproc stdin
+           Some "-" | None -> preproc ~is_file:false stdin
        | Some file_path ->
-            try open_in file_path |> preproc with
+            try open_in file_path |> preproc ~is_file:true with
              Sys_error msg -> Stdlib.Error (Region.wrap_ghost msg)
  end
diff --git a/src/passes/1-parser/shared/ParserUnit.ml b/src/passes/1-parser/shared/ParserUnit.ml
index 5c5609d44..a32030a7b 100644
--- a/src/passes/1-parser/shared/ParserUnit.ml
+++ b/src/passes/1-parser/shared/ParserUnit.ml
@@ -151,15 +151,6 @@ module Make (Lexer: Lexer.S)
          end
        in flush_all (); close (); Ok ast

-    (* Checking if a lexer input is a file *)
-
-    let is_file input =
-      let open Lexer in
-      match input with
-        File "-" | File "" -> false
-      | File _ -> true
-      | Stdin | String _ | Channel _ | Buffer _ -> false
-
     (* Wrapper for the parsers above *)

     let apply lexer_inst parser =
@@ -172,7 +163,7 @@ module Make (Lexer: Lexer.S)
      (* Lexing errors *)

      | exception Lexer.Error err ->
-          let file = is_file lexer_inst.Lexer.input in
+          let file = Lexer.is_file lexer_inst.Lexer.input in
          let error =
            Lexer.format_error ~offsets:SubIO.options#offsets
                               SubIO.options#mode err ~file
@@ -208,8 +199,8 @@ module Make (Lexer: Lexer.S)

    (* Preprocessing the input source *)

-    let preproc options lexbuf =
-      Preproc.lex (options :> Preprocessor.EvalOpt.options) lexbuf
+    let preproc ~is_file options lexbuf =
+      Preproc.lex ~is_file (options :> Preprocessor.EvalOpt.options) lexbuf

    (* Parsing a contract *)

@@ -220,13 +211,13 @@ module Make (Lexer: Lexer.S)
      | Ok (lexbuf, close) ->
         (* Preprocessing the input source *)

-         match preproc options lexbuf with
+         match preproc ~is_file:(Lexer.is_file input) options lexbuf with
           Stdlib.Error (pp_buffer, err) ->
             if SSet.mem "preproc" options#verbose then
               Printf.printf "%s\n%!" (Buffer.contents pp_buffer);
             let formatted =
               Preproc.format ~offsets:options#offsets
-                              ~file:(is_file input)
+                              ~file:(Lexer.is_file input)
                              err
             in close (); Stdlib.Error formatted
         | Stdlib.Ok buffer ->
@@ -234,7 +225,7 @@ module Make (Lexer: Lexer.S)
             let () = close () in
             let input' = Lexer.String (Buffer.contents buffer) in
-             match Lexer.open_token_stream input' with
+             match Lexer.open_token_stream options#lang input' with
               Ok instance ->
                 apply instance parser
             | Stdlib.Error (Lexer.File_opening msg) ->
                 Stdlib.Error (Region.wrap_ghost msg)
diff --git a/src/test/contracts/website2.religo b/src/test/contracts/website2.religo
index f9b936047..e8c17cf82 100644
--- a/src/test/contracts/website2.religo
+++ b/src/test/contracts/website2.religo
@@ -1,4 +1,4 @@
-(* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE *)
+/* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE */

 type storage = int;

@@ -22,4 +22,4 @@ let main = ((p,storage): (parameter, storage)) => {
   ([]: list (operation), storage);
 };

-(* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE *)
+/* IF YOU CHANGE THIS, CHANGE THE EXAMPLE ON THE FRONT PAGE OF THE WEBSITE */
diff --git a/vendors/Preprocessor/Preproc.mli b/vendors/Preprocessor/Preproc.mli
index d39b95618..56ddc8c64 100644
--- a/vendors/Preprocessor/Preproc.mli
+++ b/vendors/Preprocessor/Preproc.mli
@@ -14,9 +14,9 @@ type error =
 | Invalid_line_indicator of string
 | No_line_indicator
 | End_line_indicator
-| Newline_in_string
-| Open_comment
-| Open_string
+| Newline_in_string (*XXX*)
+| Open_comment (*XXX*)
+| Open_string (*XXX*)
 | Dangling_endif
 | Open_region_in_conditional
 | Dangling_endregion
@@ -41,6 +41,7 @@ val format :
 (* Preprocessing a lexing buffer *)

 val lex :
+  is_file:bool ->
   EvalOpt.options ->
   Lexing.lexbuf ->
   (Buffer.t, Buffer.t * error Region.reg) Stdlib.result
diff --git a/vendors/Preprocessor/Preproc.mll b/vendors/Preprocessor/Preproc.mll
index 35e0a5cac..956919933 100644
--- a/vendors/Preprocessor/Preproc.mll
+++ b/vendors/Preprocessor/Preproc.mll
@@ -90,14 +90,15 @@ in
    function *)

 type state = {
-  env    : Env.t;
-  mode   : mode;
-  offset : offset;
-  trace  : trace;
-  out    : Buffer.t;
-  incl   : in_channel list;
-  opt    : EvalOpt.options;
-  dir    : string list
+  env     : Env.t;
+  mode    : mode;
+  offset  : offset;
+  trace   : trace;
+  out     : Buffer.t;
+  incl    : in_channel list;
+  opt     : EvalOpt.options;
+  dir     : string list;
+  is_file : bool
 }

 (* Directories *)
@@ -738,8 +739,9 @@ and in_string opening state = parse
 and preproc state = parse
   eof { state }
 | _  { rollback lexbuf;
-       print state (sprintf "# 1 \"%s\"\n"
-                            Lexing.(lexbuf.lex_start_p.pos_fname));
+       if state.is_file then
+         print state (sprintf "# 1 \"%s\"\n"
+                              Lexing.(lexbuf.lex_start_p.pos_fname));
       scan state lexbuf }

 {
@@ -749,7 +751,7 @@ and preproc state = parse
   the trace is empty at the end. Note that we discard the state at
   the end. *)

-let lex opt buffer =
+let lex ~is_file opt buffer =
  let state = {
    env    = Env.empty;
    mode   = Copy;
@@ -758,7 +760,8 @@ and preproc state = parse
    out    = Buffer.create 80;
    incl   = [];
    opt;
-    dir    = []
+    dir    = [];
+    is_file;
  } in
  match preproc state buffer with
    state -> List.iter close_in state.incl;
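
The hunks above thread the concrete syntax through the lexer: [Lexer.open_token_stream] now takes a language tag ([`PascaLIGO], [`CameLIGO] or [`ReasonLIGO]) so that block comments are scanned with the delimiters of that syntax and [Unterminated_comment] can name the expected closing token. The sketch below shows one way a client could choose the tag and drain a token stream; the module [MyLexer], the helper [language_of_path], its extension table and the use of [Token.is_eof] are assumptions of this example, not definitions introduced by this patch.

(* Sketch only: pick the language tag from the file extension and read
   all tokens.  [MyLexer] stands for some instance of [Lexer.Make];
   [language_of_path] and its table are hypothetical helpers. *)
let language_of_path path : [`PascaLIGO | `CameLIGO | `ReasonLIGO] option =
  match Filename.extension path with
    ".ligo"   -> Some `PascaLIGO
  | ".mligo"  -> Some `CameLIGO
  | ".religo" -> Some `ReasonLIGO
  | _         -> None

let tokens_of_file path =
  match language_of_path path with
    None -> Stdlib.Error "Unrecognised file extension."
  | Some lang ->
      match MyLexer.open_token_stream lang (MyLexer.File path) with
        Ok MyLexer.{read; buffer; close; _} ->
          (* [read] may still raise [MyLexer.Error] on a lexing error. *)
          let rec drain acc =
            let token = read ~log:(fun _ _ -> ()) buffer in
            if MyLexer.Token.is_eof token
            then (close (); List.rev acc)
            else drain (token :: acc)
          in Stdlib.Ok (drain [])
      | Stdlib.Error (MyLexer.File_opening msg) -> Stdlib.Error msg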
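
The preprocessor entry point changes in the same spirit: [Preproc.lex] now requires [~is_file], and the "# 1" line directive is emitted only when the source really comes from a named file. A hedged sketch of preprocessing an in-memory string follows; the module path [Preprocessor.Preproc] and the pre-built [opts] value are assumptions here.

(* Sketch only: preprocess a string held in memory.  Passing
   [~is_file:false] suppresses the synthetic "# 1" line directive,
   mirroring how LexerUnit now calls its [preproc] helper for stdin. *)
let preprocess_string (opts : Preprocessor.EvalOpt.options) (src : string) =
  let buffer = Lexing.from_string src in
  match Preprocessor.Preproc.lex ~is_file:false opts buffer with
    Stdlib.Ok out -> Stdlib.Ok (Buffer.contents out)
  | Stdlib.Error (_partial, err) -> Stdlib.Error err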