Refactoring of the front-end towards integration of the local builds
and the global build, using the parser error messages, for instance.
Christian Rinderknecht 2020-01-23 18:28:04 +01:00
parent c73c563461
commit 4f4294bf56
23 changed files with 1007 additions and 589 deletions
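
Across the dialect front-ends touched below (CameLIGO, PascaLIGO, ReasonLIGO), the entry points now share one shape: a mock IO module holding the CLI options, the shared ParserUnit functor instantiated on it, a lexer instance opened on an input variant, and a parser thunk handed to a common parse wrapper that turns lexer, parser and scoping exceptions into front-end errors. A minimal sketch of that shape, lifted from the new parse_string in the CameLIGO changes below (PreIO, PreUnit and parse are the helpers this commit introduces; nothing here is new code):

    (* Sketch: the common entry-point pattern after this refactoring. *)
    let parse_string (s: string) =
      let module IO =                    (* mock CLI options for this run *)
        struct
          let ext = PreIO.ext
          let options = PreIO.pre_options ~expr:false
        end in
      let module Unit = PreUnit (IO) in  (* shared ParserUnit instance *)
      let instance =                     (* lexer opened on a string input *)
        match Lexer.open_token_stream (Lexer.String s) with
          Ok instance -> instance
        | Stdlib.Error _ -> assert false (* no file opening involved *) in
      let thunk () = Unit.apply instance Unit.parse_contract in
      parse (module IO) thunk            (* maps exceptions to front-end errors *)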

View File

@ -1,129 +1,216 @@
open Trace open Trace
module Parser = Parser_cameligo.Parser
module AST = Parser_cameligo.AST module AST = Parser_cameligo.AST
module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_cameligo.LexToken module LexToken = Parser_cameligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_cameligo.ParErr
module Errors = struct (* Mock IOs TODO: Fill them with CLI options *)
let lexer_error (e: Lexer.error AST.reg) = module type IO =
let title () = "lexer error" in sig
let message () = Lexer.error_to_string e.value in val ext : string
let data = [ val options : EvalOpt.options
("parser_loc", end
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region
)
] in
error ~data title message
let parser_error source (start: Lexing.position) (stop: Lexing.position) lexbuf = module PreIO =
let title () = "parser error" in struct
let file = if source = "" then let ext = ".ligo"
"" let pre_options =
else EvalOpt.make ~input:None
Format.sprintf "In file \"%s|%s\"" start.pos_fname source ~libs:[]
in ~verbose:Utils.String.Set.empty
let str = Format.sprintf ~offsets:true
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n" ~mode:`Point
(Lexing.lexeme lexbuf) ~cmd:EvalOpt.Quiet
start.pos_lnum (start.pos_cnum - start.pos_bol) ~mono:true
stop.pos_lnum (stop.pos_cnum - stop.pos_bol) end
file
in
let message () = str in
let loc = if start.pos_cnum = -1 then
Region.make
~start:(Pos.min ~file:source)
~stop:(Pos.from_byte stop)
else
Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop)
in
let data =
[
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
]
in
error ~data title message
let unrecognized_error source (start: Lexing.position) (stop: Lexing.position) lexbuf = module Parser =
let title () = "unrecognized error" in struct
let file = if source = "" then type ast = AST.t
"" type expr = AST.expr
else include Parser_cameligo.Parser
Format.sprintf "In file \"%s|%s\"" start.pos_fname source end
in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop)
in
let data = [
("unrecognized_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
] in
error ~data title message
end module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_cameligo.ParserLog
end
open Errors module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a module Errors =
struct
let reserved_name Region.{value; region} =
let title () = Printf.sprintf "\nReserved name \"%s\"" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let parse (parser: 'a parser) source lexbuf = let duplicate_variant Region.{value; region} =
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in let title () =
let result = Printf.sprintf "\nDuplicate variant \"%s\" in this \
try type declaration" value in
ok (parser read lexbuf) let message () = "" in
with let data = [
| Parser.Error -> ("location",
let start = Lexing.lexeme_start_p lexbuf in fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
let stop = Lexing.lexeme_end_p lexbuf in in error ~data title message
fail @@ (parser_error source start stop lexbuf)
| Lexer.Error e ->
fail @@ (lexer_error e)
| _ ->
let _ = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ (unrecognized_error source start stop lexbuf)
in
close ();
result
let parse_file (source: string) : AST.t result = let non_linear_pattern Region.{value; region} =
let title () =
Printf.sprintf "\nRepeated variable \"%s\" in this pattern" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let duplicate_field Region.{value; region} =
let title () =
Printf.sprintf "\nDuplicate field name \"%s\" \
in this record declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let parser_error Region.{value; region} =
let title () = ""
and message () = value
and loc = region in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
let lexer_error (e: Lexer.error AST.reg) =
let title () = "\nLexer error" in
let message () = Lexer.error_to_string e.value in
let data = [
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)]
in error ~data title message
end
let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let mk_error error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
(* Scoping errors *)
Stdlib.Ok semantic_value -> ok semantic_value
| Stdlib.Error error -> fail @@ Errors.parser_error error
| exception Lexer.Error e -> fail @@ Errors.lexer_error e
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Reserved name.\nHint: Change the name.\n", None, invalid
in fail @@ Errors.reserved_name @@ mk_error point)
| exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region in
let point =
"Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n",
None, token
in fail @@ Errors.duplicate_variant @@ mk_error point
| exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid
in fail @@ Errors.non_linear_pattern @@ mk_error point)
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid
in fail @@ Errors.duplicate_field @@ mk_error point)
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
end in
let pp_input = let pp_input =
let prefix = Filename.(source |> basename |> remove_extension) let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.mligo" and suffix = ".pp.ligo"
in prefix ^ suffix in in prefix ^ suffix in
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s" let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
source pp_input in source pp_input in
let%bind () = sys_command cpp_cmd in let%bind () = sys_command cpp_cmd in
let%bind channel = let%bind channel =
generic_try (simple_error "error opening file") @@ generic_try (simple_error "Error when opening file") @@
(fun () -> open_in pp_input) in (fun () -> open_in pp_input) in
let lexbuf = Lexing.from_channel channel in let module Unit = PreUnit (IO) in
parse (Parser.contract) source lexbuf let instance =
match Lexer.open_token_stream (Lexer.Channel channel) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
let parse_string (s:string) : AST.t result = let parse_string (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse Parser.contract "" lexbuf struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
let parse_expression (s:string) : AST.expr result = let parse_expression (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse Parser.interactive_expr "" lexbuf struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:true
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_expr in
parse (module IO) thunk

View File

@ -27,12 +27,11 @@ module Unit =
(* Main *) (* Main *)
let issue_error point = let issue_error error : ('a, string Region.reg) Stdlib.result =
let error = Unit.format_error ~offsets:IO.options#offsets Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point IO.options#mode error)
in Stdlib.Error error
let parse parser : ('a,string) Stdlib.result = let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with try parser () with
(* Scoping errors *) (* Scoping errors *)
@ -81,11 +80,61 @@ let parse parser : ('a,string) Stdlib.result =
None, invalid None, invalid
in issue_error point) in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
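
For concreteness, a worked example with hypothetical values: if IO.options#input is Some "contract.ligo", IO.options#libs is ["vendor"], IO.ext is ".ligo" and "cpp" is in the verbose set, then lib_path is " -I vendor", pp_input is "contract.pp.ligo", and

    cpp_cmd = "cpp -traditional-cpp -I vendor contract.ligo > contract.pp.ligo"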
let () = let () =
if IO.options#expr if Sys.command cpp_cmd <> 0 then
then match parse (fun () -> Unit.parse Unit.parse_expr) with Printf.eprintf "External error: \"%s\" failed." cpp_cmd
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg (* Instantiating the lexer and calling the parser *)
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> () let lexer_inst =
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -15,17 +15,16 @@
(name parser_cameligo) (name parser_cameligo)
(public_name ligo.parser.cameligo) (public_name ligo.parser.cameligo)
(modules (modules
Scoping AST cameligo Parser ParserLog LexToken) Scoping AST cameligo Parser ParserLog LexToken ParErr)
(libraries (libraries
menhirLib menhirLib
parser_shared parser_shared
str str
simple-utils simple-utils
tezos-utils tezos-utils)
getopt)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared))) (flags (:standard -open Parser_shared -open Simple_utils)))
;; Build of the unlexer (for covering the ;; Build of the unlexer (for covering the
;; error states of the LR automaton) ;; error states of the LR automaton)
@ -52,8 +51,7 @@
(executable (executable
(name ParserMain) (name ParserMain)
(libraries parser_cameligo) (libraries parser_cameligo)
(modules (modules ParserMain)
ParErr ParserMain)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))

View File

@ -4,151 +4,235 @@ module AST = Parser_pascaligo.AST
module LexToken = Parser_pascaligo.LexToken module LexToken = Parser_pascaligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer.Make(LexToken)
module Scoping = Parser_pascaligo.Scoping module Scoping = Parser_pascaligo.Scoping
module Parser = Parser_pascaligo.Parser module Region = Simple_utils.Region
module ParErr = Parser_pascaligo.ParErr
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~input:None
~libs:[]
~verbose:Utils.String.Set.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true
end
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.Parser
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_pascaligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Errors = module Errors =
struct struct
let reserved_name Region.{value; region} = let reserved_name Region.{value; region} =
let title () = Printf.sprintf "reserved name \"%s\"" value in let title () = Printf.sprintf "\nReserved name \"%s\"" value in
let message () = "" in let message () = "" in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)] fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let non_linear_pattern Region.{value; region} =
let title () =
Printf.sprintf "repeated variable \"%s\" in this pattern" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message in error ~data title message
let duplicate_parameter Region.{value; region} = let duplicate_parameter Region.{value; region} =
let title () = let title () =
Printf.sprintf "duplicate parameter \"%s\"" value in Printf.sprintf "\nDuplicate parameter \"%s\"" value in
let message () = "" in let message () = "" in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)] fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message in error ~data title message
let duplicate_variant Region.{value; region} = let duplicate_variant Region.{value; region} =
let title () = let title () =
Printf.sprintf "duplicate variant \"%s\" in this\ Printf.sprintf "\nDuplicate variant \"%s\" in this \
type declaration" value in type declaration" value in
let message () = "" in let message () = "" in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)] fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message in error ~data title message
let unrecognized_error source (start: Lexing.position) let non_linear_pattern Region.{value; region} =
(stop: Lexing.position) lexbuf = let title () =
let title () = "unrecognized error" in Printf.sprintf "\nRepeated variable \"%s\" in this pattern" value in
let file = let message () = "" in
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let message () =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
stop.pos_lnum (stop.pos_cnum - stop.pos_bol)
file in
let loc = Region.make ~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop) in
let data = [ let data = [
("unrecognized_loc", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message in error ~data title message
let parser_error source (start: Lexing.position) let duplicate_field Region.{value; region} =
(stop: Lexing.position) lexbuf = let title () =
let title () = "parser error" in Printf.sprintf "\nDuplicate field name \"%s\" \
let file = in this record declaration" value in
if source = "" then "" let message () = "" in
else let data = [
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in ("location",
let message () = fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
Format.sprintf in error ~data title message
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf) let parser_error Region.{value; region} =
start.pos_lnum (start.pos_cnum - start.pos_bol) let title () = ""
stop.pos_lnum (stop.pos_cnum - stop.pos_bol) and message () = value
file in and loc = region in
let loc =
if start.pos_cnum = -1 then
Region.make
~start:(Pos.min ~file:source) ~stop:(Pos.from_byte stop)
else
Region.make ~start:(Pos.from_byte start)
~stop:(Pos.from_byte stop) in
let data = let data =
[("parser_loc", [("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] in fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
error ~data title message in error ~data title message
let lexer_error (e: Lexer.error AST.reg) = let lexer_error (e: Lexer.error AST.reg) =
let title () = "lexer error" in let title () = "\nLexer error" in
let message () = Lexer.error_to_string e.value in let message () = Lexer.error_to_string e.value in
let data = [ let data = [
("parser_loc", ("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)] fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)]
in error ~data title message in error ~data title message
end end
open Errors let parse (module IO : IO) parser =
let module Unit = PreUnit (IO) in
let mk_error error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode error in
match parser () with
(* Scoping errors *)
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a Stdlib.Ok semantic_value -> ok semantic_value
| Stdlib.Error error -> fail @@ Errors.parser_error error
| exception Lexer.Error e -> fail @@ Errors.lexer_error e
let parse (parser: 'a parser) source lexbuf = | exception Scoping.Error (Scoping.Reserved_name name) ->
let Lexer.{read; close; _} = Lexer.open_token_stream None in let token =
let result = Lexer.Token.mk_ident name.Region.value name.Region.region in
try ok (parser read lexbuf) with (match token with
Lexer.Error e -> (* Cannot fail because [name] is a not a
fail @@ lexer_error e reserved name for the lexer. *)
| Parser.Error -> Stdlib.Error _ -> assert false
let start = Lexing.lexeme_start_p lexbuf in | Ok invalid ->
let stop = Lexing.lexeme_end_p lexbuf in let point =
fail @@ parser_error source start stop lexbuf "Reserved name.\nHint: Change the name.\n", None, invalid
| Scoping.Error (Scoping.Non_linear_pattern var) -> in fail @@ Errors.reserved_name @@ mk_error point)
fail @@ non_linear_pattern var
| Scoping.Error (Duplicate_parameter name) ->
fail @@ duplicate_parameter name
| Scoping.Error (Duplicate_variant name) ->
fail @@ duplicate_variant name
| Scoping.Error (Reserved_name name) ->
fail @@ reserved_name name
| _ ->
let () = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let stop = Lexing.lexeme_end_p lexbuf in
fail @@ unrecognized_error source start stop lexbuf
in close (); result
let parse_file (source: string) : AST.t result = | exception Scoping.Error (Scoping.Duplicate_parameter name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Duplicate parameter.\nHint: Change the name.\n",
None, invalid
in fail @@ Errors.duplicate_parameter @@ mk_error point)
| exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region in
let point =
"Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n",
None, token
in fail @@ Errors.duplicate_variant @@ mk_error point
| exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid
in fail @@ Errors.non_linear_pattern @@ mk_error point)
| exception Scoping.Error (Scoping.Duplicate_field name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Duplicate field name in this record declaration.\n\
Hint: Change the name.\n",
None, invalid
in fail @@ Errors.duplicate_field @@ mk_error point)
let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
end in
let pp_input = let pp_input =
let prefix = Filename.(source |> basename |> remove_extension) let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.ligo" and suffix = ".pp.ligo"
in prefix ^ suffix in in prefix ^ suffix in
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s" let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
source pp_input in source pp_input in
let%bind () = sys_command cpp_cmd in let%bind () = sys_command cpp_cmd in
let%bind channel = let%bind channel =
generic_try (simple_error "error opening file") @@ generic_try (simple_error "Error when opening file") @@
(fun () -> open_in pp_input) in (fun () -> open_in pp_input) in
let lexbuf = Lexing.from_channel channel in let module Unit = PreUnit (IO) in
parse (Parser.contract) source lexbuf let instance =
match Lexer.open_token_stream (Lexer.Channel channel) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
let parse_string (s:string) : AST.t result = let parse_string (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse (Parser.contract) "" lexbuf struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
let parse_expression (s:string) : AST.expr result = let parse_expression (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse (Parser.interactive_expr) "" lexbuf struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:true
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_expr in
parse (module IO) thunk

View File

@ -17,6 +17,7 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Utils.ml ../shared/Utils.ml
../shared/ParserAPI.mli ../shared/ParserAPI.mli
../shared/ParserAPI.ml ../shared/ParserAPI.ml
../shared/LexerUnit.mli
../shared/LexerUnit.ml ../shared/LexerUnit.ml
../shared/ParserUnit.mli ../shared/ParserUnit.mli
../shared/ParserUnit.ml ../shared/ParserUnit.ml

View File

@ -1,4 +1,6 @@
(** Driver for the PascaLIGO lexer *) (* Driver for the PascaLIGO lexer *)
module Region = Simple_utils.Region
module IO = module IO =
struct struct
@ -11,4 +13,5 @@ module M = LexerUnit.Make (IO) (Lexer.Make (LexToken))
let () = let () =
match M.trace () with match M.trace () with
Stdlib.Ok _ -> () Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg | Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value

View File

@ -1,4 +1,4 @@
(** Driver for the PascaLIGO parser *) (* Driver for the PascaLIGO parser *)
module IO = module IO =
struct struct
@ -27,12 +27,11 @@ module Unit =
(* Main *) (* Main *)
let issue_error point = let issue_error error : ('a, string Region.reg) Stdlib.result =
let error = Unit.format_error ~offsets:IO.options#offsets Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point IO.options#mode error)
in Stdlib.Error error
let parse parser : ('a,string) Stdlib.result = let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with try parser () with
(* Scoping errors *) (* Scoping errors *)
@ -87,16 +86,67 @@ let parse parser : ('a,string) Stdlib.result =
reserved name for the lexer. *) reserved name for the lexer. *)
Stdlib.Error _ -> assert false Stdlib.Error _ -> assert false
| Ok invalid -> | Ok invalid ->
let point = "Duplicate field name in this record declaration.\n\ let point =
Hint: Change the name.\n", "Duplicate field name in this record declaration.\n\
None, invalid Hint: Change the name.\n",
in issue_error point) None, invalid
in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () = let () =
if IO.options#expr if Sys.command cpp_cmd <> 0 then
then match parse (fun () -> Unit.parse Unit.parse_expr) with Printf.eprintf "External error: \"%s\" failed." cpp_cmd
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg (* Instantiating the lexer and calling the parser *)
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> () let lexer_inst =
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -7,7 +7,7 @@
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --table --strict --external-tokens LexToken)) (flags -la 1 --table --strict --explain --external-tokens LexToken))
;; Build of the parser as a library ;; Build of the parser as a library
@ -20,8 +20,7 @@
menhirLib menhirLib
parser_shared parser_shared
hex hex
simple-utils simple-utils)
tezos-utils)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Parser_shared -open Simple_utils))) (flags (:standard -open Parser_shared -open Simple_utils)))
@ -52,8 +51,7 @@
(executable (executable
(name ParserMain) (name ParserMain)
(libraries parser_pascaligo) (libraries parser_pascaligo)
(modules (modules ParserMain)
ParserMain)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo))) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))

View File

@ -1,131 +1,228 @@
open Trace open Trace
module Parser = Parser_reasonligo.Parser
module AST = Parser_cameligo.AST module AST = Parser_cameligo.AST
module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_reasonligo.LexToken module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken) module Lexer = Lexer.Make(LexToken)
module SyntaxError = Parser_reasonligo.SyntaxError
module Scoping = Parser_cameligo.Scoping module Scoping = Parser_cameligo.Scoping
module Region = Simple_utils.Region
module ParErr = Parser_reasonligo.ParErr
module SyntaxError = Parser_reasonligo.SyntaxError
(* Mock IOs TODO: Fill them with CLI options *)
module type IO =
sig
val ext : string
val options : EvalOpt.options
end
module PreIO =
struct
let ext = ".ligo"
let pre_options =
EvalOpt.make ~input:None
~libs:[]
~verbose:Utils.String.Set.empty
~offsets:true
~mode:`Point
~cmd:EvalOpt.Quiet
~mono:true
end
module Parser =
struct
type ast = AST.t
type expr = AST.expr
include Parser_reasonligo.Parser
end
module ParserLog =
struct
type ast = AST.t
type expr = AST.expr
include Parser_cameligo.ParserLog
end
module PreUnit =
ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog)
module Errors = module Errors =
struct struct
let reserved_name Region.{value; region} =
let title () = Printf.sprintf "\nReserved name \"%s\"" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let duplicate_variant Region.{value; region} =
let title () =
Printf.sprintf "\nDuplicate variant \"%s\" in this \
type declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let non_linear_pattern Region.{value; region} =
let title () =
Printf.sprintf "\nRepeated variable \"%s\" in this pattern" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let duplicate_field Region.{value; region} =
let title () =
Printf.sprintf "\nDuplicate field name \"%s\" \
in this record declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)]
in error ~data title message
let parser_error Region.{value; region} =
let title () = ""
and message () = value
and loc = region in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
let lexer_error (e: Lexer.error AST.reg) = let lexer_error (e: Lexer.error AST.reg) =
let title () = "lexer error" in let title () = "\nLexer error" in
let message () = Lexer.error_to_string e.value in let message () = Lexer.error_to_string e.value in
let data = [ let data = [
("parser_loc", ("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)] fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region)]
in error ~data title message in error ~data title message
let wrong_function_arguments expr = let wrong_function_arguments (expr: AST.expr) =
let title () = "wrong function arguments" in let title () = "\nWrong function arguments" in
let message () = "" in let message () = "" in
let expression_loc = AST.expr_to_region expr in let expression_loc = AST.expr_to_region expr in
let data = [ let data = [
("location", ("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)] fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)]
in error ~data title message in error ~data title message
end
let parser_error source (start: Lexing.position) let parse (module IO : IO) parser =
(end_: Lexing.position) lexbuf = let module Unit = PreUnit (IO) in
let title () = "parser error" in let mk_error error =
let file = Unit.format_error ~offsets:IO.options#offsets
if source = "" then "" IO.options#mode error in
else match parser () with
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in (* Scoping errors *)
let str =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file in
let message () = str in
let loc =
if start.pos_cnum = -1
then Region.make
~start:(Pos.min ~file:source)
~stop:(Pos.from_byte end_)
else Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_) in
let data =
[("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
let unrecognized_error source (start: Lexing.position) (end_: Lexing.position) lexbuf = Stdlib.Ok semantic_value -> ok semantic_value
let title () = "unrecognized error" in | Stdlib.Error error -> fail @@ Errors.parser_error error
let file = | exception Lexer.Error e -> fail @@ Errors.lexer_error e
if source = "" then ""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source in
let str =
Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_) in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)]
in error ~data title message
end | exception SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
fail @@ Errors.wrong_function_arguments expr
| exception Scoping.Error (Scoping.Reserved_name name) ->
let token =
Lexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
(* Cannot fail because [name] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Reserved name.\nHint: Change the name.\n", None, invalid
in fail @@ Errors.reserved_name @@ mk_error point)
open Errors | exception Scoping.Error (Scoping.Duplicate_variant name) ->
let token =
Lexer.Token.mk_constr name.Region.value name.Region.region in
let point =
"Duplicate constructor in this sum type declaration.\n\
Hint: Change the constructor.\n",
None, token
in fail @@ Errors.duplicate_variant @@ mk_error point
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a | exception Scoping.Error (Scoping.Non_linear_pattern var) ->
let token =
Lexer.Token.mk_ident var.Region.value var.Region.region in
(match token with
(* Cannot fail because [var] is not a
reserved name for the lexer. *)
Stdlib.Error _ -> assert false
| Ok invalid ->
let point =
"Repeated variable in this pattern.\n\
Hint: Change the name.\n",
None, invalid
in fail @@ Errors.non_linear_pattern @@ mk_error point)
let parse (parser: 'a parser) source lexbuf = | exception Scoping.Error (Scoping.Duplicate_field name) ->
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in let token =
let result = Lexer.Token.mk_ident name.Region.value name.Region.region in
try (match token with
ok (parser read lexbuf) (* Cannot fail because [name] is a not a
with reserved name for the lexer. *)
| SyntaxError.Error (WrongFunctionArguments e) -> Stdlib.Error _ -> assert false
fail @@ (wrong_function_arguments e) | Ok invalid ->
| Parser.Error -> let point =
let start = Lexing.lexeme_start_p lexbuf in "Duplicate field name in this record declaration.\n\
let end_ = Lexing.lexeme_end_p lexbuf in Hint: Change the name.\n",
fail @@ (parser_error source start end_ lexbuf) None, invalid
| Lexer.Error e -> in fail @@ Errors.duplicate_field @@ mk_error point)
fail @@ (lexer_error e)
| _ ->
let _ = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (unrecognized_error source start end_ lexbuf)
in
close ();
result
let parse_file (source: string) : AST.t result = let parse_file (source: string) =
let module IO =
struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
end in
let pp_input = let pp_input =
let prefix = Filename.(source |> basename |> remove_extension) let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.religo" and suffix = ".pp.ligo"
in prefix ^ suffix in in prefix ^ suffix in
let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s" let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s"
source pp_input in source pp_input in
let%bind () = sys_command cpp_cmd in let%bind () = sys_command cpp_cmd in
let%bind channel = let%bind channel =
generic_try (simple_error "error opening file") @@ generic_try (simple_error "Error when opening file") @@
(fun () -> open_in pp_input) in (fun () -> open_in pp_input) in
let lexbuf = Lexing.from_channel channel in let module Unit = PreUnit (IO) in
parse (Parser.contract) source lexbuf let instance =
match Lexer.open_token_stream (Lexer.Channel channel) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
let parse_string (s:string) : AST.t result = let parse_string (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse (Parser.contract) "" lexbuf struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:false
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_contract in
parse (module IO) thunk
let parse_expression (s:string) : AST.expr result = let parse_expression (s: string) =
let lexbuf = Lexing.from_string s in let module IO =
parse (Parser.interactive_expr) "" lexbuf struct
let ext = PreIO.ext
let options = PreIO.pre_options ~expr:true
end in
let module Unit = PreUnit (IO) in
let instance =
match Lexer.open_token_stream (Lexer.String s) with
Ok instance -> instance
| Stdlib.Error _ -> assert false (* No file opening *) in
let thunk () = Unit.apply instance Unit.parse_expr in
parse (module IO) thunk

View File

@ -232,11 +232,11 @@ field_decl:
let_declaration: let_declaration:
seq(Attr) "let" let_binding { seq(Attr) "let" let_binding {
let attributes = $1 in let attributes = $1 in
let kwd_let = $2 in let kwd_let = $2 in
let binding = $3 in let binding = $3 in
let value = kwd_let, binding, attributes in let value = kwd_let, binding, attributes in
let stop = expr_to_region binding.let_rhs in let stop = expr_to_region binding.let_rhs in
let region = cover $2 stop let region = cover $2 stop
in {region; value} } in {region; value} }
es6_func: es6_func:
@ -451,11 +451,9 @@ fun_expr:
}; };
region region
} }
| e -> ( | e ->
let open! SyntaxError in let open! SyntaxError in
raise (Error (WrongFunctionArguments e)) raise (Error (WrongFunctionArguments e)) in
)
in
let fun_args_to_pattern = function let fun_args_to_pattern = function
EAnnot { EAnnot {
value = { value = {

View File

@ -27,12 +27,11 @@ module Unit =
(* Main *) (* Main *)
let issue_error point = let issue_error error : ('a, string Region.reg) Stdlib.result =
let error = Unit.format_error ~offsets:IO.options#offsets Stdlib.Error (Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point IO.options#mode error)
in Stdlib.Error error
let parse parser : ('a,string) Stdlib.result = let parse parser : ('a, string Region.reg) Stdlib.result =
try parser () with try parser () with
(* Ad hoc errors from the parser *) (* Ad hoc errors from the parser *)
@ -43,10 +42,10 @@ let parse parser : ('a,string) Stdlib.result =
Examples of valid functions:\n\ Examples of valid functions:\n\
let x = (a: string, b: int) : int => 3;\n\ let x = (a: string, b: int) : int => 3;\n\
let x = (a: string) : string => \"Hello, \" ++ a;\n" let x = (a: string) : string => \"Hello, \" ++ a;\n"
and reg = AST.expr_to_region expr in and region = AST.expr_to_region expr in
let error = Unit.short_error ~offsets:IO.options#offsets let error = Unit.short_error ~offsets:IO.options#offsets
IO.options#mode msg reg IO.options#mode msg region
in Stdlib.Error error in Stdlib.Error Region.{value=error; region}
(* Scoping errors *) (* Scoping errors *)
@ -96,11 +95,61 @@ let parse parser : ('a,string) Stdlib.result =
None, invalid None, invalid
in issue_error point) in issue_error point)
(* Preprocessing the input source with CPP *)
module SSet = Utils.String.Set
let sprintf = Printf.sprintf
(* Path for CPP inclusions (#include) *)
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
let () = let () =
if IO.options#expr if Sys.command cpp_cmd <> 0 then
then match parse (fun () -> Unit.parse Unit.parse_expr) with Printf.eprintf "External error: \"%s\" failed." cpp_cmd
Stdlib.Ok _ -> ()
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg (* Instantiating the lexer and calling the parser *)
else match parse (fun () -> Unit.parse Unit.parse_contract) with
Stdlib.Ok _ -> () let lexer_inst =
| Error msg -> Printf.eprintf "\027[31m%s\027[0m%!" msg match Lexer.open_token_stream (Lexer.File pp_input) with
Ok instance ->
if IO.options#expr
then
match parse (fun () -> Unit.apply instance Unit.parse_expr) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value
else
(match parse (fun () -> Unit.apply instance Unit.parse_contract) with
Stdlib.Ok _ -> ()
| Error Region.{value; _} ->
Printf.eprintf "\027[31m%s\027[0m%!" value)
| Stdlib.Error (Lexer.File_opening msg) ->
Printf.eprintf "\027[31m%s\027[0m%!" msg

View File

@ -7,7 +7,7 @@
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --table --explain --strict --external-tokens LexToken)) (flags -la 1 --table --strict --explain --external-tokens LexToken))
;; Build of the parser as a library ;; Build of the parser as a library
@ -15,18 +15,16 @@
(name parser_reasonligo) (name parser_reasonligo)
(public_name ligo.parser.reasonligo) (public_name ligo.parser.reasonligo)
(modules (modules
SyntaxError reasonligo LexToken Parser) SyntaxError reasonligo LexToken ParErr Parser)
(libraries (libraries
menhirLib menhirLib
parser_shared parser_shared
parser_cameligo parser_cameligo
str str
simple-utils simple-utils)
tezos-utils
getopt)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))) (flags (:standard -open Parser_shared -open Simple_utils -open Parser_cameligo)))
;; Build of the unlexer (for covering the ;; Build of the unlexer (for covering the
;; error states of the LR automaton) ;; error states of the LR automaton)
@ -55,8 +53,7 @@
(libraries (libraries
parser_reasonligo parser_reasonligo
parser_cameligo) parser_cameligo)
(modules (modules ParserMain)
ParErr ParserMain)
(preprocess (preprocess
(pps bisect_ppx --conditional)) (pps bisect_ppx --conditional))
(flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo))) (flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo)))

View File

@ -145,7 +145,16 @@ module type S =
close : unit -> unit close : unit -> unit
} }
val open_token_stream : file_path option -> instance type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
val open_token_stream : input -> (instance, open_err) Stdlib.result
(* Error reporting *) (* Error reporting *)
@ -157,7 +166,7 @@ module type S =
val format_error : val format_error :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> string error Region.reg -> file:bool -> string Region.reg
end end
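
The lexer API changes from an optional file path to an input variant and a result: open_token_stream now takes File, Stdin, String, Channel or Buffer and reports failures as a File_opening error instead of raising Sys_error. A short usage sketch under the new signature (the file name is hypothetical; the pattern mirrors how LexerUnit and the ParserMain drivers consume it further down):

    (* Sketch: consuming the new open_token_stream API. *)
    let () =
      match Lexer.open_token_stream (Lexer.File "input.ligo") with
        Ok Lexer.{read; buffer; close; _} ->
          let _token = read ~log:(fun _ _ -> ()) buffer  (* scan one token *)
          in close ()                                    (* release the channel *)
      | Stdlib.Error (Lexer.File_opening msg) ->
          Printf.eprintf "%s\n%!" msg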

View File

@ -165,9 +165,18 @@ module type S =
get_last : unit -> Region.t; get_last : unit -> Region.t;
get_file : unit -> file_path; get_file : unit -> file_path;
close : unit -> unit close : unit -> unit
} }
val open_token_stream : file_path option -> instance type input =
File of file_path (* "-" means stdin *)
| Stdin
| String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
val open_token_stream : input -> (instance, open_err) Stdlib.result
(* Error reporting *) (* Error reporting *)
@ -179,7 +188,7 @@ module type S =
val format_error : val format_error :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
error Region.reg -> file:bool -> string error Region.reg -> file:bool -> string Region.reg
end end
(* The functorised interface (* The functorised interface
@ -443,8 +452,9 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
let format_error ?(offsets=true) mode Region.{region; value} ~file = let format_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value let msg = error_to_string value
and reg = region#to_string ~file ~offsets mode and reg = region#to_string ~file ~offsets mode in
in sprintf "Lexical error %s:\n%s" reg msg let value = sprintf "Lexical error %s:\n%s" reg msg
in Region.{value; region}
let fail region value = raise (Error Region.{region; value}) let fail region value = raise (Error Region.{region; value})
@ -864,10 +874,20 @@ type instance = {
close : unit -> unit close : unit -> unit
} }
let open_token_stream file_path_opt = type input =
let file_path = match file_path_opt with File of file_path (* "-" means stdin *)
None | Some "-" -> "" | Stdin
| Some file_path -> file_path in | String of string
| Channel of in_channel
| Buffer of Lexing.lexbuf
type open_err = File_opening of string
let open_token_stream input =
let file_path = match input with
File file_path ->
if file_path = "-" then "" else file_path
| _ -> "" in
let pos = Pos.min ~file:file_path in let pos = Pos.min ~file:file_path in
let buf_reg = ref (pos#byte, pos#byte) let buf_reg = ref (pos#byte, pos#byte)
and first_call = ref true and first_call = ref true
@ -934,11 +954,11 @@ let open_token_stream file_path_opt =
in fail region Missing_break in fail region Missing_break
| _ -> () in | _ -> () in
let rec read_token ?(log=fun _ _ -> ()) buffer = let rec read ?(log=fun _ _ -> ()) buffer =
match FQueue.deq !state.units with match FQueue.deq !state.units with
None -> None ->
scan buffer; scan buffer;
read_token ~log buffer read ~log buffer
| Some (units, (left_mark, token)) -> | Some (units, (left_mark, token)) ->
log left_mark token; log left_mark token;
state := {!state with units; state := {!state with units;
@ -948,15 +968,33 @@ let open_token_stream file_path_opt =
patch_buffer (Token.to_region token)#byte_pos buffer; patch_buffer (Token.to_region token)#byte_pos buffer;
token in token in
let cin = match file_path_opt with let buf_close_res =
None | Some "-" -> stdin match input with
| Some file_path -> open_in file_path in File "" | File "-" | Stdin ->
let buffer = Lexing.from_channel cin in Ok (Lexing.from_channel stdin, fun () -> close_in stdin)
let () = match file_path_opt with | File path ->
None | Some "-" -> () (try
| Some file_path -> reset ~file:file_path buffer let chan = open_in path in
and close () = close_in cin in let close () = close_in chan in
{read = read_token; buffer; get_win; get_pos; get_last; get_file; close} Ok (Lexing.from_channel chan, close)
with
Sys_error msg -> Stdlib.Error (File_opening msg))
| String s ->
Ok (Lexing.from_string s, fun () -> ())
| Channel chan ->
let close () = close_in chan in
Ok (Lexing.from_channel chan, close)
| Buffer b -> Ok (b, fun () -> ()) in
match buf_close_res with
Ok (buffer, close) ->
let () =
match input with
File path when path <> "" -> reset ~file:path buffer
| _ -> () in
let instance = {
read; buffer; get_win; get_pos; get_last; get_file; close}
in Ok instance
| Error _ as e -> e
end (* of functor [Make] in HEADER *) end (* of functor [Make] in HEADER *)
(* END TRAILER *) (* END TRAILER *)

View File

@ -1,4 +1,6 @@
(** Embedding the LIGO lexer in a debug module *) (* Embedding the LIGO lexer in a debug module *)
module Region = Simple_utils.Region
module type S = module type S =
sig sig
@ -14,7 +16,7 @@ module type S =
val trace : val trace :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
file_path option -> EvalOpt.command -> file_path option -> EvalOpt.command ->
(unit, string) Stdlib.result (unit, string Region.reg) Stdlib.result
end end
module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) = module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
@ -48,28 +50,31 @@ module Make (Lexer: Lexer.S) : (S with module Lexer = Lexer) =
type file_path = string type file_path = string
let trace ?(offsets=true) mode file_path_opt command : let trace ?(offsets=true) mode file_path_opt command :
(unit, string) Stdlib.result = (unit, string Region.reg) Stdlib.result =
try let input =
let Lexer.{read; buffer; close; _} = match file_path_opt with
Lexer.open_token_stream file_path_opt in Some file_path -> Lexer.File file_path
let log = output_token ~offsets mode command stdout | None -> Lexer.Stdin in
and close_all () = close (); close_out stdout in match Lexer.open_token_stream input with
let rec iter () = Ok Lexer.{read; buffer; close; _} ->
match read ~log buffer with let log = output_token ~offsets mode command stdout
token -> and close_all () = close (); close_out stdout in
if Token.is_eof token let rec iter () =
then Stdlib.Ok () match read ~log buffer with
else iter () token ->
| exception Lexer.Error error -> if Token.is_eof token
let file = then Stdlib.Ok ()
match file_path_opt with else iter ()
None | Some "-" -> false | exception Lexer.Error error ->
| Some _ -> true in let file =
let msg = match file_path_opt with
Lexer.format_error ~offsets mode ~file error None | Some "-" -> false
in Stdlib.Error msg in | Some _ -> true in
let result = iter () let msg =
in (close_all (); result) Lexer.format_error ~offsets mode ~file error
with Sys_error msg -> Stdlib.Error msg in Stdlib.Error msg in
let result = iter ()
in close_all (); result
| Stdlib.Error (Lexer.File_opening msg) ->
close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
end end

View File

@ -1,3 +1,5 @@
module Region = Simple_utils.Region
module type S = module type S =
sig sig
module Lexer : Lexer.S module Lexer : Lexer.S
@ -12,7 +14,7 @@ module type S =
val trace : val trace :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
file_path option -> EvalOpt.command -> file_path option -> EvalOpt.command ->
(unit, string) Stdlib.result (unit, string Region.reg) Stdlib.result
end end
module Make (Lexer: Lexer.S) : S with module Lexer = Lexer module Make (Lexer: Lexer.S) : S with module Lexer = Lexer

View File

@ -1,5 +1,7 @@
(* Functor to build a standalone LIGO lexer *) (* Functor to build a standalone LIGO lexer *)
module Region = Simple_utils.Region
module type IO = module type IO =
sig sig
val ext : string (* LIGO file extension *) val ext : string (* LIGO file extension *)
@ -49,7 +51,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
(* Running the lexer on the input file *) (* Running the lexer on the input file *)
let scan () : (Lexer.token list, string) Stdlib.result = let scan () : (Lexer.token list, string Region.reg) Stdlib.result =
(* Preprocessing the input *) (* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose if SSet.mem "cpp" IO.options#verbose
@ -59,36 +61,36 @@ module Make (IO: IO) (Lexer: Lexer.S) =
if Sys.command cpp_cmd <> 0 then if Sys.command cpp_cmd <> 0 then
let msg = let msg =
sprintf "External error: the command \"%s\" failed." cpp_cmd sprintf "External error: the command \"%s\" failed." cpp_cmd
in Stdlib.Error msg in Stdlib.Error (Region.wrap_ghost msg)
else else
try match Lexer.open_token_stream (Lexer.File pp_input) with
let Lexer.{read; buffer; close; _} = Ok Lexer.{read; buffer; close; _} ->
Lexer.open_token_stream (Some pp_input) in let close_all () = close (); close_out stdout in
let close_all () = close (); close_out stdout in let rec read_tokens tokens =
let rec read_tokens tokens = match read ~log:(fun _ _ -> ()) buffer with
match read ~log:(fun _ _ -> ()) buffer with token ->
token -> if Lexer.Token.is_eof token
if Lexer.Token.is_eof token then Stdlib.Ok (List.rev tokens)
then Stdlib.Ok (List.rev tokens) else read_tokens (token::tokens)
else read_tokens (token::tokens) | exception Lexer.Error error ->
| exception Lexer.Error error -> let file =
let file = match IO.options#input with
match IO.options#input with None | Some "-" -> false
None | Some "-" -> false | Some _ -> true in
| Some _ -> true in let msg =
let msg = Lexer.format_error ~offsets:IO.options#offsets
Lexer.format_error ~offsets:IO.options#offsets IO.options#mode ~file error
IO.options#mode ~file error in Stdlib.Error msg in
in Stdlib.Error msg in let result = read_tokens []
let result = read_tokens [] in close_all (); result
in close_all (); result | Stdlib.Error (Lexer.File_opening msg) ->
with Sys_error msg -> close_out stdout; Stdlib.Error msg close_out stdout; Stdlib.Error (Region.wrap_ghost msg)
(* Tracing the lexing (effectful) *) (* Tracing the lexing (effectful) *)
module Log = LexerLog.Make (Lexer) module Log = LexerLog.Make (Lexer)
let trace () : (unit, string) Stdlib.result = let trace () : (unit, string Region.reg) Stdlib.result =
(* Preprocessing the input *) (* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose if SSet.mem "cpp" IO.options#verbose
@ -98,7 +100,7 @@ module Make (IO: IO) (Lexer: Lexer.S) =
if Sys.command cpp_cmd <> 0 then if Sys.command cpp_cmd <> 0 then
let msg = let msg =
sprintf "External error: the command \"%s\" failed." cpp_cmd sprintf "External error: the command \"%s\" failed." cpp_cmd
in Stdlib.Error msg in Stdlib.Error (Region.wrap_ghost msg)
else else
Log.trace ~offsets:IO.options#offsets Log.trace ~offsets:IO.options#offsets
IO.options#mode IO.options#mode

View File

@ -1,5 +1,7 @@
(* Functor to build a standalone LIGO lexer *) (* Functor to build a standalone LIGO lexer *)
module Region = Simple_utils.Region
module type IO = module type IO =
sig sig
val ext : string (* LIGO file extension *) val ext : string (* LIGO file extension *)
@ -8,6 +10,6 @@ module type IO =
module Make (IO: IO) (Lexer: Lexer.S) : module Make (IO: IO) (Lexer: Lexer.S) :
sig sig
val scan : unit -> (Lexer.token list, string) Stdlib.result val scan : unit -> (Lexer.token list, string Region.reg) Stdlib.result
val trace : unit -> (unit, string) Stdlib.result val trace : unit -> (unit, string Region.reg) Stdlib.result
end end

View File

@ -18,6 +18,7 @@ module type PARSER =
val interactive_expr : val interactive_expr :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr (Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr
val contract : val contract :
(Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast (Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast
@ -35,6 +36,7 @@ module type PARSER =
sig sig
val interactive_expr : val interactive_expr :
Lexing.position -> expr MenhirInterpreter.checkpoint Lexing.position -> expr MenhirInterpreter.checkpoint
val contract : val contract :
Lexing.position -> ast MenhirInterpreter.checkpoint Lexing.position -> ast MenhirInterpreter.checkpoint
end end
@ -95,7 +97,9 @@ module Make (Lexer: Lexer.S)
let invalid_lexeme = Lexer.Token.to_lexeme invalid in let invalid_lexeme = Lexer.Token.to_lexeme invalid in
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
let header = header ^ trailer in let header = header ^ trailer in
header ^ (if msg = "" then ".\n" else ":\n" ^ msg) let msg =
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
in Region.{value=msg; region=invalid_region}
let failure get_win checkpoint = let failure get_win checkpoint =
let message = ParErr.message (state checkpoint) in let message = ParErr.message (state checkpoint) in
View File
@ -47,6 +47,8 @@ module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) (Parser: PARSER with type token = Lexer.Token.token)
(ParErr: sig val message : int -> string end) : (ParErr: sig val message : int -> string end) :
sig sig
(* WARNING: The following parsers may all raise [Lexer.Error] *)
(* The monolithic API of Menhir *) (* The monolithic API of Menhir *)
val mono_contract : val mono_contract :
@ -67,5 +69,6 @@ module Make (Lexer: Lexer.S)
val incr_contract : Lexer.instance -> Parser.ast val incr_contract : Lexer.instance -> Parser.ast
val incr_expr : Lexer.instance -> Parser.expr val incr_expr : Lexer.instance -> Parser.expr
val format_error : ?offsets:bool -> [`Point | `Byte] -> error -> string val format_error :
?offsets:bool -> [`Point | `Byte] -> error -> string Region.reg
end end
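The incremental entry points signal syntax errors through the Point exception, and format_error now attaches the offending region to the rendered message. A rough sketch of a client turning that exception into a result value (Front is the usual instantiation of this functor; ~offsets:true and `Point stand in for the CLI options):

  let parse_contract_incr lexer_inst =
    try Stdlib.Ok (Front.incr_contract lexer_inst) with
      Front.Point point ->
        (* [point] is (message, valid option, invalid); format_error
           renders it as a message paired with its region. *)
        let error = Front.format_error ~offsets:true `Point point
        in Stdlib.Error error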
View File
@ -37,42 +37,13 @@ module Make (Lexer: Lexer.S)
open Printf open Printf
module SSet = Utils.String.Set module SSet = Utils.String.Set
(* Error printing and exception tracing *) (* Log of the lexer *)
let () = Printexc.record_backtrace true module Log = LexerLog.Make (Lexer)
(* Preprocessing the input source and opening the input channels *) let log =
Log.output_token ~offsets:IO.options#offsets
(* Path for CPP inclusions (#include) *) IO.options#mode IO.options#cmd stdout
let lib_path =
match IO.options#libs with
[] -> ""
| libs -> let mk_I dir path = sprintf " -I %s%s" dir path
in List.fold_right mk_I libs ""
let prefix =
match IO.options#input with
None | Some "-" -> "temp"
| Some file -> Filename.(file |> basename |> remove_extension)
let suffix = ".pp" ^ IO.ext
let pp_input =
if SSet.mem "cpp" IO.options#verbose
then prefix ^ suffix
else let pp_input, pp_out =
Filename.open_temp_file prefix suffix
in close_out pp_out; pp_input
let cpp_cmd =
match IO.options#input with
None | Some "-" ->
sprintf "cpp -traditional-cpp%s - > %s"
lib_path pp_input
| Some file ->
sprintf "cpp -traditional-cpp%s %s > %s"
lib_path file pp_input
(* Error handling (reexported from [ParserAPI]) *) (* Error handling (reexported from [ParserAPI]) *)
@ -81,8 +52,6 @@ module Make (Lexer: Lexer.S)
type invalid = Parser.token type invalid = Parser.token
type error = message * valid option * invalid type error = message * valid option * invalid
exception Point of error
(* Instantiating the parser *) (* Instantiating the parser *)
module Front = ParserAPI.Make (Lexer)(Parser)(ParErr) module Front = ParserAPI.Make (Lexer)(Parser)(ParErr)
@ -94,15 +63,21 @@ module Make (Lexer: Lexer.S)
(* Parsing an expression *) (* Parsing an expression *)
let parse_expr lexer_inst tokeniser output state : let parse_expr lexer_inst :
(AST.expr, string) Stdlib.result = (AST.expr, message Region.reg) Stdlib.result =
let output = Buffer.create 131 in
let state =
ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
~buffer:output in
let close_all () = let close_all () =
lexer_inst.Lexer.close (); close_out stdout in lexer_inst.Lexer.close (); close_out stdout in
let lexbuf = lexer_inst.Lexer.buffer in
let expr = let expr =
try try
if IO.options#mono then if IO.options#mono then
Front.mono_expr tokeniser lexbuf let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_expr tokeniser lexbuf
else else
Front.incr_expr lexer_inst Front.incr_expr lexer_inst
with exn -> close_all (); raise exn in with exn -> close_all (); raise exn in
@ -124,15 +99,21 @@ module Make (Lexer: Lexer.S)
(* Parsing a contract *) (* Parsing a contract *)
let parse_contract lexer_inst tokeniser output state let parse_contract lexer_inst :
: (AST.t, string) Stdlib.result = (AST.t, message Region.reg) Stdlib.result =
let output = Buffer.create 131 in
let state =
ParserLog.mk_state ~offsets:IO.options#offsets
~mode:IO.options#mode
~buffer:output in
let close_all () = let close_all () =
lexer_inst.Lexer.close (); close_out stdout in lexer_inst.Lexer.close (); close_out stdout in
let lexbuf = lexer_inst.Lexer.buffer in
let ast = let ast =
try try
if IO.options#mono then if IO.options#mono then
Front.mono_contract tokeniser lexbuf let tokeniser = lexer_inst.Lexer.read ~log
and lexbuf = lexer_inst.Lexer.buffer
in Front.mono_contract tokeniser lexbuf
else else
Front.incr_contract lexer_inst Front.incr_contract lexer_inst
with exn -> close_all (); raise exn in with exn -> close_all (); raise exn in
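Both parse_expr and parse_contract now build their own output buffer, ParserLog state, token log and tokeniser from IO.options, so a call site only supplies the lexer instance. A hedged sketch, assuming Unit is an instantiation of this functor:

  let show (lexer_inst : Lexer.instance) =
    match Unit.parse_contract lexer_inst with
      Stdlib.Ok _ast -> print_endline "Contract parsed."
    | Stdlib.Error Region.{value; _} ->
        (* Parser errors now come back as located messages. *)
        prerr_endline value

Lexing or I/O exceptions can still escape these functions; the apply wrapper further down is what converts those into located errors as well.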
@ -154,70 +135,41 @@ module Make (Lexer: Lexer.S)
(* Wrapper for the parsers above *) (* Wrapper for the parsers above *)
let parse parser = type 'a parser = Lexer.instance -> ('a, message Region.reg) result
(* Preprocessing the input *)
if SSet.mem "cpp" IO.options#verbose let apply lexer_inst parser =
then eprintf "%s\n%!" cpp_cmd (* Calling the parser and filtering errors *)
else ();
if Sys.command cpp_cmd <> 0 then match parser lexer_inst with
let msg = Stdlib.Error _ as error -> error
sprintf "External error: \"%s\" failed." cpp_cmd | Stdlib.Ok _ as node -> node
in Stdlib.Error msg
else
(* Instantiating the lexer *)
let lexer_inst = Lexer.open_token_stream (Some pp_input) in (* Lexing errors *)
(* Making the tokeniser *) | exception Lexer.Error err ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let error =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in Stdlib.Error error
let module Log = LexerLog.Make (Lexer) in (* Incremental API of Menhir *)
let log = | exception Front.Point point ->
Log.output_token ~offsets:IO.options#offsets let error =
IO.options#mode IO.options#cmd stdout in Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
let tokeniser = lexer_inst.Lexer.read ~log in (* Monolithic API of Menhir *)
let output = Buffer.create 131 in | exception Parser.Error ->
let state = ParserLog.mk_state let invalid, valid_opt =
~offsets:IO.options#offsets match lexer_inst.Lexer.get_win () with
~mode:IO.options#mode Lexer.Nil ->
~buffer:output in
(* Calling the specific parser (that is, the parameter) *)
match parser lexer_inst tokeniser output state with
Stdlib.Error _ as error -> error
| Stdlib.Ok _ as node -> node
(* Lexing errors *)
| exception Lexer.Error err ->
let file =
match IO.options#input with
None | Some "-" -> false
| Some _ -> true in
let error =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in Stdlib.Error error
(* Incremental API of Menhir *)
| exception Front.Point point ->
let error =
Front.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Stdlib.Error error
(* Monolithic API of Menhir *)
| exception Parser.Error ->
let invalid, valid_opt =
match lexer_inst.Lexer.get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *) assert false (* Safe: There is always at least EOF. *)
| Lexer.One invalid -> invalid, None | Lexer.One invalid -> invalid, None
| Lexer.Two (invalid, valid) -> invalid, Some valid in | Lexer.Two (invalid, valid) -> invalid, Some valid in
@ -227,8 +179,9 @@ module Make (Lexer: Lexer.S)
IO.options#mode point IO.options#mode point
in Stdlib.Error error in Stdlib.Error error
(* I/O errors *) (* I/O errors *)
| exception Sys_error error -> Stdlib.Error error | exception Sys_error error ->
Stdlib.Error (Region.wrap_ghost error)
end end
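A sketch of how a front end might use the new apply wrapper, which traps Lexer.Error, Front.Point, Parser.Error and Sys_error and returns them as located messages (Unit and the printing are assumptions for illustration):

  let frontend (lexer_inst : Lexer.instance) =
    match Unit.apply lexer_inst Unit.parse_contract with
      Stdlib.Ok _ as ok -> ok
    | Stdlib.Error Region.{value; _} as err ->
        (* All failure paths converge on one located message. *)
        prerr_endline value; err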
View File
@ -34,38 +34,26 @@ module Make (Lexer: Lexer.S)
and type expr = AST.expr) and type expr = AST.expr)
(IO: IO) : (IO: IO) :
sig sig
(* Error handling (reexported from [ParserAPI]) *) (* Error handling reexported from [ParserAPI] without the
exception [Point] *)
type message = string type message = string
type valid = Parser.token type valid = Parser.token
type invalid = Parser.token type invalid = Parser.token
type error = message * valid option * invalid type error = message * valid option * invalid
exception Point of error
val format_error : val format_error :
?offsets:bool -> [`Byte | `Point] -> error -> string ?offsets:bool -> [`Byte | `Point] -> error -> string Region.reg
val short_error : val short_error :
?offsets:bool -> [`Point | `Byte] -> string -> Region.t -> string ?offsets:bool -> [`Point | `Byte] -> message -> Region.t -> string
(* Parsers *) (* Parsers *)
val parse : type 'a parser = Lexer.instance -> ('a, message Region.reg) result
(Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state -> ('a, string) result) ->
('a, string) result
val parse_contract : val apply : Lexer.instance -> 'a parser -> ('a, message Region.reg) result
Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state ->
(AST.t, string) Stdlib.result
val parse_expr :
Lexer.instance ->
(Lexing.lexbuf -> Lexer.token) ->
Buffer.t -> ParserLog.state -> (AST.expr, string) Stdlib.result
val parse_contract : AST.t parser
val parse_expr : AST.expr parser
end end
View File
@ -685,6 +685,7 @@ let bind_and (a, b) =
a >>? fun a -> a >>? fun a ->
b >>? fun b -> b >>? fun b ->
ok (a, b) ok (a, b)
let bind_and3 (a, b, c) = let bind_and3 (a, b, c) =
a >>? fun a -> a >>? fun a ->
b >>? fun b -> b >>? fun b ->
@ -692,18 +693,18 @@ let bind_and3 (a, b, c) =
ok (a, b, c) ok (a, b, c)
let bind_pair = bind_and let bind_pair = bind_and
let bind_map_pair f (a, b) = let bind_map_pair f (a, b) =
bind_pair (f a, f b) bind_pair (f a, f b)
let bind_fold_map_pair f acc (a, b) = let bind_fold_map_pair f acc (a, b) =
f acc a >>? fun (acc' , a') -> f acc a >>? fun (acc' , a') ->
f acc' b >>? fun (acc'' , b') -> f acc' b >>? fun (acc'' , b') ->
ok (acc'' , (a' , b')) ok (acc'' , (a' , b'))
let bind_map_triple f (a, b, c) =
bind_and3 (f a, f b, f c)
let bind_list_cons v lst = let bind_map_triple f (a, b, c) = bind_and3 (f a, f b, f c)
lst >>? fun lst ->
ok (v::lst) let bind_list_cons v lst = lst >>? fun lst -> ok (v::lst)
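These combinators thread the Trace result monad through pairs, triples and lists. A small illustration of bind_map_pair (half and simple_fail are assumptions used only for this example):

  (* Halve an even integer, failing in the Trace monad otherwise. *)
  let half n =
    if n mod 2 = 0 then ok (n / 2) else simple_fail "odd number"

  (* Applies [half] to both components; succeeds only if both do. *)
  let _ = bind_map_pair half (4, 10)   (* ok (2, 5) *)
  let _ = bind_map_pair half (4, 3)    (* an error  *)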
let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x -> let rec bind_chain : ('a -> 'a result) list -> 'a -> 'a result = fun fs x ->
match fs with match fs with