From b1dd35b56d3598532ac4be352ef46159bb1ceb46 Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Fri, 3 Jan 2020 23:01:46 +0100 Subject: [PATCH 01/15] fix definition of predecessor_timestamp --- src/bin/cli.ml | 2 +- src/bin/expect_tests/help_tests.ml | 25 +++++++++++++++---------- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/src/bin/cli.ml b/src/bin/cli.ml index 61c4e7a4e..0e1859839 100644 --- a/src/bin/cli.ml +++ b/src/bin/cli.ml @@ -90,7 +90,7 @@ let predecessor_timestamp = let open Arg in let info = let docv = "PREDECESSOR_TIMESTAMP" in - let doc = "$(docv) is the pedecessor_timestamp (now value) the michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in + let doc = "$(docv) is the pedecessor_timestamp (now value minus one minute) the michelson interpreter will use (e.g. '2000-01-01T10:10:10Z')" in info ~docv ~doc ["predecessor-timestamp"] in value @@ opt (some string) None info diff --git a/src/bin/expect_tests/help_tests.ml b/src/bin/expect_tests/help_tests.ml index af5ab1797..fc9bf0e7d 100644 --- a/src/bin/expect_tests/help_tests.ml +++ b/src/bin/expect_tests/help_tests.ml @@ -198,8 +198,9 @@ let%expect_test _ = are 'text' (default), 'json' and 'hex'. --predecessor-timestamp=PREDECESSOR_TIMESTAMP - PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the - michelson interpreter will use (e.g. '2000-01-01T10:10:10Z') + PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus + one minute) the michelson interpreter will use (e.g. + '2000-01-01T10:10:10Z') -s SYNTAX, --syntax=SYNTAX (absent=auto) SYNTAX is the syntax that will be used. Currently supported @@ -263,8 +264,9 @@ let%expect_test _ = are 'text' (default), 'json' and 'hex'. --predecessor-timestamp=PREDECESSOR_TIMESTAMP - PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the - michelson interpreter will use (e.g. '2000-01-01T10:10:10Z') + PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus + one minute) the michelson interpreter will use (e.g. + '2000-01-01T10:10:10Z') -s SYNTAX, --syntax=SYNTAX (absent=auto) SYNTAX is the syntax that will be used. Currently supported @@ -324,8 +326,9 @@ let%expect_test _ = `plain' whenever the TERM env var is `dumb' or undefined. --predecessor-timestamp=PREDECESSOR_TIMESTAMP - PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the - michelson interpreter will use (e.g. '2000-01-01T10:10:10Z') + PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus + one minute) the michelson interpreter will use (e.g. + '2000-01-01T10:10:10Z') -s SYNTAX, --syntax=SYNTAX (absent=auto) SYNTAX is the syntax that will be used. Currently supported @@ -382,8 +385,9 @@ let%expect_test _ = `plain' whenever the TERM env var is `dumb' or undefined. --predecessor-timestamp=PREDECESSOR_TIMESTAMP - PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the - michelson interpreter will use (e.g. '2000-01-01T10:10:10Z') + PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus + one minute) the michelson interpreter will use (e.g. + '2000-01-01T10:10:10Z') -s SYNTAX, --syntax=SYNTAX (absent=auto) SYNTAX is the syntax that will be used. Currently supported @@ -435,8 +439,9 @@ let%expect_test _ = `plain' whenever the TERM env var is `dumb' or undefined. --predecessor-timestamp=PREDECESSOR_TIMESTAMP - PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value) the - michelson interpreter will use (e.g. 
'2000-01-01T10:10:10Z') + PREDECESSOR_TIMESTAMP is the pedecessor_timestamp (now value minus + one minute) the michelson interpreter will use (e.g. + '2000-01-01T10:10:10Z') -s SYNTAX, --syntax=SYNTAX (absent=auto) SYNTAX is the syntax that will be used. Currently supported From f13372931852bf8a5794758889f94f6ead0a1b67 Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Sun, 5 Jan 2020 14:32:15 +0100 Subject: [PATCH 02/15] fix dune file warnings --- src/passes/1-parser/cameligo/dune | 2 ++ src/passes/1-parser/reasonligo/dune | 2 ++ 2 files changed, 4 insertions(+) diff --git a/src/passes/1-parser/cameligo/dune b/src/passes/1-parser/cameligo/dune index 786b20f31..95290beb8 100644 --- a/src/passes/1-parser/cameligo/dune +++ b/src/passes/1-parser/cameligo/dune @@ -40,4 +40,6 @@ (executable (name Unlexer) (libraries str) + (preprocess + (pps bisect_ppx --conditional)) (modules Unlexer)) diff --git a/src/passes/1-parser/reasonligo/dune b/src/passes/1-parser/reasonligo/dune index 12b0c6d27..39250ecc8 100644 --- a/src/passes/1-parser/reasonligo/dune +++ b/src/passes/1-parser/reasonligo/dune @@ -44,4 +44,6 @@ (executable (name Unlexer) (libraries str) + (preprocess + (pps bisect_ppx --conditional)) (modules Unlexer)) From a6e4837aa0852df84560b234d33f39d52c71cd90 Mon Sep 17 00:00:00 2001 From: Christian Rinderknecht Date: Sat, 4 Jan 2020 19:49:22 +0100 Subject: [PATCH 03/15] Sharing standalone lexers and parsers, and parser error API. --- src/passes/1-parser/cameligo/.links | 4 + src/passes/1-parser/cameligo/LexerMain.ml | 61 +------ src/passes/1-parser/cameligo/ParserAPI.mli | 22 --- src/passes/1-parser/cameligo/ParserMain.ml | 162 +++-------------- src/passes/1-parser/cameligo/dune | 6 +- src/passes/1-parser/pascaligo/.links | 4 + src/passes/1-parser/pascaligo/LexerMain.ml | 61 +------ src/passes/1-parser/pascaligo/ParserAPI.ml | 82 --------- src/passes/1-parser/pascaligo/ParserAPI.mli | 22 --- src/passes/1-parser/pascaligo/ParserMain.ml | 162 +++-------------- src/passes/1-parser/pascaligo/dune | 15 +- src/passes/1-parser/reasonligo/.links | 4 + src/passes/1-parser/reasonligo/LexerMain.ml | 61 +------ src/passes/1-parser/reasonligo/ParserAPI.ml | 82 --------- src/passes/1-parser/reasonligo/ParserAPI.mli | 22 --- src/passes/1-parser/reasonligo/ParserMain.ml | 164 +++--------------- src/passes/1-parser/reasonligo/dune | 2 +- .../{cameligo => shared}/ParserAPI.ml | 62 +++++-- src/passes/1-parser/shared/ParserAPI.mli | 60 +++++++ src/passes/1-parser/shared/dune | 10 +- 20 files changed, 227 insertions(+), 841 deletions(-) delete mode 100644 src/passes/1-parser/cameligo/ParserAPI.mli delete mode 100644 src/passes/1-parser/pascaligo/ParserAPI.ml delete mode 100644 src/passes/1-parser/pascaligo/ParserAPI.mli delete mode 100644 src/passes/1-parser/reasonligo/ParserAPI.ml delete mode 100644 src/passes/1-parser/reasonligo/ParserAPI.mli rename src/passes/1-parser/{cameligo => shared}/ParserAPI.ml (59%) create mode 100644 src/passes/1-parser/shared/ParserAPI.mli diff --git a/src/passes/1-parser/cameligo/.links b/src/passes/1-parser/cameligo/.links index eca6c8680..a29429a42 100644 --- a/src/passes/1-parser/cameligo/.links +++ b/src/passes/1-parser/cameligo/.links @@ -16,4 +16,8 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml ../shared/Markup.mli ../shared/Utils.mli ../shared/Utils.ml +../shared/ParserAPI.mli +../shared/ParserAPI.ml +../shared/LexerUnit.ml +../shared/ParserUnit.ml Stubs/Simple_utils.ml diff --git a/src/passes/1-parser/cameligo/LexerMain.ml 
b/src/passes/1-parser/cameligo/LexerMain.ml index 80ae8b00d..e9775b803 100644 --- a/src/passes/1-parser/cameligo/LexerMain.ml +++ b/src/passes/1-parser/cameligo/LexerMain.ml @@ -1,56 +1,9 @@ -(** Driver for the LIGO lexer *) +(** Driver for the CameLIGO lexer *) -let extension = ".mligo" -let options = EvalOpt.read "CameLIGO" extension +module IO = + struct + let ext = ".mligo" + let options = EvalOpt.read "CameLIGO" ext + end -(** Error printing and exception tracing -*) -let () = Printexc.record_backtrace true - -let external_ text = - Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;; - -(** {1 Preprocessing the input source and opening the input channels} *) - -(** Path for CPP inclusions (#include) -*) -let lib_path = - match options#libs with - [] -> "" - | libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path - in List.fold_right mk_I libs "" - -let prefix = - match options#input with - None | Some "-" -> "temp" - | Some file -> Filename.(file |> basename |> remove_extension) - -let suffix = ".pp" ^ extension - -let pp_input = - if Utils.String.Set.mem "cpp" options#verbose - then prefix ^ suffix - else let pp_input, pp_out = Filename.open_temp_file prefix suffix - in close_out pp_out; pp_input - -let cpp_cmd = - match options#input with - None | Some "-" -> - Printf.sprintf "cpp -traditional-cpp%s - > %s" - lib_path pp_input - | Some file -> - Printf.sprintf "cpp -traditional-cpp%s %s > %s" - lib_path file pp_input - -let () = - if Utils.String.Set.mem "cpp" options#verbose - then Printf.eprintf "%s\n%!" cpp_cmd; - if Sys.command cpp_cmd <> 0 then - external_ (Printf.sprintf "the command \"%s\" failed." cpp_cmd) - -(** {1 Running the lexer on the input file} *) - -module Log = LexerLog.Make (Lexer.Make (LexToken)) - -let () = Log.trace ~offsets:options#offsets - options#mode (Some pp_input) options#cmd +module M = LexerUnit.Make (IO) (Lexer.Make (LexToken)) diff --git a/src/passes/1-parser/cameligo/ParserAPI.mli b/src/passes/1-parser/cameligo/ParserAPI.mli deleted file mode 100644 index 7d969a33c..000000000 --- a/src/passes/1-parser/cameligo/ParserAPI.mli +++ /dev/null @@ -1,22 +0,0 @@ -(** Generic parser API for LIGO *) - -module Make (Lexer: Lexer.S with module Token := LexToken) - (Parser: module type of Parser) - (ParErr: sig val message: int -> string end) : - sig - (* Monolithic and incremental APIs of Menhir for parsing *) - - val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t - val incr_contract : Lexer.instance -> AST.t - - (* Error handling *) - - type message = string - type valid = Lexer.token - type invalid = Lexer.token - type error = message * valid option * invalid - - exception Point of error - - val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string - end diff --git a/src/passes/1-parser/cameligo/ParserMain.ml b/src/passes/1-parser/cameligo/ParserMain.ml index 8ed546f50..855dc639e 100644 --- a/src/passes/1-parser/cameligo/ParserMain.ml +++ b/src/passes/1-parser/cameligo/ParserMain.ml @@ -1,145 +1,27 @@ (** Driver for the CameLIGO parser *) -let extension = ".mligo" -let options = EvalOpt.read "CameLIGO" extension +module IO = + struct + let ext = ".mligo" + let options = EvalOpt.read "CameLIGO" ext + end -open Printf +module ExtParser = + struct + type ast = AST.t + type expr = AST.expr + include Parser + end -(** Error printing and exception tracing -*) -let () = Printexc.record_backtrace true +module ExtParserLog = + struct + type ast = AST.t + include ParserLog + end -(** Extracting the input 
file -*) -let file = - match options#input with - None | Some "-" -> false - | Some _ -> true - -(** {1 Error printing and exception tracing} *) - -let () = Printexc.record_backtrace true - -let external_ text = - Utils.highlight (sprintf "External error: %s" text); exit 1;; - -(** {1 Preprocessing the input source and opening the input channels} *) - -(** Path for CPP inclusions (#include) -*) -let lib_path = - match options#libs with - [] -> "" - | libs -> let mk_I dir path = sprintf " -I %s%s" dir path - in List.fold_right mk_I libs "" - -let prefix = - match options#input with - None | Some "-" -> "temp" - | Some file -> Filename.(file |> basename |> remove_extension) - -let suffix = ".pp" ^ extension - -let pp_input = - if Utils.String.Set.mem "cpp" options#verbose - then prefix ^ suffix - else let pp_input, pp_out = Filename.open_temp_file prefix suffix - in close_out pp_out; pp_input - -let cpp_cmd = - match options#input with - None | Some "-" -> - sprintf "cpp -traditional-cpp%s - > %s" - lib_path pp_input - | Some file -> - sprintf "cpp -traditional-cpp%s %s > %s" - lib_path file pp_input - -let () = - if Utils.String.Set.mem "cpp" options#verbose - then eprintf "%s\n%!" cpp_cmd; - if Sys.command cpp_cmd <> 0 then - external_ (sprintf "the command \"%s\" failed." cpp_cmd) - -(** {1 Instanciating the lexer} *) - -module Lexer = Lexer.Make (LexToken) -module Log = LexerLog.Make (Lexer) -module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr) - -let lexer_inst = Lexer.open_token_stream (Some pp_input) -let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst - -and cout = stdout - -let log = Log.output_token ~offsets:options#offsets - options#mode options#cmd cout - -and close_all () = close (); close_out cout - -(** {1 Tokeniser} *) - -let tokeniser = read ~log - -(** {1 Main} *) - -let () = - try - let ast = - if options#mono - then ParserFront.mono_contract tokeniser buffer - else ParserFront.incr_contract lexer_inst in - if Utils.String.Set.mem "ast" options#verbose - then let buffer = Buffer.create 131 in - let state = ParserLog.mk_state - ~offsets:options#offsets - ~mode:options#mode - ~buffer in - begin - ParserLog.pp_ast state ast; - Buffer.output_buffer stdout buffer - end - else if Utils.String.Set.mem "ast-tokens" options#verbose - then let buffer = Buffer.create 131 in - let state = ParserLog.mk_state - ~offsets:options#offsets - ~mode:options#mode - ~buffer in - begin - ParserLog.print_tokens state ast; - Buffer.output_buffer stdout buffer - end - with - (* Lexing errors *) - Lexer.Error err -> - close_all (); - let msg = - Lexer.format_error ~offsets:options#offsets - options#mode err ~file - in prerr_string msg - - (* Incremental API of Menhir *) - | ParserFront.Point point -> - let () = close_all () in - let error = - ParserFront.format_error ~offsets:options#offsets - options#mode point - in eprintf "\027[31m%s\027[0m%!" error - - (* Monolithic API of Menhir *) - | Parser.Error -> - let () = close_all () in - let invalid, valid_opt = - match get_win () with - Lexer.Nil -> - assert false (* Safe: There is always at least EOF. *) - | Lexer.One invalid -> invalid, None - | Lexer.Two (invalid, valid) -> invalid, Some valid in - let point = "", valid_opt, invalid in - let error = - ParserFront.format_error ~offsets:options#offsets - options#mode point - in eprintf "\027[31m%s\027[0m%!" 
error - - (* I/O errors *) - | Sys_error msg -> Utils.highlight msg +module M = ParserUnit.Make (IO) + (Lexer.Make (LexToken)) + (AST) + (ExtParser) + (ParErr) + (ExtParserLog) diff --git a/src/passes/1-parser/cameligo/dune b/src/passes/1-parser/cameligo/dune index 95290beb8..e9f496034 100644 --- a/src/passes/1-parser/cameligo/dune +++ b/src/passes/1-parser/cameligo/dune @@ -15,9 +15,9 @@ str simple-utils tezos-utils - getopt ) + getopt) (preprocess - (pps bisect_ppx --conditional) ) + (pps bisect_ppx --conditional)) (flags (:standard -open Simple_utils -open Parser_shared))) (executable @@ -32,7 +32,7 @@ (name ParserMain) (libraries parser_cameligo) (modules - ParErr ParserAPI ParserMain) + ParErr ParserMain) (preprocess (pps bisect_ppx --conditional)) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))) diff --git a/src/passes/1-parser/pascaligo/.links b/src/passes/1-parser/pascaligo/.links index eca6c8680..a29429a42 100644 --- a/src/passes/1-parser/pascaligo/.links +++ b/src/passes/1-parser/pascaligo/.links @@ -16,4 +16,8 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml ../shared/Markup.mli ../shared/Utils.mli ../shared/Utils.ml +../shared/ParserAPI.mli +../shared/ParserAPI.ml +../shared/LexerUnit.ml +../shared/ParserUnit.ml Stubs/Simple_utils.ml diff --git a/src/passes/1-parser/pascaligo/LexerMain.ml b/src/passes/1-parser/pascaligo/LexerMain.ml index 9838fcbc4..4f1940204 100644 --- a/src/passes/1-parser/pascaligo/LexerMain.ml +++ b/src/passes/1-parser/pascaligo/LexerMain.ml @@ -1,56 +1,9 @@ -(** Driver for the LIGO lexer *) +(** Driver for the PascaLIGO lexer *) -let extension = ".ligo" -let options = EvalOpt.read "PascaLIGO" extension +module IO = + struct + let ext = ".ligo" + let options = EvalOpt.read "PascaLIGO" ext + end -(** Error printing and exception tracing -*) -let () = Printexc.record_backtrace true - -let external_ text = - Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;; - -(** {1 Preprocessing the input source and opening the input channels} *) - -(** Path for CPP inclusions (#include) -*) -let lib_path = - match options#libs with - [] -> "" - | libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path - in List.fold_right mk_I libs "" - -let prefix = - match options#input with - None | Some "-" -> "temp" - | Some file -> Filename.(file |> basename |> remove_extension) - -let suffix = ".pp" ^ extension - -let pp_input = - if Utils.String.Set.mem "cpp" options#verbose - then prefix ^ suffix - else let pp_input, pp_out = Filename.open_temp_file prefix suffix - in close_out pp_out; pp_input - -let cpp_cmd = - match options#input with - None | Some "-" -> - Printf.sprintf "cpp -traditional-cpp%s - > %s" - lib_path pp_input - | Some file -> - Printf.sprintf "cpp -traditional-cpp%s %s > %s" - lib_path file pp_input - -let () = - if Utils.String.Set.mem "cpp" options#verbose - then Printf.eprintf "%s\n%!" cpp_cmd; - if Sys.command cpp_cmd <> 0 then - external_ (Printf.sprintf "the command \"%s\" failed." 
cpp_cmd) - -(** {1 Running the lexer on the input file} *) - -module Log = LexerLog.Make (Lexer.Make (LexToken)) - -let () = Log.trace ~offsets:options#offsets - options#mode (Some pp_input) options#cmd +module M = LexerUnit.Make (IO) (Lexer.Make (LexToken)) diff --git a/src/passes/1-parser/pascaligo/ParserAPI.ml b/src/passes/1-parser/pascaligo/ParserAPI.ml deleted file mode 100644 index df82173a9..000000000 --- a/src/passes/1-parser/pascaligo/ParserAPI.ml +++ /dev/null @@ -1,82 +0,0 @@ -(* Generic parser for LIGO *) - -(* Main functor *) - -module Make (Lexer: Lexer.S with module Token := LexToken) - (Parser: module type of Parser) - (ParErr: sig val message : int -> string end) = - struct - module I = Parser.MenhirInterpreter - module S = MenhirLib.General (* Streams *) - - (* The call [stack checkpoint] extracts the parser's stack out of - a checkpoint. *) - - let stack = function - I.HandlingError env -> I.stack env - | _ -> assert false - - (* The call [state checkpoint] extracts the number of the current - state out of a parser checkpoint. *) - - let state checkpoint : int = - match Lazy.force (stack checkpoint) with - S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *) - | S.Cons (I.Element (s,_,_,_),_) -> I.number s - - (* The parser has successfully produced a semantic value. *) - - let success v = v - - (* The parser has suspended itself because of a syntax error. Stop. *) - - type message = string - type valid = Lexer.token - type invalid = Lexer.token - type error = message * valid option * invalid - - exception Point of error - - let failure get_win checkpoint = - let message = ParErr.message (state checkpoint) in - match get_win () with - Lexer.Nil -> assert false - | Lexer.One invalid -> - raise (Point (message, None, invalid)) - | Lexer.Two (invalid, valid) -> - raise (Point (message, Some valid, invalid)) - - (* The two Menhir APIs are called from the following two functions. 
*) - - let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t = - let supplier = I.lexer_lexbuf_to_supplier read buffer - and failure = failure get_win in - let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in - let ast = I.loop_handle success failure supplier parser - in close (); ast - - let mono_contract = Parser.contract - - (* Errors *) - - let format_error ?(offsets=true) mode (msg, valid_opt, invalid) = - let invalid_region = LexToken.to_region invalid in - let header = - "Parse error " ^ invalid_region#to_string ~offsets mode in - let trailer = - match valid_opt with - None -> - if LexToken.is_eof invalid then "" - else let invalid_lexeme = LexToken.to_lexeme invalid in - Printf.sprintf ", before \"%s\"" invalid_lexeme - | Some valid -> - let valid_lexeme = LexToken.to_lexeme valid in - let s = Printf.sprintf ", after \"%s\"" valid_lexeme in - if LexToken.is_eof invalid then s - else - let invalid_lexeme = LexToken.to_lexeme invalid in - Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in - let header = header ^ trailer in - header ^ (if msg = "" then ".\n" else ":\n" ^ msg) - - end diff --git a/src/passes/1-parser/pascaligo/ParserAPI.mli b/src/passes/1-parser/pascaligo/ParserAPI.mli deleted file mode 100644 index afc0fb8ba..000000000 --- a/src/passes/1-parser/pascaligo/ParserAPI.mli +++ /dev/null @@ -1,22 +0,0 @@ -(** Generic parser API for LIGO *) - -module Make (Lexer: Lexer.S with module Token := LexToken) - (Parser: module type of Parser) - (ParErr: module type of ParErr) : - sig - (* Monolithic and incremental APIs of Menhir for parsing *) - - val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t - val incr_contract : Lexer.instance -> AST.t - - (* Error handling *) - - type message = string - type valid = Lexer.token - type invalid = Lexer.token - type error = message * valid option * invalid - - exception Point of error - - val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string - end diff --git a/src/passes/1-parser/pascaligo/ParserMain.ml b/src/passes/1-parser/pascaligo/ParserMain.ml index 489008453..3fcae9dec 100644 --- a/src/passes/1-parser/pascaligo/ParserMain.ml +++ b/src/passes/1-parser/pascaligo/ParserMain.ml @@ -1,145 +1,27 @@ (** Driver for the PascaLIGO parser *) -let extension = ".ligo" -let options = EvalOpt.read "PascaLIGO" extension +module IO = + struct + let ext = ".ligo" + let options = EvalOpt.read "PascaLIGO" ext + end -open Printf +module ExtParser = + struct + type ast = AST.t + type expr = AST.expr + include Parser + end -(** Error printing and exception tracing -*) -let () = Printexc.record_backtrace true +module ExtParserLog = + struct + type ast = AST.t + include ParserLog + end -(** Extracting the input file -*) -let file = - match options#input with - None | Some "-" -> false - | Some _ -> true - -(** {1 Error printing and exception tracing} *) - -let () = Printexc.record_backtrace true - -let external_ text = - Utils.highlight (sprintf "External error: %s" text); exit 1;; - -(** {1 Preprocessing the input source and opening the input channels} *) - -(** Path for CPP inclusions (#include) -*) -let lib_path = - match options#libs with - [] -> "" - | libs -> let mk_I dir path = sprintf " -I %s%s" dir path - in List.fold_right mk_I libs "" - -let prefix = - match options#input with - None | Some "-" -> "temp" - | Some file -> Filename.(file |> basename |> remove_extension) - -let suffix = ".pp" ^ extension - -let pp_input = - if Utils.String.Set.mem "cpp" options#verbose - then 
prefix ^ suffix - else let pp_input, pp_out = Filename.open_temp_file prefix suffix - in close_out pp_out; pp_input - -let cpp_cmd = - match options#input with - None | Some "-" -> - sprintf "cpp -traditional-cpp%s - > %s" - lib_path pp_input - | Some file -> - sprintf "cpp -traditional-cpp%s %s > %s" - lib_path file pp_input - -let () = - if Utils.String.Set.mem "cpp" options#verbose - then eprintf "%s\n%!" cpp_cmd; - if Sys.command cpp_cmd <> 0 then - external_ (sprintf "the command \"%s\" failed." cpp_cmd) - -(** {1 Instanciating the lexer} *) - -module Lexer = Lexer.Make (LexToken) -module Log = LexerLog.Make (Lexer) -module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr) - -let lexer_inst = Lexer.open_token_stream (Some pp_input) -let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst - -and cout = stdout - -let log = Log.output_token ~offsets:options#offsets - options#mode options#cmd cout - -and close_all () = close (); close_out cout - -(** {1 Tokeniser} *) - -let tokeniser = read ~log - -(** {1 Main} *) - -let () = - try - let ast = - if options#mono - then ParserFront.mono_contract tokeniser buffer - else ParserFront.incr_contract lexer_inst in - if Utils.String.Set.mem "ast" options#verbose - then let buffer = Buffer.create 131 in - let state = ParserLog.mk_state - ~offsets:options#offsets - ~mode:options#mode - ~buffer in - begin - ParserLog.pp_ast state ast; - Buffer.output_buffer stdout buffer - end - else if Utils.String.Set.mem "ast-tokens" options#verbose - then let buffer = Buffer.create 131 in - let state = ParserLog.mk_state - ~offsets:options#offsets - ~mode:options#mode - ~buffer in - begin - ParserLog.print_tokens state ast; - Buffer.output_buffer stdout buffer - end - with - (* Lexing errors *) - Lexer.Error err -> - close_all (); - let msg = - Lexer.format_error ~offsets:options#offsets - options#mode err ~file - in prerr_string msg - - (* Incremental API of Menhir *) - | ParserFront.Point point -> - let () = close_all () in - let error = - ParserFront.format_error ~offsets:options#offsets - options#mode point - in eprintf "\027[31m%s\027[0m%!" error - - (* Monolithic API of Menhir *) - | Parser.Error -> - let () = close_all () in - let invalid, valid_opt = - match get_win () with - Lexer.Nil -> - assert false (* Safe: There is always at least EOF. *) - | Lexer.One invalid -> invalid, None - | Lexer.Two (invalid, valid) -> invalid, Some valid in - let point = "", valid_opt, invalid in - let error = - ParserFront.format_error ~offsets:options#offsets - options#mode point - in eprintf "\027[31m%s\027[0m%!" 
error - - (* I/O errors *) - | Sys_error msg -> Utils.highlight msg +module M = ParserUnit.Make (IO) + (Lexer.Make (LexToken)) + (AST) + (ExtParser) + (ParErr) + (ExtParserLog) diff --git a/src/passes/1-parser/pascaligo/dune b/src/passes/1-parser/pascaligo/dune index 908455acb..1c12ca706 100644 --- a/src/passes/1-parser/pascaligo/dune +++ b/src/passes/1-parser/pascaligo/dune @@ -10,15 +10,14 @@ (public_name ligo.parser.pascaligo) (modules AST pascaligo Parser ParserLog LexToken) (libraries - menhirLib - parser_shared - hex - simple-utils - tezos-utils - ) + menhirLib + parser_shared + hex + simple-utils + tezos-utils) (preprocess (pps bisect_ppx --conditional)) - (flags (:standard -open Parser_shared -open Simple_utils))) + (flags (:standard -open Parser_shared -open Simple_utils))) (executable (name LexerMain) @@ -33,7 +32,7 @@ (name ParserMain) (libraries parser_pascaligo) (modules - ParErr ParserAPI ParserMain) + ParErr ParserMain) (preprocess (pps bisect_ppx --conditional)) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo))) diff --git a/src/passes/1-parser/reasonligo/.links b/src/passes/1-parser/reasonligo/.links index e827ae13e..e972ad9c6 100644 --- a/src/passes/1-parser/reasonligo/.links +++ b/src/passes/1-parser/reasonligo/.links @@ -16,6 +16,10 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml ../shared/Markup.mli ../shared/Utils.mli ../shared/Utils.ml +../shared/ParserAPI.mli +../shared/ParserAPI.ml +../shared/LexerUnit.ml +../shared/ParserUnit.ml Stubs/Simple_utils.ml Stubs/Parser_cameligo.ml ../cameligo/AST.mli diff --git a/src/passes/1-parser/reasonligo/LexerMain.ml b/src/passes/1-parser/reasonligo/LexerMain.ml index b49af81ff..756a2f103 100644 --- a/src/passes/1-parser/reasonligo/LexerMain.ml +++ b/src/passes/1-parser/reasonligo/LexerMain.ml @@ -1,56 +1,9 @@ -(** Driver for the LIGO lexer *) +(** Driver for the ReasonLIGO lexer *) -let extension = ".religo" -let options = EvalOpt.read "ReasonLIGO" extension +module IO = + struct + let ext = ".religo" + let options = EvalOpt.read "ReasonLIGO" ext + end -(** Error printing and exception tracing -*) -let () = Printexc.record_backtrace true - -let external_ text = - Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;; - -(** {1 Preprocessing the input source and opening the input channels} *) - -(** Path for CPP inclusions (#include) -*) -let lib_path = - match options#libs with - [] -> "" - | libs -> let mk_I dir path = Printf.sprintf " -I %s%s" dir path - in List.fold_right mk_I libs "" - -let prefix = - match options#input with - None | Some "-" -> "temp" - | Some file -> Filename.(file |> basename |> remove_extension) - -let suffix = ".pp" ^ extension - -let pp_input = - if Utils.String.Set.mem "cpp" options#verbose - then prefix ^ suffix - else let pp_input, pp_out = Filename.open_temp_file prefix suffix - in close_out pp_out; pp_input - -let cpp_cmd = - match options#input with - None | Some "-" -> - Printf.sprintf "cpp -traditional-cpp%s - > %s" - lib_path pp_input - | Some file -> - Printf.sprintf "cpp -traditional-cpp%s %s > %s" - lib_path file pp_input - -let () = - if Utils.String.Set.mem "cpp" options#verbose - then Printf.eprintf "%s\n%!" cpp_cmd; - if Sys.command cpp_cmd <> 0 then - external_ (Printf.sprintf "the command \"%s\" failed." 
cpp_cmd) - -(** {1 Running the lexer on the input file} *) - -module Log = LexerLog.Make (Lexer.Make (LexToken)) - -let () = Log.trace ~offsets:options#offsets - options#mode (Some pp_input) options#cmd +module M = LexerUnit.Make (IO) (Lexer.Make (LexToken)) diff --git a/src/passes/1-parser/reasonligo/ParserAPI.ml b/src/passes/1-parser/reasonligo/ParserAPI.ml deleted file mode 100644 index df82173a9..000000000 --- a/src/passes/1-parser/reasonligo/ParserAPI.ml +++ /dev/null @@ -1,82 +0,0 @@ -(* Generic parser for LIGO *) - -(* Main functor *) - -module Make (Lexer: Lexer.S with module Token := LexToken) - (Parser: module type of Parser) - (ParErr: sig val message : int -> string end) = - struct - module I = Parser.MenhirInterpreter - module S = MenhirLib.General (* Streams *) - - (* The call [stack checkpoint] extracts the parser's stack out of - a checkpoint. *) - - let stack = function - I.HandlingError env -> I.stack env - | _ -> assert false - - (* The call [state checkpoint] extracts the number of the current - state out of a parser checkpoint. *) - - let state checkpoint : int = - match Lazy.force (stack checkpoint) with - S.Nil -> 0 (* WARNING: Hack. The first state should be 0. *) - | S.Cons (I.Element (s,_,_,_),_) -> I.number s - - (* The parser has successfully produced a semantic value. *) - - let success v = v - - (* The parser has suspended itself because of a syntax error. Stop. *) - - type message = string - type valid = Lexer.token - type invalid = Lexer.token - type error = message * valid option * invalid - - exception Point of error - - let failure get_win checkpoint = - let message = ParErr.message (state checkpoint) in - match get_win () with - Lexer.Nil -> assert false - | Lexer.One invalid -> - raise (Point (message, None, invalid)) - | Lexer.Two (invalid, valid) -> - raise (Point (message, Some valid, invalid)) - - (* The two Menhir APIs are called from the following two functions. 
*) - - let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t = - let supplier = I.lexer_lexbuf_to_supplier read buffer - and failure = failure get_win in - let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in - let ast = I.loop_handle success failure supplier parser - in close (); ast - - let mono_contract = Parser.contract - - (* Errors *) - - let format_error ?(offsets=true) mode (msg, valid_opt, invalid) = - let invalid_region = LexToken.to_region invalid in - let header = - "Parse error " ^ invalid_region#to_string ~offsets mode in - let trailer = - match valid_opt with - None -> - if LexToken.is_eof invalid then "" - else let invalid_lexeme = LexToken.to_lexeme invalid in - Printf.sprintf ", before \"%s\"" invalid_lexeme - | Some valid -> - let valid_lexeme = LexToken.to_lexeme valid in - let s = Printf.sprintf ", after \"%s\"" valid_lexeme in - if LexToken.is_eof invalid then s - else - let invalid_lexeme = LexToken.to_lexeme invalid in - Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in - let header = header ^ trailer in - header ^ (if msg = "" then ".\n" else ":\n" ^ msg) - - end diff --git a/src/passes/1-parser/reasonligo/ParserAPI.mli b/src/passes/1-parser/reasonligo/ParserAPI.mli deleted file mode 100644 index 7d969a33c..000000000 --- a/src/passes/1-parser/reasonligo/ParserAPI.mli +++ /dev/null @@ -1,22 +0,0 @@ -(** Generic parser API for LIGO *) - -module Make (Lexer: Lexer.S with module Token := LexToken) - (Parser: module type of Parser) - (ParErr: sig val message: int -> string end) : - sig - (* Monolithic and incremental APIs of Menhir for parsing *) - - val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t - val incr_contract : Lexer.instance -> AST.t - - (* Error handling *) - - type message = string - type valid = Lexer.token - type invalid = Lexer.token - type error = message * valid option * invalid - - exception Point of error - - val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string - end diff --git a/src/passes/1-parser/reasonligo/ParserMain.ml b/src/passes/1-parser/reasonligo/ParserMain.ml index 0af4c4a76..ee7d562de 100644 --- a/src/passes/1-parser/reasonligo/ParserMain.ml +++ b/src/passes/1-parser/reasonligo/ParserMain.ml @@ -1,145 +1,27 @@ -(** Driver for the Reason LIGO parser *) +(** Driver for the ReasonLIGO parser *) -let extension = ".religo" -let options = EvalOpt.read "ReasonLIGO" extension +module IO = + struct + let ext = ".religo" + let options = EvalOpt.read "ReasonLIGO" ext + end -open Printf +module ExtParser = + struct + type ast = AST.t + type expr = AST.expr + include Parser + end -(** Error printing and exception tracing -*) -let () = Printexc.record_backtrace true +module ExtParserLog = + struct + type ast = AST.t + include ParserLog + end -(** Extracting the input file -*) -let file = - match options#input with - None | Some "-" -> false - | Some _ -> true - -(** {1 Error printing and exception tracing} *) - -let () = Printexc.record_backtrace true - -let external_ text = - Utils.highlight (sprintf "External error: %s" text); exit 1;; - -(** {1 Preprocessing the input source and opening the input channels} *) - -(** Path for CPP inclusions (#include) -*) -let lib_path = - match options#libs with - [] -> "" - | libs -> let mk_I dir path = sprintf " -I %s%s" dir path - in List.fold_right mk_I libs "" - -let prefix = - match options#input with - None | Some "-" -> "temp" - | Some file -> Filename.(file |> basename |> remove_extension) - -let suffix = ".pp" ^ extension - 
-let pp_input = - if Utils.String.Set.mem "cpp" options#verbose - then prefix ^ suffix - else let pp_input, pp_out = Filename.open_temp_file prefix suffix - in close_out pp_out; pp_input - -let cpp_cmd = - match options#input with - None | Some "-" -> - sprintf "cpp -traditional-cpp%s - > %s" - lib_path pp_input - | Some file -> - sprintf "cpp -traditional-cpp%s %s > %s" - lib_path file pp_input - -let () = - if Utils.String.Set.mem "cpp" options#verbose - then eprintf "%s\n%!" cpp_cmd; - if Sys.command cpp_cmd <> 0 then - external_ (sprintf "the command \"%s\" failed." cpp_cmd) - -(** {1 Instanciating the lexer} *) - -module Lexer = Lexer.Make (LexToken) -module Log = LexerLog.Make (Lexer) -module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr) - -let lexer_inst = Lexer.open_token_stream (Some pp_input) -let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst - -and cout = stdout - -let log = Log.output_token ~offsets:options#offsets - options#mode options#cmd cout - -and close_all () = close (); close_out cout - -(** {1 Tokeniser} *) - -let tokeniser = read ~log - -(** {1 Main} *) - -let () = - try - let ast = - if options#mono - then ParserFront.mono_contract tokeniser buffer - else ParserFront.incr_contract lexer_inst in - if Utils.String.Set.mem "ast" options#verbose - then let buffer = Buffer.create 131 in - let state = ParserLog.mk_state - ~offsets:options#offsets - ~mode:options#mode - ~buffer in - begin - ParserLog.pp_ast state ast; - Buffer.output_buffer stdout buffer - end - else if Utils.String.Set.mem "ast-tokens" options#verbose - then let buffer = Buffer.create 131 in - let state = ParserLog.mk_state - ~offsets:options#offsets - ~mode:options#mode - ~buffer in - begin - ParserLog.print_tokens state ast; - Buffer.output_buffer stdout buffer - end - with - (* Lexing errors *) - Lexer.Error err -> - close_all (); - let msg = - Lexer.format_error ~offsets:options#offsets - options#mode err ~file - in prerr_string msg - - (* Incremental API of Menhir *) - | ParserFront.Point point -> - let () = close_all () in - let error = - ParserFront.format_error ~offsets:options#offsets - options#mode point - in eprintf "\027[31m%s\027[0m%!" error - - (* Monolithic API of Menhir *) - | Parser.Error -> - let () = close_all () in - let invalid, valid_opt = - match get_win () with - Lexer.Nil -> - assert false (* Safe: There is always at least EOF. *) - | Lexer.One invalid -> invalid, None - | Lexer.Two (invalid, valid) -> invalid, Some valid in - let point = "", valid_opt, invalid in - let error = - ParserFront.format_error ~offsets:options#offsets - options#mode point - in eprintf "\027[31m%s\027[0m%!" 
error - - (* I/O errors *) - | Sys_error msg -> Utils.highlight msg +module M = ParserUnit.Make (IO) + (Lexer.Make (LexToken)) + (AST) + (ExtParser) + (ParErr) + (ExtParserLog) diff --git a/src/passes/1-parser/reasonligo/dune b/src/passes/1-parser/reasonligo/dune index 39250ecc8..e59426b63 100644 --- a/src/passes/1-parser/reasonligo/dune +++ b/src/passes/1-parser/reasonligo/dune @@ -36,7 +36,7 @@ parser_reasonligo parser_cameligo) (modules - ParErr ParserAPI ParserMain) + ParErr ParserMain) (preprocess (pps bisect_ppx --conditional)) (flags (:standard -open Simple_utils -open Parser_cameligo -open Parser_shared -open Parser_reasonligo))) diff --git a/src/passes/1-parser/cameligo/ParserAPI.ml b/src/passes/1-parser/shared/ParserAPI.ml similarity index 59% rename from src/passes/1-parser/cameligo/ParserAPI.ml rename to src/passes/1-parser/shared/ParserAPI.ml index df82173a9..e24be2b48 100644 --- a/src/passes/1-parser/cameligo/ParserAPI.ml +++ b/src/passes/1-parser/shared/ParserAPI.ml @@ -1,9 +1,47 @@ (* Generic parser for LIGO *) +module type PARSER = + sig + (* The type of tokens, abstract syntax trees and expressions *) + + type token + type ast + type expr + + (* This exception is raised by the monolithic API functions. *) + + exception Error + + (* The monolithic API. *) + + val interactive_expr : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr + val contract : + (Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast + + module MenhirInterpreter : + sig + (* The incremental API. *) + + include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE + with type token = token + end + + (* The entry point(s) to the incremental API. *) + + module Incremental : + sig + val interactive_expr : + Lexing.position -> expr MenhirInterpreter.checkpoint + val contract : + Lexing.position -> ast MenhirInterpreter.checkpoint + end + end + (* Main functor *) -module Make (Lexer: Lexer.S with module Token := LexToken) - (Parser: module type of Parser) +module Make (Lexer: Lexer.S) + (Parser: PARSER with type token = Lexer.Token.token) (ParErr: sig val message : int -> string end) = struct module I = Parser.MenhirInterpreter @@ -31,9 +69,9 @@ module Make (Lexer: Lexer.S with module Token := LexToken) (* The parser has suspended itself because of a syntax error. Stop. *) type message = string - type valid = Lexer.token - type invalid = Lexer.token - type error = message * valid option * invalid + type valid = Parser.token + type invalid = Parser.token + type error = message * valid option * invalid exception Point of error @@ -48,7 +86,7 @@ module Make (Lexer: Lexer.S with module Token := LexToken) (* The two Menhir APIs are called from the following two functions. 
*) - let incr_contract Lexer.{read; buffer; get_win; close; _} : AST.t = + let incr_contract Lexer.{read; buffer; get_win; close; _} : Parser.ast = let supplier = I.lexer_lexbuf_to_supplier read buffer and failure = failure get_win in let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in @@ -60,21 +98,21 @@ module Make (Lexer: Lexer.S with module Token := LexToken) (* Errors *) let format_error ?(offsets=true) mode (msg, valid_opt, invalid) = - let invalid_region = LexToken.to_region invalid in + let invalid_region = Lexer.Token.to_region invalid in let header = "Parse error " ^ invalid_region#to_string ~offsets mode in let trailer = match valid_opt with None -> - if LexToken.is_eof invalid then "" - else let invalid_lexeme = LexToken.to_lexeme invalid in + if Lexer.Token.is_eof invalid then "" + else let invalid_lexeme = Lexer.Token.to_lexeme invalid in Printf.sprintf ", before \"%s\"" invalid_lexeme | Some valid -> - let valid_lexeme = LexToken.to_lexeme valid in + let valid_lexeme = Lexer.Token.to_lexeme valid in let s = Printf.sprintf ", after \"%s\"" valid_lexeme in - if LexToken.is_eof invalid then s + if Lexer.Token.is_eof invalid then s else - let invalid_lexeme = LexToken.to_lexeme invalid in + let invalid_lexeme = Lexer.Token.to_lexeme invalid in Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in let header = header ^ trailer in header ^ (if msg = "" then ".\n" else ":\n" ^ msg) diff --git a/src/passes/1-parser/shared/ParserAPI.mli b/src/passes/1-parser/shared/ParserAPI.mli new file mode 100644 index 000000000..79ca137c4 --- /dev/null +++ b/src/passes/1-parser/shared/ParserAPI.mli @@ -0,0 +1,60 @@ +(* Generic parser API for LIGO *) + +module type PARSER = + sig + (* The type of tokens. *) + + type token + type ast + type expr + + (* This exception is raised by the monolithic API functions. *) + + exception Error + + (* The monolithic API. *) + + val interactive_expr : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> expr + val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> ast + + (* The incremental API. *) + + module MenhirInterpreter : + sig + include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE + with type token = token + end + + (* The entry point(s) to the incremental API. 
*) + + module Incremental : + sig + val interactive_expr : + Lexing.position -> expr MenhirInterpreter.checkpoint + val contract : + Lexing.position -> ast MenhirInterpreter.checkpoint + end + end + +module Make (Lexer: Lexer.S) + (Parser: PARSER with type token = Lexer.Token.token) + (ParErr: sig val message : int -> string end) : + sig + (* Monolithic and incremental APIs of Menhir for parsing *) + + val mono_contract : + (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> Parser.ast + val incr_contract : + Lexer.instance -> Parser.ast + + (* Error handling *) + + type message = string + type valid = Parser.token + type invalid = Parser.token + type error = message * valid option * invalid + + exception Point of error + + val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string + end diff --git a/src/passes/1-parser/shared/dune b/src/passes/1-parser/shared/dune index ca41804a8..10e377a93 100644 --- a/src/passes/1-parser/shared/dune +++ b/src/passes/1-parser/shared/dune @@ -4,15 +4,17 @@ (name parser_shared) (public_name ligo.parser.shared) (libraries + menhirLib simple-utils uutf getopt - zarith - ) + zarith) (preprocess - (pps bisect_ppx --conditional) - ) + (pps bisect_ppx --conditional)) (modules + LexerUnit + ParserUnit + ParserAPI Lexer LexerLog Utils From ddd438aaa84574fe5df32c8c004901d893d70e4f Mon Sep 17 00:00:00 2001 From: Christian Rinderknecht Date: Sat, 4 Jan 2020 23:32:50 +0100 Subject: [PATCH 04/15] Forgot to add. --- src/passes/1-parser/shared/LexerUnit.ml | 65 +++++++++ src/passes/1-parser/shared/ParserUnit.ml | 167 +++++++++++++++++++++++ 2 files changed, 232 insertions(+) create mode 100644 src/passes/1-parser/shared/LexerUnit.ml create mode 100644 src/passes/1-parser/shared/ParserUnit.ml diff --git a/src/passes/1-parser/shared/LexerUnit.ml b/src/passes/1-parser/shared/LexerUnit.ml new file mode 100644 index 000000000..8094bdddd --- /dev/null +++ b/src/passes/1-parser/shared/LexerUnit.ml @@ -0,0 +1,65 @@ +(* Functor to build a standalone LIGO lexer *) + +module type S = + sig + val ext : string (* LIGO file extension *) + val options : EvalOpt.options (* CLI options *) + end + +module Make (IO: S) (Lexer: Lexer.S) = + struct + open Printf + (* Error printing and exception tracing *) + + let () = Printexc.record_backtrace true + + let external_ text = + Utils.highlight (sprintf "External error: %s" text); exit 1 + + (* Preprocessing the input source and opening the input channels *) + + (* Path for CPP inclusions (#include) *) + + let lib_path = + match IO.options#libs with + [] -> "" + | libs -> let mk_I dir path = sprintf " -I %s%s" dir path + in List.fold_right mk_I libs "" + + let prefix = + match IO.options#input with + None | Some "-" -> "temp" + | Some file -> Filename.(file |> basename |> remove_extension) + + let suffix = ".pp" ^ IO.ext + + let pp_input = + if Utils.String.Set.mem "cpp" IO.options#verbose + then prefix ^ suffix + else let pp_input, pp_out = Filename.open_temp_file prefix suffix + in close_out pp_out; pp_input + + let cpp_cmd = + match IO.options#input with + None | Some "-" -> + sprintf "cpp -traditional-cpp%s - > %s" + lib_path pp_input + | Some file -> + sprintf "cpp -traditional-cpp%s %s > %s" + lib_path file pp_input + + let () = + if Utils.String.Set.mem "cpp" IO.options#verbose + then eprintf "%s\n%!" cpp_cmd; + if Sys.command cpp_cmd <> 0 then + external_ (sprintf "the command \"%s\" failed." 
cpp_cmd) + + (* Running the lexer on the input file *) + + module Log = LexerLog.Make (Lexer) + + let () = Log.trace ~offsets:IO.options#offsets + IO.options#mode (Some pp_input) + IO.options#cmd + + end diff --git a/src/passes/1-parser/shared/ParserUnit.ml b/src/passes/1-parser/shared/ParserUnit.ml new file mode 100644 index 000000000..fe1af9559 --- /dev/null +++ b/src/passes/1-parser/shared/ParserUnit.ml @@ -0,0 +1,167 @@ +(* Functor to build a standalone LIGO parser *) + +module type S = + sig + val ext : string (* LIGO file extension *) + val options : EvalOpt.options (* CLI options *) + end + +module type Pretty = + sig + type state + type ast + val pp_ast : + state -> ast -> unit + val mk_state : + offsets:bool -> mode:[`Point|`Byte] -> buffer:Buffer.t -> state + val print_tokens : state -> ast -> unit + end + +module Make (IO: S) + (Lexer: Lexer.S) + (AST: sig type t type expr end) + (Parser: ParserAPI.PARSER + with type ast = AST.t + and type expr = AST.expr + and type token = Lexer.token) + (ParErr: sig val message : int -> string end) + (ParserLog: Pretty with type ast = AST.t) = + struct + open Printf + + (* Error printing and exception tracing *) + + let () = Printexc.record_backtrace true + + let external_ text = + Utils.highlight (sprintf "External error: %s" text); exit 1 + + (* Extracting the input file *) + + let file = + match IO.options#input with + None | Some "-" -> false + | Some _ -> true + + (* Preprocessing the input source and opening the input channels *) + + (* Path for CPP inclusions (#include) *) + + let lib_path = + match IO.options#libs with + [] -> "" + | libs -> let mk_I dir path = sprintf " -I %s%s" dir path + in List.fold_right mk_I libs "" + + let prefix = + match IO.options#input with + None | Some "-" -> "temp" + | Some file -> Filename.(file |> basename |> remove_extension) + + let suffix = ".pp" ^ IO.ext + + let pp_input = + if Utils.String.Set.mem "cpp" IO.options#verbose + then prefix ^ suffix + else let pp_input, pp_out = Filename.open_temp_file prefix suffix + in close_out pp_out; pp_input + + let cpp_cmd = + match IO.options#input with + None | Some "-" -> + sprintf "cpp -traditional-cpp%s - > %s" + lib_path pp_input + | Some file -> + sprintf "cpp -traditional-cpp%s %s > %s" + lib_path file pp_input + + let () = + if Utils.String.Set.mem "cpp" IO.options#verbose + then eprintf "%s\n%!" cpp_cmd; + if Sys.command cpp_cmd <> 0 then + external_ (sprintf "the command \"%s\" failed." 
cpp_cmd) + + (* Instanciating the lexer *) + + module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr) + + let lexer_inst = Lexer.open_token_stream (Some pp_input) + let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst + + and cout = stdout + + let close_all () = close (); close_out cout + + (* Tokeniser *) + + module Log = LexerLog.Make (Lexer) + + let log = Log.output_token ~offsets:IO.options#offsets + IO.options#mode IO.options#cmd cout + + let tokeniser = read ~log + + (* Main *) + + let () = + try + let ast = + if IO.options#mono + then ParserFront.mono_contract tokeniser buffer + else ParserFront.incr_contract lexer_inst in + if Utils.String.Set.mem "ast" IO.options#verbose + then let buffer = Buffer.create 131 in + let state = ParserLog.mk_state + ~offsets:IO.options#offsets + ~mode:IO.options#mode + ~buffer in + begin + ParserLog.pp_ast state ast; + Buffer.output_buffer stdout buffer + end + else if Utils.String.Set.mem "ast-tokens" IO.options#verbose + then let buffer = Buffer.create 131 in + let state = ParserLog.mk_state + ~offsets:IO.options#offsets + ~mode:IO.options#mode + ~buffer in + begin + ParserLog.print_tokens state ast; + Buffer.output_buffer stdout buffer + end + with + (* Lexing errors *) + Lexer.Error err -> + close_all (); + let msg = + Lexer.format_error ~offsets:IO.options#offsets + IO.options#mode err ~file + in prerr_string msg + + (* Incremental API of Menhir *) + | ParserFront.Point point -> + let () = close_all () in + let error = + ParserFront.format_error ~offsets:IO.options#offsets + IO.options#mode point + in eprintf "\027[31m%s\027[0m%!" error + + (* Monolithic API of Menhir *) + | Parser.Error -> + let () = close_all () in + let invalid, valid_opt = + match get_win () with + Lexer.Nil -> + assert false (* Safe: There is always at least EOF. *) + | Lexer.One invalid -> invalid, None + | Lexer.Two (invalid, valid) -> invalid, Some valid in + let point = "", valid_opt, invalid in + let error = + ParserFront.format_error ~offsets:IO.options#offsets + IO.options#mode point + in eprintf "\027[31m%s\027[0m%!" error + + (* I/O errors *) + | Sys_error msg -> Utils.highlight msg + + end From 301defda3fab5f428e43a8e7954e1dbfa8ea3082 Mon Sep 17 00:00:00 2001 From: Sander Spies Date: Mon, 6 Jan 2020 13:46:11 +0100 Subject: [PATCH 05/15] Show lexer error messages in CameLIGO + PascaLIGO. 
--- src/passes/1-parser/cameligo.ml | 166 ++++++++++++++----------------- src/passes/1-parser/pascaligo.ml | 156 +++++++++++++---------------- 2 files changed, 147 insertions(+), 175 deletions(-) diff --git a/src/passes/1-parser/cameligo.ml b/src/passes/1-parser/cameligo.ml index ed0830312..8a4eb33f9 100644 --- a/src/passes/1-parser/cameligo.ml +++ b/src/passes/1-parser/cameligo.ml @@ -6,107 +6,93 @@ module ParserLog = Parser_cameligo.ParserLog module LexToken = Parser_cameligo.LexToken module Lexer = Lexer.Make(LexToken) +module Errors = struct + + let lexer_error (e: Lexer.error AST.reg) = + let title () = "lexer error" in + let message () = Lexer.error_to_string e.value in + let data = [ + ("parser_loc", + fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region + ) + ] in + error ~data title message + + let parser_error start end_ = + let title () = "parser error" in + let message () = "" in + let loc = Region.make + ~start:(Pos.from_byte start) + ~stop:(Pos.from_byte end_) + in + let data = [ + ("parser_loc", + fun () -> Format.asprintf "%a" Location.pp_lift @@ loc + ) + ] in + error ~data title message + + let unrecognized_error start end_ = + let title () = "unrecognized error" in + let message () = "" in + let loc = Region.make + ~start:(Pos.from_byte start) + ~stop:(Pos.from_byte end_) + in + let data = [ + ("unrecognized_loc", + fun () -> Format.asprintf "%a" Location.pp_lift @@ loc + ) + ] in + error ~data title message + +end + +open Errors + +type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a + +let parse (parser: 'a parser) lexbuf = + let Lexer.{read ; close ; _} = Lexer.open_token_stream None in + let result = + try + ok (parser read lexbuf) + with + | Parser.Error -> + let start = Lexing.lexeme_start_p lexbuf in + let end_ = Lexing.lexeme_end_p lexbuf in + fail @@ (parser_error start end_) + | Lexer.Error e -> + fail @@ (lexer_error e) + | _ -> + let _ = Printexc.print_backtrace Pervasives.stdout in + let start = Lexing.lexeme_start_p lexbuf in + let end_ = Lexing.lexeme_end_p lexbuf in + fail @@ (unrecognized_error start end_) + in + close (); + result + let parse_file (source: string) : AST.t result = let pp_input = - let prefix = Filename.(source |> basename |> remove_extension) - and suffix = ".pp.mligo" - in prefix ^ suffix in + let prefix = Filename.(source |> basename |> remove_extension) + and suffix = ".pp.mligo" + in prefix ^ suffix in let cpp_cmd = Printf.sprintf "cpp -traditional-cpp %s > %s" - source pp_input in - let%bind () = sys_command cpp_cmd in + source pp_input in + let%bind () = sys_command cpp_cmd in let%bind channel = generic_try (simple_error "error opening file") @@ (fun () -> open_in pp_input) in let lexbuf = Lexing.from_channel channel in - let Lexer.{read ; close ; _} = - Lexer.open_token_stream None in - specific_try (function - | Parser.Error -> ( - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Parse error at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n" - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) - start.pos_fname source - in - simple_error str - ) - | exn -> - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). 
In file \"%s|%s\"\n" - (Printexc.to_string exn) - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) - start.pos_fname source - in - simple_error str - ) @@ (fun () -> - let raw = Parser.contract read lexbuf in - close () ; - raw - ) >>? fun raw -> - ok raw + parse (Parser.contract) lexbuf let parse_string (s:string) : AST.t result = let lexbuf = Lexing.from_string s in - let Lexer.{read ; close ; _} = - Lexer.open_token_stream None in - specific_try (function - | Parser.Error -> ( - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Parse error at \"%s\" from (%d, %d) to (%d, %d)\n" - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in - simple_error str - ) - | _ -> simple_error "unrecognized parse_ error" - ) @@ (fun () -> - let raw = Parser.contract read lexbuf in - close () ; - raw - ) >>? fun raw -> - ok raw + parse (Parser.contract) lexbuf let parse_expression (s:string) : AST.expr result = - let lexbuf = Lexing.from_string s in - let Lexer.{read ; close; _} = - Lexer.open_token_stream None in - specific_try (function - | Parser.Error -> ( - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Parse error at \"%s\" from (%d, %d) to (%d, %d)\n" - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in - simple_error str - ) - | exn -> - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In expression \"%s|%s\"\n" - (Printexc.to_string exn) - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) - start.pos_fname s - in - simple_error str - ) @@ (fun () -> - let raw = Parser.interactive_expr read lexbuf in - close () ; - raw - ) >>? 
fun raw -> - ok raw + let lexbuf = Lexing.from_string s in + parse (Parser.interactive_expr) lexbuf \ No newline at end of file diff --git a/src/passes/1-parser/pascaligo.ml b/src/passes/1-parser/pascaligo.ml index 62d56ab8b..05c0af4df 100644 --- a/src/passes/1-parser/pascaligo.ml +++ b/src/passes/1-parser/pascaligo.ml @@ -6,6 +6,73 @@ module ParserLog = Parser_pascaligo.ParserLog module LexToken = Parser_pascaligo.LexToken module Lexer = Lexer.Make(LexToken) +module Errors = struct + + let lexer_error (e: Lexer.error AST.reg) = + let title () = "lexer error" in + let message () = Lexer.error_to_string e.value in + let data = [ + ("parser_loc", + fun () -> Format.asprintf "%a" Location.pp_lift @@ e.region + ) + ] in + error ~data title message + + let parser_error start end_ = + let title () = "parser error" in + let message () = "" in + let loc = Region.make + ~start:(Pos.from_byte start) + ~stop:(Pos.from_byte end_) + in + let data = [ + ("parser_loc", + fun () -> Format.asprintf "%a" Location.pp_lift @@ loc + ) + ] in + error ~data title message + + let unrecognized_error start end_ = + let title () = "unrecognized error" in + let message () = "" in + let loc = Region.make + ~start:(Pos.from_byte start) + ~stop:(Pos.from_byte end_) + in + let data = [ + ("unrecognized_loc", + fun () -> Format.asprintf "%a" Location.pp_lift @@ loc + ) + ] in + error ~data title message + +end + +open Errors + +type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a + +let parse (parser: 'a parser) lexbuf = + let Lexer.{read ; close ; _} = Lexer.open_token_stream None in + let result = + try + ok (parser read lexbuf) + with + | Parser.Error -> + let start = Lexing.lexeme_start_p lexbuf in + let end_ = Lexing.lexeme_end_p lexbuf in + fail @@ (parser_error start end_) + | Lexer.Error e -> + fail @@ (lexer_error e) + | _ -> + let _ = Printexc.print_backtrace Pervasives.stdout in + let start = Lexing.lexeme_start_p lexbuf in + let end_ = Lexing.lexeme_end_p lexbuf in + fail @@ (unrecognized_error start end_) + in + close (); + result + let parse_file (source: string) : AST.t result = let pp_input = let prefix = Filename.(source |> basename |> remove_extension) @@ -20,93 +87,12 @@ let parse_file (source: string) : AST.t result = generic_try (simple_error "error opening file") @@ (fun () -> open_in pp_input) in let lexbuf = Lexing.from_channel channel in - let Lexer.{read ; close ; _} = - Lexer.open_token_stream None in - specific_try (function - | Parser.Error -> ( - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Parse error at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n" - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) - start.pos_fname source - in - simple_error str - ) - | exn -> - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n" - (Printexc.to_string exn) - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) - start.pos_fname source - in - simple_error str - ) @@ (fun () -> - let raw = Parser.contract read lexbuf in - close () ; - raw - ) >>? 
fun raw -> - ok raw + parse (Parser.contract) lexbuf let parse_string (s:string) : AST.t result = let lexbuf = Lexing.from_string s in - let Lexer.{read ; close ; _} = - Lexer.open_token_stream None in - specific_try (function - | Parser.Error -> ( - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Parse error at \"%s\" from (%d, %d) to (%d, %d)\n" - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in - simple_error str - ) - | _ -> simple_error "unrecognized parse_ error" - ) @@ (fun () -> - let raw = Parser.contract read lexbuf in - close () ; - raw - ) >>? fun raw -> - ok raw + parse (Parser.contract) lexbuf let parse_expression (s:string) : AST.expr result = - let lexbuf = Lexing.from_string s in - let Lexer.{read ; close; _} = - Lexer.open_token_stream None in - specific_try (function - | Parser.Error -> ( - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Parse error at \"%s\" from (%d, %d) to (%d, %d)\n" - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in - simple_error str - ) - | exn -> - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - let str = Format.sprintf - "Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In expression \"%s|%s\"\n" - (Printexc.to_string exn) - (Lexing.lexeme lexbuf) - start.pos_lnum (start.pos_cnum - start.pos_bol) - end_.pos_lnum (end_.pos_cnum - end_.pos_bol) - start.pos_fname s - in - simple_error str - ) @@ (fun () -> - let raw = Parser.interactive_expr read lexbuf in - close () ; - raw - ) >>? 
fun raw -> - ok raw + let lexbuf = Lexing.from_string s in + parse (Parser.interactive_expr) lexbuf From 7e05b7d276e4620cfd6a881b5fa893432e948280 Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Mon, 6 Jan 2020 17:46:00 +0100 Subject: [PATCH 06/15] now removes the entry-point declaration from the program before aggregation --- src/main/compile/of_mini_c.ml | 7 ++++--- src/stages/mini_c/misc.ml | 4 ++-- vendors/ligo-utils/simple-utils/x_list.ml | 5 +++++ 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/src/main/compile/of_mini_c.ml b/src/main/compile/of_mini_c.ml index 4387ca133..fb2945265 100644 --- a/src/main/compile/of_mini_c.ml +++ b/src/main/compile/of_mini_c.ml @@ -25,9 +25,10 @@ let aggregate_and_compile = fun program form -> | ContractForm _ -> compile_contract aggregated' | ExpressionForm _ -> compile_expression aggregated' -let aggregate_and_compile_contract = fun program name -> - let%bind (exp, _) = get_entry program name in - aggregate_and_compile program (ContractForm exp) +let aggregate_and_compile_contract = fun (program : Types.program) name -> + let%bind (exp, idx) = get_entry program name in + let program' = List.remove_from idx program in + aggregate_and_compile program' (ContractForm exp) let aggregate_and_compile_expression = fun program exp -> aggregate_and_compile program (ExpressionForm exp) diff --git a/src/stages/mini_c/misc.ml b/src/stages/mini_c/misc.ml index 5cae24799..2dae579d3 100644 --- a/src/stages/mini_c/misc.ml +++ b/src/stages/mini_c/misc.ml @@ -129,14 +129,14 @@ let get_entry (lst : program) (name : string) : (expression * int) result = then Some decl_expr else None in - List.find_map aux lst + List.find_map aux (List.rev lst) in let entry_index = let aux x = let (((decl_name , _) , _)) = x in Var.equal decl_name (Var.of_name name) in - List.find_index aux lst + (List.length lst) - (List.find_index aux (List.rev lst)) - 1 in ok (entry_expression , entry_index) diff --git a/vendors/ligo-utils/simple-utils/x_list.ml b/vendors/ligo-utils/simple-utils/x_list.ml index 19bf881a5..8541c4614 100644 --- a/vendors/ligo-utils/simple-utils/x_list.ml +++ b/vendors/ligo-utils/simple-utils/x_list.ml @@ -5,6 +5,11 @@ let rec remove n = function | _ :: tl when n = 0 -> tl | hd :: tl -> hd :: remove (n - 1) tl +let rec remove_from n = function + | [] -> raise (Failure "List.remove_from") + | _ when n = 0 -> [] + | hd :: tl -> hd :: remove_from (n - 1) tl + let map ?(acc = []) f lst = let rec aux acc f = function | [] -> acc From 8ce4772ae4f202f177726eb4a21757a37ae7654f Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Mon, 6 Jan 2020 18:51:43 +0100 Subject: [PATCH 07/15] add tests --- src/bin/expect_tests/contract_tests.ml | 10 +++++++++- src/test/contracts/double_main.ligo | 8 ++++++++ src/test/contracts/redeclaration.ligo | 6 ++++++ 3 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 src/test/contracts/double_main.ligo create mode 100644 src/test/contracts/redeclaration.ligo diff --git a/src/bin/expect_tests/contract_tests.ml b/src/bin/expect_tests/contract_tests.ml index b9b34c076..97516018e 100644 --- a/src/bin/expect_tests/contract_tests.ml +++ b/src/bin/expect_tests/contract_tests.ml @@ -943,4 +943,12 @@ let%expect_test _ = let%expect_test _ = run_ligo_bad [ "compile-contract" ; contract "bad_timestamp.ligo" ; "main" ] ; - [%expect {| ligo: in file "bad_timestamp.ligo", line 5, characters 29-43. 
Badly formatted timestamp "badtimestamp": {"location":"in file \"bad_timestamp.ligo\", line 5, characters 29-43"} |}] \ No newline at end of file + [%expect {| ligo: in file "bad_timestamp.ligo", line 5, characters 29-43. Badly formatted timestamp "badtimestamp": {"location":"in file \"bad_timestamp.ligo\", line 5, characters 29-43"} |}] + +let%expect_test _ = + run_ligo_good [ "dry-run" ; contract "redeclaration.ligo" ; "main" ; "unit" ; "0" ] ; + [%expect {|( [] , 0 ) |}] + +let%expect_test _ = + run_ligo_good [ "dry-run" ; contract "double_main.ligo" ; "main" ; "unit" ; "0" ] ; + [%expect {|( [] , 2 ) |}] \ No newline at end of file diff --git a/src/test/contracts/double_main.ligo b/src/test/contracts/double_main.ligo new file mode 100644 index 000000000..6ad75dd80 --- /dev/null +++ b/src/test/contracts/double_main.ligo @@ -0,0 +1,8 @@ +function main(const p : unit; const s : int) : list(operation) * int is + ((list end : list(operation)), s + 1) + +function main(const p : unit; const s : int) : list(operation) * int is + begin + const ret : list(operation) * int = main(p, s) + end + with (ret.0, ret.1 + 1) \ No newline at end of file diff --git a/src/test/contracts/redeclaration.ligo b/src/test/contracts/redeclaration.ligo new file mode 100644 index 000000000..c74594ad3 --- /dev/null +++ b/src/test/contracts/redeclaration.ligo @@ -0,0 +1,6 @@ +function foo(const p : unit) : int is 0 + +function main(const p : unit; const s : int) : list(operation) * int is + ((list end : list(operation)), foo(unit)) + +function foo(const p : unit) : int is 1 \ No newline at end of file From a0a8f114c0ed599fbed8aa169e623dc1d23e40e3 Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Mon, 6 Jan 2020 19:24:23 +0100 Subject: [PATCH 08/15] replaces remove_from by take which does not raise any exceptions --- src/main/compile/of_mini_c.ml | 2 +- vendors/ligo-utils/simple-utils/x_list.ml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/main/compile/of_mini_c.ml b/src/main/compile/of_mini_c.ml index fb2945265..be27f0f6b 100644 --- a/src/main/compile/of_mini_c.ml +++ b/src/main/compile/of_mini_c.ml @@ -27,7 +27,7 @@ let aggregate_and_compile = fun program form -> let aggregate_and_compile_contract = fun (program : Types.program) name -> let%bind (exp, idx) = get_entry program name in - let program' = List.remove_from idx program in + let program' = List.take idx program in aggregate_and_compile program' (ContractForm exp) let aggregate_and_compile_expression = fun program exp -> diff --git a/vendors/ligo-utils/simple-utils/x_list.ml b/vendors/ligo-utils/simple-utils/x_list.ml index 8541c4614..4b74c0261 100644 --- a/vendors/ligo-utils/simple-utils/x_list.ml +++ b/vendors/ligo-utils/simple-utils/x_list.ml @@ -5,10 +5,10 @@ let rec remove n = function | _ :: tl when n = 0 -> tl | hd :: tl -> hd :: remove (n - 1) tl -let rec remove_from n = function - | [] -> raise (Failure "List.remove_from") +let rec take n = function + | [] -> [] | _ when n = 0 -> [] - | hd :: tl -> hd :: remove_from (n - 1) tl + | hd :: tl -> hd :: take (n - 1) tl let map ?(acc = []) f lst = let rec aux acc f = function From 6841609362a6a18bf09ec7f1946b14c444c4a6db Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Mon, 6 Jan 2020 21:31:45 +0100 Subject: [PATCH 09/15] allow key_hash literals --- src/passes/3-self_ast_simplified/tezos_type_annotation.ml | 1 + 1 file changed, 1 insertion(+) diff --git a/src/passes/3-self_ast_simplified/tezos_type_annotation.ml 
b/src/passes/3-self_ast_simplified/tezos_type_annotation.ml index a6436257f..32f5fcb5c 100644 --- a/src/passes/3-self_ast_simplified/tezos_type_annotation.ml +++ b/src/passes/3-self_ast_simplified/tezos_type_annotation.ml @@ -17,6 +17,7 @@ let peephole_expression : expression -> expression result = fun e -> match e.expression with | E_ascription (e' , t) as e -> ( match (e'.expression , t.type_expression') with + | (E_literal (Literal_string s) , T_constant (TC_key_hash)) -> return @@ E_literal (Literal_key_hash s) | (E_literal (Literal_int i) , T_constant (TC_timestamp)) -> return @@ E_literal (Literal_timestamp i) | (E_literal (Literal_string str) , T_constant (TC_timestamp)) -> let%bind time = From 55a8734c81b85e0fe0cdb63f13f465409f88166a Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Mon, 6 Jan 2020 21:31:54 +0100 Subject: [PATCH 10/15] key_hash now comparable --- src/passes/8-compiler/compiler_type.ml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/passes/8-compiler/compiler_type.ml b/src/passes/8-compiler/compiler_type.ml index 9d0f9b734..5094bca67 100644 --- a/src/passes/8-compiler/compiler_type.ml +++ b/src/passes/8-compiler/compiler_type.ml @@ -15,6 +15,7 @@ module Ty = struct let tez_k = Mutez_key None let int_k = Int_key None let string_k = String_key None + let key_hash_k = Key_hash_key None let address_k = Address_key None let timestamp_k = Timestamp_key None let bytes_k = Bytes_key None @@ -72,7 +73,7 @@ module Ty = struct | Base_operation -> fail (not_comparable "operation") | Base_signature -> fail (not_comparable "signature") | Base_key -> fail (not_comparable "key") - | Base_key_hash -> fail (not_comparable "key_hash") + | Base_key_hash -> return key_hash_k | Base_chain_id -> fail (not_comparable "chain_id") let comparable_type : type_value -> ex_comparable_ty result = fun tv -> From 2667ed62b376701f5097c55e7a9a6bb440e32431 Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Mon, 6 Jan 2020 22:29:07 +0100 Subject: [PATCH 11/15] fix indent (unrelated) --- src/test/multisig_tests.ml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/test/multisig_tests.ml b/src/test/multisig_tests.ml index d89719c4c..87258f844 100644 --- a/src/test/multisig_tests.ml +++ b/src/test/multisig_tests.ml @@ -17,10 +17,10 @@ let get_program = ) let compile_main () = - let%bind simplified = Ligo.Compile.Of_source.compile "./contracts/multisig.ligo" (Syntax_name "pascaligo") in - let%bind typed_prg,_ = Ligo.Compile.Of_simplified.compile simplified in - let%bind mini_c_prg = Ligo.Compile.Of_typed.compile typed_prg in - let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in + let%bind simplified = Ligo.Compile.Of_source.compile "./contracts/multisig.ligo" (Syntax_name "pascaligo") in + let%bind typed_prg,_ = Ligo.Compile.Of_simplified.compile simplified in + let%bind mini_c_prg = Ligo.Compile.Of_typed.compile typed_prg in + let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in let%bind (_contract: Tezos_utils.Michelson.michelson) = (* fails if the given entry point is not a valid contract *) Ligo.Compile.Of_michelson.build_contract michelson_prg in From d885eb8d4a1013a089251ef553f80c0919e77017 Mon Sep 17 00:00:00 2001 From: Lesenechal Remi Date: Mon, 6 Jan 2020 22:52:30 +0100 Subject: [PATCH 12/15] check key_hash format --- src/bin/expect_tests/contract_tests.ml | 2 +- src/passes/3-self_ast_simplified/literals.ml | 19 ++++++++++++++----- 
src/stages/common/PP.ml | 6 +++--- 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/src/bin/expect_tests/contract_tests.ml b/src/bin/expect_tests/contract_tests.ml index 97516018e..acf82eb2f 100644 --- a/src/bin/expect_tests/contract_tests.ml +++ b/src/bin/expect_tests/contract_tests.ml @@ -939,7 +939,7 @@ let%expect_test _ = let%expect_test _ = run_ligo_bad [ "compile-contract" ; contract "bad_address_format.religo" ; "main" ] ; - [%expect {| ligo: in file "bad_address_format.religo", line 2, characters 25-47. Badly formatted address "KT1badaddr": {"location":"in file \"bad_address_format.religo\", line 2, characters 25-47"} |}] + [%expect {| ligo: in file "bad_address_format.religo", line 2, characters 25-47. Badly formatted literal: address "KT1badaddr" {"location":"in file \"bad_address_format.religo\", line 2, characters 25-47"} |}] let%expect_test _ = run_ligo_bad [ "compile-contract" ; contract "bad_timestamp.ligo" ; "main" ] ; diff --git a/src/passes/3-self_ast_simplified/literals.ml b/src/passes/3-self_ast_simplified/literals.ml index dfe3cac4a..c2694ed28 100644 --- a/src/passes/3-self_ast_simplified/literals.ml +++ b/src/passes/3-self_ast_simplified/literals.ml @@ -3,23 +3,32 @@ open Trace open Proto_alpha_utils module Errors = struct - let bad_literal_address s_addr loc () = - let title = (thunk ("Badly formatted address \""^s_addr^"\"")) in - let message () = "" in + + let bad_format e () = + let title = (thunk ("Badly formatted literal")) in + let message () = Format.asprintf "%a" Ast_simplified.PP.expression e in let data = [ - ("location" , fun () -> Format.asprintf "%a" Location.pp loc) + ("location" , fun () -> Format.asprintf "%a" Location.pp e.location) ] in error ~data title message () + end open Errors let peephole_expression : expression -> expression result = fun e -> let return expression = ok { e with expression } in match e.expression with + | E_literal (Literal_key_hash s) as l -> ( + let open Tezos_crypto in + let%bind (_pkh:Crypto.Signature.public_key_hash) = + Trace.trace_tzresult (bad_format e) @@ + Signature.Public_key_hash.of_b58check s in + return l + ) | E_literal (Literal_address s) as l -> ( let open Memory_proto_alpha in let%bind (_contract:Protocol.Alpha_context.Contract.t) = - Trace.trace_alpha_tzresult (bad_literal_address s e.location) @@ + Trace.trace_alpha_tzresult (bad_format e) @@ Protocol.Alpha_context.Contract.of_b58check s in return l ) diff --git a/src/stages/common/PP.ml b/src/stages/common/PP.ml index dbcc3c43f..74dd5b78b 100644 --- a/src/stages/common/PP.ml +++ b/src/stages/common/PP.ml @@ -190,12 +190,12 @@ let literal ppf (l:literal) = match l with | Literal_mutez n -> fprintf ppf "%dmutez" n | Literal_string s -> fprintf ppf "%S" s | Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b) - | Literal_address s -> fprintf ppf "@%S" s + | Literal_address s -> fprintf ppf "address %S" s | Literal_operation _ -> fprintf ppf "Operation(...bytes)" | Literal_key s -> fprintf ppf "key %s" s | Literal_key_hash s -> fprintf ppf "key_hash %s" s - | Literal_signature s -> fprintf ppf "Signature %s" s - | Literal_chain_id s -> fprintf ppf "Chain_id %s" s + | Literal_signature s -> fprintf ppf "signature %s" s + | Literal_chain_id s -> fprintf ppf "chain_id %s" s let%expect_test _ = Format.printf "%a" literal (Literal_bytes (Bytes.of_string "foo")) ; From e73c90113549c1fed3aa2513fc50217be5d81b51 Mon Sep 17 00:00:00 2001 From: Tom Jack Date: Mon, 30 Dec 2019 15:16:22 -0600 Subject: [PATCH 13/15] Show 'deepest' location for errors 
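
The location attached to a compound error is now taken from the most deeply
nested sub-error that provides one, rather than from the first (outermost)
one: `display.ml` folds over the error's `infos` with `List.fold_right`
instead of `List.fold_left`. As a rough sketch of the selection rule
(simplified, standalone OCaml — the helper name and example strings are
illustrative only, not part of this patch):

    (* Pick the last location available in the list; if entries run from
       outermost to innermost error, the innermost location wins. *)
    let deepest_location (locs : string option list) : string option =
      let aux cur prec =
        match prec with
        | None -> cur          (* nothing chosen by deeper levels yet *)
        | Some _ as s -> s     (* keep the choice made deeper down *)
      in
      List.fold_right aux locs None

    (* deepest_location [Some "outer.mligo:1"; None; Some "inner.mligo:4"]
       evaluates to Some "inner.mligo:4". *)

The expected location in `typer_error_tests.ml` is updated accordingly.
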
--- src/bin/expect_tests/typer_error_tests.ml | 2 +- src/main/display.ml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/bin/expect_tests/typer_error_tests.ml b/src/bin/expect_tests/typer_error_tests.ml index 6ecae91e7..45146c911 100644 --- a/src/bin/expect_tests/typer_error_tests.ml +++ b/src/bin/expect_tests/typer_error_tests.ml @@ -20,6 +20,6 @@ let%expect_test _ = [%expect {| ligo: in file "error_typer_6.mligo", line 1, characters 30-64. different type constructors: Expected these two constant type constructors to be the same, but they're different {"a":"string","b":"bool"} |} ] ; run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_7.mligo" ; "main" ] ; - [%expect {| ligo: in file "error_typer_7.mligo", line 4, characters 17-56. records have different sizes: Expected these two types to be the same, but they're different (both are records, but with a different number of arguments) {"a":"record[b -> string , a -> int]","b":"record[c -> bool , b -> string , a -> int]"} |} ] ; + [%expect {| ligo: in file "error_typer_7.mligo", line 4, characters 18-48. records have different sizes: Expected these two types to be the same, but they're different (both are records, but with a different number of arguments) {"a":"record[b -> string , a -> int]","b":"record[c -> bool , b -> string , a -> int]"} |} ] ; diff --git a/src/main/display.ml b/src/main/display.ml index 991f7c2cc..9bea4ca56 100644 --- a/src/main/display.ml +++ b/src/main/display.ml @@ -34,12 +34,12 @@ let rec error_pp ?(dev = false) out (e : error) = | x -> [ x ] in let location = let opt = e |> member "data" |> member "location" |> string in - let aux prec cur = + let aux cur prec = match prec with | None -> cur |> member "data" |> member "location" |> string | Some s -> Some s in - match List.fold_left aux opt infos with + match List.fold_right aux infos opt with | None -> "" | Some s -> s ^ ". 
" in From 6716af91233344d87cd8021060ff76c22adb5c47 Mon Sep 17 00:00:00 2001 From: Tom Jack Date: Mon, 30 Dec 2019 15:24:42 -0600 Subject: [PATCH 14/15] Eliminate some spurious synonyms of "location" --- src/passes/1-parser/reasonligo.ml | 6 +++--- src/passes/2-simplify/cameligo.ml | 18 +++++++++--------- src/passes/2-simplify/pascaligo.ml | 18 +++++++++--------- 3 files changed, 21 insertions(+), 21 deletions(-) diff --git a/src/passes/1-parser/reasonligo.ml b/src/passes/1-parser/reasonligo.ml index 77c2e8e42..dbf28b756 100644 --- a/src/passes/1-parser/reasonligo.ml +++ b/src/passes/1-parser/reasonligo.ml @@ -24,7 +24,7 @@ module Errors = struct let message () = "" in let expression_loc = AST.expr_to_region expr in let data = [ - ("expression_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc) ] in error ~data title message @@ -37,7 +37,7 @@ module Errors = struct ~stop:(Pos.from_byte end_) in let data = [ - ("parser_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ loc ) ] in @@ -51,7 +51,7 @@ module Errors = struct ~stop:(Pos.from_byte end_) in let data = [ - ("unrecognized_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ loc ) ] in diff --git a/src/passes/2-simplify/cameligo.ml b/src/passes/2-simplify/cameligo.ml index 7dbb027d7..5be02f4bf 100644 --- a/src/passes/2-simplify/cameligo.ml +++ b/src/passes/2-simplify/cameligo.ml @@ -32,7 +32,7 @@ module Errors = struct in let data = [ ("expected", fun () -> expected_name); - ("actual_loc" , fun () -> Format.asprintf "%a" Location.pp_lift @@ Raw.pattern_to_region actual) + ("location" , fun () -> Format.asprintf "%a" Location.pp_lift @@ Raw.pattern_to_region actual) ] in error ~data title message @@ -43,7 +43,7 @@ module Errors = struct List.fold_left (fun a p -> Region.cover a (Raw.pattern_to_region p)) Region.ghost patterns in let data = [ - ("loc", fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc) + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ patterns_loc) ] in error ~data title message @@ -52,7 +52,7 @@ module Errors = struct let message () = Format.asprintf "unknown predefined type \"%s\"" name.Region.value in let data = [ - ("typename_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region) ] in error ~data title message @@ -63,7 +63,7 @@ module Errors = struct Format.asprintf "untyped function parameters are not supported yet" in let param_loc = var.Region.region in let data = [ - ("param_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ param_loc) ] in error ~data title message @@ -74,7 +74,7 @@ module Errors = struct Format.asprintf "tuple patterns are not supported yet" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -85,7 +85,7 @@ module Errors = struct Format.asprintf "constant constructors are not supported yet" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -97,7 +97,7 @@ module Errors = struct are not supported yet" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -119,7 +119,7 @@ module Errors = struct Format.asprintf "currently, 
only constructors are supported in patterns" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -130,7 +130,7 @@ module Errors = struct Format.asprintf "currently, only empty lists and constructors (::) \ are supported in patterns" in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ region) ] in error ~data title message diff --git a/src/passes/2-simplify/pascaligo.ml b/src/passes/2-simplify/pascaligo.ml index 0707ee85a..4f9e92deb 100644 --- a/src/passes/2-simplify/pascaligo.ml +++ b/src/passes/2-simplify/pascaligo.ml @@ -72,7 +72,7 @@ module Errors = struct Format.asprintf "constant constructors are not supported yet" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -104,7 +104,7 @@ module Errors = struct let message () = Format.asprintf "unknown predefined type \"%s\"" name.Region.value in let data = [ - ("typename_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ name.Region.region) ] in error ~data title message @@ -116,7 +116,7 @@ module Errors = struct are not supported yet" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -127,7 +127,7 @@ module Errors = struct Format.asprintf "currently, only constructors are supported in patterns" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -138,7 +138,7 @@ module Errors = struct Format.asprintf "tuple patterns are not supported yet" in let pattern_loc = Raw.pattern_to_region p in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ; (** TODO: The labelled arguments should be flowing from the CLI. 
*) ("pattern", @@ -154,7 +154,7 @@ module Errors = struct in patterns are supported" in let pattern_loc = Raw.pattern_to_region pattern in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ pattern_loc) ] in error ~data title message @@ -165,7 +165,7 @@ module Errors = struct Format.asprintf "currently, only empty lists and x::y \ are supported in patterns" in let data = [ - ("pattern_loc", + ("location", fun () -> Format.asprintf "%a" Location.pp_lift @@ cons.Region.region) ] in error ~data title message @@ -174,7 +174,7 @@ module Errors = struct let title () = "unexpected anonymous function" in let message () = "you provided a function declaration without name" in let data = [ - ("loc" , fun () -> Format.asprintf "%a" Location.pp @@ loc) + ("location" , fun () -> Format.asprintf "%a" Location.pp @@ loc) ] in error ~data title message @@ -182,7 +182,7 @@ module Errors = struct let title () = "unexpected named function" in let message () = "you provided a function expression with a name (remove it)" in let data = [ - ("loc" , fun () -> Format.asprintf "%a" Location.pp @@ loc) + ("location" , fun () -> Format.asprintf "%a" Location.pp @@ loc) ] in error ~data title message From 3b806d0bb7ca0602e93ed795d352375ed20834c1 Mon Sep 17 00:00:00 2001 From: John David Pressman Date: Tue, 7 Jan 2020 07:32:44 +0000 Subject: [PATCH 15/15] [LIGO-342] Add big maps to docs, change example used for ordinary maps --- .../docs/language-basics/maps-records.md | 287 +++++++++++++++--- 1 file changed, 244 insertions(+), 43 deletions(-) diff --git a/gitlab-pages/docs/language-basics/maps-records.md b/gitlab-pages/docs/language-basics/maps-records.md index e56160d3f..93bf51fde 100644 --- a/gitlab-pages/docs/language-basics/maps-records.md +++ b/gitlab-pages/docs/language-basics/maps-records.md @@ -14,17 +14,20 @@ Here's how a custom map type is defined: ```pascaligo -type ledger is map(address, tez); +type move is (int * int); +type moveset is map(address, move); ``` ```cameligo -type ledger = (address, tez) map +type move = int * int +type moveset = (address, move) map ``` ```reasonligo -type ledger = map(address, tez); +type move = (int, int); +type moveset = map(address, move); ``` @@ -35,9 +38,9 @@ And here's how a map value is populated: ```pascaligo -const ledger: ledger = map - ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address) -> 1000mutez; - ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) -> 2000mutez; +const moves: moveset = map + ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address) -> (1, 2); + ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) -> (0, 3); end ``` > Notice the `->` between the key and its value and `;` to separate individual map entries. @@ -47,9 +50,9 @@ end ```cameligo -let ledger: ledger = Map.literal - [ (("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address), 1000mutez) ; - (("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), 2000mutez) ; +let moves: moveset = Map.literal + [ (("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address), (1, 2)) ; + (("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), (0, 3)) ; ] ``` > Map.literal constructs the map from a list of key-value pair tuples, `(, )`. 
@@ -60,10 +63,10 @@ let ledger: ledger = Map.literal ```reasonligo -let ledger: ledger = +let moves: moveset = Map.literal([ - ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, 1000mutez), - ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, 2000mutez), + ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, (1, 2)), + ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, (0, 3)), ]); ``` > Map.literal constructs the map from a list of key-value pair tuples, `(, )`. @@ -74,25 +77,25 @@ let ledger: ledger = ### Accessing map values by key -If we want to access a balance from our ledger above, we can use the `[]` operator/accessor to read the associated `tez` value. However, the value we'll get will be wrapped as an optional; in our case `option(tez)`. Here's an example: +If we want to access a move from our moveset above, we can use the `[]` operator/accessor to read the associated `move` value. However, the value we'll get will be wrapped as an optional; in our case `option(move)`. Here's an example: ```pascaligo -const balance: option(tez) = ledger[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)]; +const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)]; ``` ```cameligo -let balance: tez option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger +let balance: move option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves ``` ```reasonligo -let balance: option(tez) = - Map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, ledger); +let balance: option(move) = + Map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves); ``` @@ -103,24 +106,61 @@ Accessing a value in a map yields an option, however you can also get the value ```pascaligo -const balance: tez = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), ledger); +const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves); ``` ```cameligo -let balance: tez = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) ledger +let balance: move = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves ``` ```reasonligo -let balance: tez = - Map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, ledger); +let balance: move = + Map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves); ``` +### Updating the contents of a map + + + + + +The values of a PascaLIGO map can be updated using the ordinary assignment syntax: + +```pascaligo + +function set_ (var m: moveset) : moveset is + block { + m[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)] := (4,9); + } with m +``` + + + +We can update a map in CameLIGO using the `Map.update` built-in: + +```cameligo + +let updated_map: moveset = Map.update ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) (Some (4,9)) moves +``` + + + +We can update a map in ReasonLIGO using the `Map.update` built-in: + +```reasonligo + +let updated_map: moveset = Map.update(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), Some((4,9)), moves); +``` + + + + ### Iteration over the contents of a map There are three kinds of iteration on LIGO maps, `iter`, `map` and `fold`. `iter` @@ -132,24 +172,24 @@ otherwise. 
```pascaligo -function iter_op (const m : ledger) : unit is +function iter_op (const m : moveset) : unit is block { - function aggregate (const i : address ; const j : tez) : unit is block - { if (j > 100mutez) then skip else failwith("fail") } with unit ; + function aggregate (const i : address ; const j : move) : unit is block + { if (j.1 > 1) then skip else failwith("fail") } with unit ; } with map_iter(aggregate, m) ; ``` ```cameligo -let iter_op (m : ledger) : unit = - let assert_eq = fun (i: address) (j: tez) -> assert (j > 100tz) +let iter_op (m : moveset) : unit = + let assert_eq = fun (i: address) (j: move) -> assert (j.0 > 1) in Map.iter assert_eq m ``` ```reasonligo -let iter_op = (m: ledger): unit => { - let assert_eq = (i: address, j: tez) => assert(j > 100mutez); +let iter_op = (m: moveset): unit => { + let assert_eq = (i: address, j: move) => assert(j[0] > 1); Map.iter(assert_eq, m); }; ``` @@ -160,23 +200,23 @@ let iter_op = (m: ledger): unit => { ```pascaligo -function map_op (const m : ledger) : ledger is +function map_op (const m : moveset) : moveset is block { - function increment (const i : address ; const j : tez) : tez is block { skip } with j + 1mutez ; + function increment (const i : address ; const j : move) : move is block { skip } with (j.0, j.1 + 1) ; } with map_map(increment, m) ; ``` ```cameligo -let map_op (m : ledger) : ledger = - let increment = fun (_: address) (j: tez) -> j + 1tz +let map_op (m : moveset) : moveset = + let increment = fun (_: address) (j: move) -> (j.0, j.1 + 1) in Map.map increment m ``` ```reasonligo -let map_op = (m: ledger): ledger => { - let increment = (ignore: address, j: tez) => j + 1tz; +let map_op = (m: moveset): moveset => { + let increment = (ignore: address, j: move) => (j[0], j[1] + 1); Map.map(increment, m); }; ``` @@ -194,30 +234,191 @@ It eventually returns the result of combining all the elements. ```pascaligo -function fold_op (const m : ledger) : tez is +function fold_op (const m : moveset) : int is block { - function aggregate (const j : tez ; const cur : (address * tez)) : tez is j + cur.1 ; - } with map_fold(aggregate, m , 10mutez) + function aggregate (const j : int ; const cur : (address * (int * int))) : int is j + cur.1.1 ; + } with map_fold(aggregate, m , 5) ``` ```cameligo -let fold_op (m : ledger) : ledger = - let aggregate = fun (j: tez) (cur: address * tez) -> j + cur.1 in - Map.fold aggregate m 10tz +let fold_op (m : moveset) : moveset = + let aggregate = fun (j: int) (cur: address * (int * int)) -> j + cur.1.1 in + Map.fold aggregate m 5 ``` ```reasonligo -let fold_op = (m: ledger): ledger => { - let aggregate = (j: tez, cur: (address, tez)) => j + cur[1]; - Map.fold(aggregate, m, 10tz); +let fold_op = (m: moveset): moveset => { + let aggregate = (j: int, cur: (address, (int,int))) => j + cur[1][1]; + Map.fold(aggregate, m, 5); }; ``` +## Big Maps + +Ordinary maps are fine for contracts with a finite lifespan or a bounded number +of users. For many contracts however, the intention is to have a map hold *many* +entries, potentially millions or billions. The cost of loading these entries into +the environment each time a user executes the contract would eventually become +too expensive were it not for big maps. Big maps are a data structure offered by +Tezos which handles the scaling concerns for us. In LIGO, the interface for big +maps is analogous to the one used for ordinary maps. 
+ +Here's how we define a big map: + + + +```pascaligo +type move is (int * int); +type moveset is big_map(address, move); +``` + + +```cameligo +type move = int * int +type moveset = (address, move) big_map +``` + + +```reasonligo +type move = (int, int); +type moveset = big_map(address, move); +``` + + + +And here's how a map value is populated: + + + + +```pascaligo +const moves: moveset = big_map + ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address) -> (1, 2); + ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) -> (0, 3); +end +``` +> Notice the `->` between the key and its value and `;` to separate individual map entries. +> +> `("": address)` means that we type-cast a string into an address. + + + +```cameligo +let moves: moveset = Big_map.literal + [ (("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address), (1, 2)) ; + (("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), (0, 3)) ; + ] +``` +> Big_map.literal constructs the map from a list of key-value pair tuples, `(, )`. +> Note also the `;` to separate individual map entries. +> +> `("": address)` means that we type-cast a string into an address. + + + +```reasonligo +let moves: moveset = + Big_map.literal([ + ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, (1, 2)), + ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, (0, 3)), + ]); +``` +> Big_map.literal constructs the map from a list of key-value pair tuples, `(, )`. +> +> `("": address)` means that we type-cast a string into an address. + + + +### Accessing map values by key + +If we want to access a move from our moveset above, we can use the `[]` operator/accessor to read the associated `move` value. However, the value we'll get will be wrapped as an optional; in our case `option(move)`. Here's an example: + + + +```pascaligo +const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)]; +``` + + + +```cameligo +let balance: move option = Big_map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves +``` + + + +```reasonligo +let balance: option(move) = + Big_map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves); +``` + + +#### Obtaining a map value forcefully + +Accessing a value in a map yields an option, however you can also get the value directly: + + + +```pascaligo +const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves); +``` + + + +```cameligo +let balance: move = Big_map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves +``` + + + +```reasonligo +let balance: move = Big_map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves); +``` + + + +### Updating the contents of a big map + + + + + +The values of a PascaLIGO big map can be updated using the ordinary assignment syntax: + +```pascaligo + +function set_ (var m: moveset) : moveset is + block { + m[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)] := (4,9); + } with m +``` + + + +We can update a big map in CameLIGO using the `Big_map.update` built-in: + +```cameligo + +let updated_map: moveset = + Big_map.update ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) (Some (4,9)) moves +``` + + + +We can update a big map in ReasonLIGO using the `Big_map.update` built-in: + +```reasonligo +let updated_map: moveset = + Big_map.update(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), Some((4,9)), moves); +``` + + + ## Records Records are a construct introduced in LIGO, and are not natively available in Michelson. The LIGO compiler translates records into Michelson `Pairs`.