Merge branch 'dev' into feature/set-delegate

John David Pressman 2019-12-20 08:37:01 -08:00
commit 1bf04574c1
92 changed files with 1522 additions and 382 deletions

.gitignore (vendored, 4 changes)
View File

@@ -1,5 +1,4 @@
/_build/
-dune-project
*~
*.merlin
cache/*
@@ -7,4 +6,5 @@ Version.ml
/_opam/
/*.pp.ligo
**/.DS_Store
.vscode/
+/ligo.install

dune-project (new file, 3 additions)
View File

@@ -0,0 +1,3 @@
(lang dune 1.11)
(name ligo)
(using menhir 2.0)

View File

@@ -66,7 +66,7 @@ let amount =
let open Arg in
let info =
let docv = "AMOUNT" in
-let doc = "$(docv) is the amount the dry-run transaction will use." in
+let doc = "$(docv) is the amount the michelson interpreter will use." in
info ~docv ~doc ["amount"] in
value @@ opt string "0" info
@@ -74,7 +74,7 @@ let sender =
let open Arg in
let info =
let docv = "SENDER" in
-let doc = "$(docv) is the sender the dry-run transaction will use." in
+let doc = "$(docv) is the sender the michelson interpreter transaction will use." in
info ~docv ~doc ["sender"] in
value @@ opt (some string) None info
@@ -82,10 +82,18 @@ let source =
let open Arg in
let info =
let docv = "SOURCE" in
-let doc = "$(docv) is the source the dry-run transaction will use." in
+let doc = "$(docv) is the source the michelson interpreter transaction will use." in
info ~docv ~doc ["source"] in
value @@ opt (some string) None info
+let predecessor_timestamp =
+let open Arg in
+let info =
+let docv = "PREDECESSOR_TIMESTAMP" in
+let doc = "$(docv) is the pedecessor_timestamp the michelson interpreter transaction will use (e.g. '2000-01-01T10:10:10Z')" in
+info ~docv ~doc ["predecessor-timestamp"] in
+value @@ opt (some string) None info
let display_format =
let open Arg in
let info =
@@ -121,7 +129,7 @@ let compile_file =
let%bind typed,_ = Compile.Of_simplified.compile simplified in
let%bind mini_c = Compile.Of_typed.compile typed in
let%bind michelson = Compile.Of_mini_c.aggregate_and_compile_contract mini_c entry_point in
-let%bind contract = Compile.Of_mini_c.build_contract michelson in
+let%bind contract = Compile.Of_michelson.build_contract michelson in
ok @@ Format.asprintf "%a\n" (Main.Display.michelson_pp michelson_format) contract
in
let term =
@@ -137,7 +145,7 @@ let measure_contract =
let%bind typed,_ = Compile.Of_simplified.compile simplified in
let%bind mini_c = Compile.Of_typed.compile typed in
let%bind michelson = Compile.Of_mini_c.aggregate_and_compile_contract mini_c entry_point in
-let%bind contract = Compile.Of_mini_c.build_contract michelson in
+let%bind contract = Compile.Of_michelson.build_contract michelson in
let open Tezos_utils in
ok @@ Format.asprintf "%d bytes\n" (Michelson.measure contract)
in
@@ -150,11 +158,6 @@ let measure_contract =
let compile_parameter =
let f source_file entry_point expression syntax display_format michelson_format =
toplevel ~display_format @@
-(*
-TODO:
-source_to_michelson_contract will fail if the entry_point does not point to a michelson contract
-but we do not check that the type of the parameter matches the type of the given expression
-*)
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed_prg,state = Compile.Of_simplified.compile simplified in
let%bind mini_c_prg = Compile.Of_typed.compile typed_prg in
@@ -162,13 +165,15 @@ let compile_parameter =
let env = Ast_typed.program_environment typed_prg in
let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *)
-Compile.Of_mini_c.build_contract michelson_prg in
+Compile.Of_michelson.build_contract michelson_prg in
let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) (Some source_file) in
let%bind simplified_param = Compile.Of_source.compile_expression v_syntax expression in
let%bind (typed_param,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_param in
let%bind mini_c_param = Compile.Of_typed.compile_expression typed_param in
let%bind compiled_param = Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg mini_c_param in
+let%bind () = Compile.Of_typed.assert_equal_contract_type Check_parameter entry_point typed_prg typed_param in
+let%bind () = Compile.Of_michelson.assert_equal_contract_type Check_parameter michelson_prg compiled_param in
let%bind value = Run.evaluate_expression compiled_param.expr compiled_param.expr_ty in
ok @@ Format.asprintf "%a\n" (Main.Display.michelson_pp michelson_format) value
in
@@ -179,7 +184,7 @@ let compile_parameter =
(Term.ret term , Term.info ~doc cmdname)
let interpret =
-let f expression init_file syntax amount sender source display_format =
+let f expression init_file syntax amount sender source predecessor_timestamp display_format =
toplevel ~display_format @@
let%bind (decl_list,state,env) = match init_file with
| Some init_file ->
@@ -195,13 +200,13 @@ let interpret =
let%bind (typed_exp,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_exp in
let%bind mini_c_exp = Compile.Of_typed.compile_expression typed_exp in
let%bind compiled_exp = Compile.Of_mini_c.aggregate_and_compile_expression decl_list mini_c_exp in
-let%bind options = Run.make_dry_run_options {amount ; sender ; source } in
+let%bind options = Run.make_dry_run_options {predecessor_timestamp ; amount ; sender ; source } in
let%bind value = Run.run ~options compiled_exp.expr compiled_exp.expr_ty in
let%bind simplified_output = Uncompile.uncompile_expression typed_exp.type_annotation value in
ok @@ Format.asprintf "%a\n" Ast_simplified.PP.expression simplified_output
in
let term =
-Term.(const f $ expression "EXPRESSION" 0 $ init_file $ syntax $ amount $ sender $ source $ display_format ) in
+Term.(const f $ expression "EXPRESSION" 0 $ init_file $ syntax $ amount $ sender $ source $ predecessor_timestamp $ display_format ) in
let cmdname = "interpret" in
let doc = "Subcommand: interpret the expression in the context initialized by the provided source file." in
(Term.ret term , Term.info ~doc cmdname)
@@ -210,11 +215,6 @@ let interpret =
let compile_storage =
let f source_file entry_point expression syntax display_format michelson_format =
toplevel ~display_format @@
-(*
-TODO:
-source_to_michelson_contract will fail if the entry_point does not point to a michelson contract
-but we do not check that the type of the storage matches the type of the given expression
-*)
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed_prg,state = Compile.Of_simplified.compile simplified in
let%bind mini_c_prg = Compile.Of_typed.compile typed_prg in
@@ -222,13 +222,15 @@ let compile_storage =
let env = Ast_typed.program_environment typed_prg in
let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *)
-Compile.Of_mini_c.build_contract michelson_prg in
+Compile.Of_michelson.build_contract michelson_prg in
let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) (Some source_file) in
let%bind simplified_param = Compile.Of_source.compile_expression v_syntax expression in
let%bind (typed_param,_) = Compile.Of_simplified.compile_expression ~env ~state simplified_param in
let%bind mini_c_param = Compile.Of_typed.compile_expression typed_param in
let%bind compiled_param = Compile.Of_mini_c.compile_expression mini_c_param in
+let%bind () = Compile.Of_typed.assert_equal_contract_type Check_storage entry_point typed_prg typed_param in
+let%bind () = Compile.Of_michelson.assert_equal_contract_type Check_storage michelson_prg compiled_param in
let%bind value = Run.evaluate_expression compiled_param.expr compiled_param.expr_ty in
ok @@ Format.asprintf "%a\n" (Main.Display.michelson_pp michelson_format) value
in
@@ -239,7 +241,7 @@ let compile_storage =
(Term.ret term , Term.info ~doc cmdname)
let dry_run =
-let f source_file entry_point storage input amount sender source syntax display_format =
+let f source_file entry_point storage input amount sender source predecessor_timestamp syntax display_format =
toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed_prg,state = Compile.Of_simplified.compile simplified in
@@ -248,7 +250,7 @@ let dry_run =
let%bind michelson_prg = Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg entry_point in
let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *)
-Compile.Of_mini_c.build_contract michelson_prg in
+Compile.Of_michelson.build_contract michelson_prg in
let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) (Some source_file) in
let%bind simplified = Compile.Of_source.compile_contract_input storage input v_syntax in
@@ -257,20 +259,20 @@ let dry_run =
let%bind compiled_params = Compile.Of_mini_c.compile_expression mini_c in
let%bind args_michelson = Run.evaluate_expression compiled_params.expr compiled_params.expr_ty in
-let%bind options = Run.make_dry_run_options {amount ; sender ; source } in
+let%bind options = Run.make_dry_run_options {predecessor_timestamp ; amount ; sender ; source } in
let%bind michelson_output = Run.run_contract ~options michelson_prg.expr michelson_prg.expr_ty args_michelson in
let%bind simplified_output = Uncompile.uncompile_typed_program_entry_function_result typed_prg entry_point michelson_output in
ok @@ Format.asprintf "%a\n" Ast_simplified.PP.expression simplified_output
in
let term =
-Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ expression "STORAGE" 3 $ amount $ sender $ source $ syntax $ display_format) in
+Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ expression "STORAGE" 3 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "dry-run" in
let doc = "Subcommand: run a smart-contract with the given storage and input." in
(Term.ret term , Term.info ~doc cmdname)
let run_function =
-let f source_file entry_point parameter amount sender source syntax display_format =
+let f source_file entry_point parameter amount sender source predecessor_timestamp syntax display_format =
toplevel ~display_format @@
let%bind v_syntax = Helpers.syntax_to_variant (Syntax_name syntax) (Some source_file) in
let%bind simplified_prg = Compile.Of_source.compile source_file (Syntax_name syntax) in
@@ -284,32 +286,32 @@ let run_function =
let%bind compiled_applied = Compile.Of_typed.compile_expression typed_app in
let%bind michelson = Compile.Of_mini_c.aggregate_and_compile_expression mini_c_prg compiled_applied in
-let%bind options = Run.make_dry_run_options {amount ; sender ; source } in
+let%bind options = Run.make_dry_run_options {predecessor_timestamp ; amount ; sender ; source } in
let%bind michelson_output = Run.run ~options michelson.expr michelson.expr_ty in
let%bind simplified_output = Uncompile.uncompile_typed_program_entry_function_result typed_prg entry_point michelson_output in
ok @@ Format.asprintf "%a\n" Ast_simplified.PP.expression simplified_output
in
let term =
-Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ amount $ sender $ source $ syntax $ display_format) in
+Term.(const f $ source_file 0 $ entry_point 1 $ expression "PARAMETER" 2 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "run-function" in
let doc = "Subcommand: run a function with the given parameter." in
(Term.ret term , Term.info ~doc cmdname)
let evaluate_value =
-let f source_file entry_point amount sender source syntax display_format =
+let f source_file entry_point amount sender source predecessor_timestamp syntax display_format =
toplevel ~display_format @@
let%bind simplified = Compile.Of_source.compile source_file (Syntax_name syntax) in
let%bind typed_prg,_ = Compile.Of_simplified.compile simplified in
let%bind mini_c = Compile.Of_typed.compile typed_prg in
let%bind (exp,_) = Mini_c.get_entry mini_c entry_point in
let%bind compiled = Compile.Of_mini_c.aggregate_and_compile_expression mini_c exp in
-let%bind options = Run.make_dry_run_options {amount ; sender ; source } in
+let%bind options = Run.make_dry_run_options {predecessor_timestamp ; amount ; sender ; source } in
let%bind michelson_output = Run.run ~options compiled.expr compiled.expr_ty in
let%bind simplified_output = Uncompile.uncompile_typed_program_entry_expression_result typed_prg entry_point michelson_output in
ok @@ Format.asprintf "%a\n" Ast_simplified.PP.expression simplified_output
in
let term =
-Term.(const f $ source_file 0 $ entry_point 1 $ amount $ sender $ source $ syntax $ display_format) in
+Term.(const f $ source_file 0 $ entry_point 1 $ amount $ sender $ source $ predecessor_timestamp $ syntax $ display_format) in
let cmdname = "evaluate-value" in
let doc = "Subcommand: evaluate a given definition." in
(Term.ret term , Term.info ~doc cmdname)
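
Note: the new predecessor_timestamp term above is threaded into every interpreter-backed subcommand and ends up in Run.make_dry_run_options. Purely as a hedged illustration (not part of this commit), a Cli_expect-style test could exercise the new flag as below; the contract file, arguments and expected output are placeholder assumptions.

(* Hypothetical expect test; the contract name and the expected output are illustrative only. *)
let%expect_test _ =
  run_ligo_good [ "dry-run" ; contract "timestamp.ligo" ; "main" ; "unit" ; "unit" ;
                  "--predecessor-timestamp=2000-01-01T10:10:10Z" ] ;
  [%expect {| output elided: depends on how the contract uses NOW |}]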

View File

@@ -16,6 +16,18 @@ let%expect_test _ =
run_ligo_good [ "measure-contract" ; contract "vote.mligo" ; "main" ] ;
[%expect {| 628 bytes |}] ;
run_ligo_good [ "compile-parameter" ; contract "coase.ligo" ; "main" ; "Buy_single (record card_to_buy = 1n end)" ] ;
[%expect {| (Left (Left 1)) |}] ;
run_ligo_good [ "compile-storage" ; contract "coase.ligo" ; "main" ; "record cards = (map end : cards) ; card_patterns = (map end : card_patterns) ; next_id = 3n ; end" ] ;
[%expect {| (Pair (Pair {} {}) 3) |}] ;
run_ligo_bad [ "compile-storage" ; contract "coase.ligo" ; "main" ; "Buy_single (record card_to_buy = 1n end)" ] ;
[%expect {| ligo: different kinds: {"a":"record[next_id -> nat , cards -> (TO_Map (nat,record[card_pattern -> nat , card_owner -> address])) , card_patterns -> (TO_Map (nat,record[quantity -> nat , coefficient -> mutez]))]","b":"sum[Transfer_single -> record[destination -> address , card_to_transfer -> nat] , Sell_single -> record[card_to_sell -> nat] , Buy_single -> record[card_to_buy -> nat]]"} |}] ;
run_ligo_bad [ "compile-parameter" ; contract "coase.ligo" ; "main" ; "record cards = (map end : cards) ; card_patterns = (map end : card_patterns) ; next_id = 3n ; end" ] ;
[%expect {| ligo: different kinds: {"a":"sum[Transfer_single -> record[destination -> address , card_to_transfer -> nat] , Sell_single -> record[card_to_sell -> nat] , Buy_single -> record[card_to_buy -> nat]]","b":"record[next_id -> nat , cards -> (TO_Map (nat,record[card_pattern -> nat , card_owner -> address])) , card_patterns -> (TO_Map (nat,record[quantity -> nat , coefficient -> mutez]))]"} |}] ;
()
let%expect_test _ =

View File

@@ -278,7 +278,7 @@ let%expect_test _ =
OPTIONS
--amount=AMOUNT (absent=0)
-AMOUNT is the amount the dry-run transaction will use.
+AMOUNT is the amount the michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@@ -293,16 +293,22 @@ let%expect_test _ =
`pager', `groff' or `plain'. With `auto', the format is `pager` or
`plain' whenever the TERM env var is `dumb' or undefined.
+--predecessor-timestamp=PREDECESSOR_TIMESTAMP
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp the michelson
+interpreter transaction will use (e.g. '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
--sender=SENDER
-SENDER is the sender the dry-run transaction will use.
+SENDER is the sender the michelson interpreter transaction will
+use.
--source=SOURCE
-SOURCE is the source the dry-run transaction will use.
+SOURCE is the source the michelson interpreter transaction will
+use.
--version
Show version information. |} ] ;
@@ -330,7 +336,7 @@ let%expect_test _ =
OPTIONS
--amount=AMOUNT (absent=0)
-AMOUNT is the amount the dry-run transaction will use.
+AMOUNT is the amount the michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@@ -345,16 +351,22 @@ let%expect_test _ =
`pager', `groff' or `plain'. With `auto', the format is `pager` or
`plain' whenever the TERM env var is `dumb' or undefined.
+--predecessor-timestamp=PREDECESSOR_TIMESTAMP
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp the michelson
+interpreter transaction will use (e.g. '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
--sender=SENDER
-SENDER is the sender the dry-run transaction will use.
+SENDER is the sender the michelson interpreter transaction will
+use.
--source=SOURCE
-SOURCE is the source the dry-run transaction will use.
+SOURCE is the source the michelson interpreter transaction will
+use.
--version
Show version information. |} ] ;
@@ -377,7 +389,7 @@ let%expect_test _ =
OPTIONS
--amount=AMOUNT (absent=0)
-AMOUNT is the amount the dry-run transaction will use.
+AMOUNT is the amount the michelson interpreter will use.
--format=DISPLAY_FORMAT, --display-format=DISPLAY_FORMAT
(absent=human-readable)
@@ -392,16 +404,22 @@ let%expect_test _ =
`pager', `groff' or `plain'. With `auto', the format is `pager` or
`plain' whenever the TERM env var is `dumb' or undefined.
+--predecessor-timestamp=PREDECESSOR_TIMESTAMP
+PREDECESSOR_TIMESTAMP is the pedecessor_timestamp the michelson
+interpreter transaction will use (e.g. '2000-01-01T10:10:10Z')
-s SYNTAX, --syntax=SYNTAX (absent=auto)
SYNTAX is the syntax that will be used. Currently supported
syntaxes are "pascaligo" and "cameligo". By default, the syntax is
guessed from the extension (.ligo and .mligo, respectively).
--sender=SENDER
-SENDER is the sender the dry-run transaction will use.
+SENDER is the sender the michelson interpreter transaction will
+use.
--source=SOURCE
-SOURCE is the source the dry-run transaction will use.
+SOURCE is the source the michelson interpreter transaction will
+use.
--version
Show version information. |} ] ;

View File

@@ -1,11 +1,25 @@
open Cli_expect
let%expect_test _ =
-run_ligo_bad [ "compile-contract" ; "../../test/contracts/error_typer_1.mligo" ; "foo" ] ;
+run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_1.mligo" ; "main" ] ;
[%expect {| ligo: in file "error_typer_1.mligo", line 3, characters 19-27. different type constructors: Expected these two constant type constructors to be the same, but they're different {"a":"string","b":"int"} |} ] ;
-run_ligo_bad [ "compile-contract" ; "../../test/contracts/error_typer_2.mligo" ; "foo" ] ;
+run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_2.mligo" ; "main" ] ;
[%expect {| ligo: in file "error_typer_2.mligo", line 3, characters 24-39. different type constructors: Expected these two n-ary type constructors to be the same, but they're different {"a":"(TO_list(string))","b":"(TO_option(int))"} |} ] ;
-(* run_ligo_bad [ "compile-contract" ; "../../test/contracts/error_typer_3.mligo" ; "foo" ] ;
-* [%expect some type error ] ; *)
+run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_3.mligo" ; "main" ] ;
+[%expect {| ligo: in file "error_typer_3.mligo", line 3, characters 34-53. tuples have different sizes: Expected these two types to be the same, but they're different (both are tuples, but with a different number of arguments) {"a":"tuple[int , string , bool]","b":"tuple[int , string]"} |} ] ;
+run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_4.mligo" ; "main" ] ;
+[%expect {| ligo: in file "error_typer_4.mligo", line 4, characters 17-56. different keys in record: {"key_a":"d","key_b":"c"} |} ] ;
+run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_5.mligo" ; "main" ] ;
+[%expect {| ligo: unbound type variable: {"variable":"boolean","in":"- E[]\tT[] ]","did_you_mean":"bool"} |} ] ;
+run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_6.mligo" ; "main" ] ;
+[%expect {| ligo: in file "error_typer_6.mligo", line 1, characters 30-64. different type constructors: Expected these two constant type constructors to be the same, but they're different {"a":"string","b":"bool"} |} ] ;
+run_ligo_bad [ "compile-contract" ; "../../test/contracts/negative/error_typer_7.mligo" ; "main" ] ;
+[%expect {| ligo: in file "error_typer_7.mligo", line 4, characters 17-56. records have different sizes: Expected these two types to be the same, but they're different (both are records, but with a different number of arguments) {"a":"record[b -> string , a -> int]","b":"record[c -> bool , b -> string , a -> int]"} |} ] ;

View File

@@ -0,0 +1,32 @@
open Tezos_utils
open Proto_alpha_utils
open Trace
let build_contract : Compiler.compiled_expression -> Michelson.michelson result =
fun compiled ->
let%bind ((Ex_ty _param_ty),(Ex_ty _storage_ty)) = Self_michelson.fetch_contract_inputs compiled.expr_ty in
let%bind param_michelson =
Trace.trace_tzresult_lwt (simple_error "Invalid contract: Could not unparse parameter") @@
Proto_alpha_utils.Memory_proto_alpha.unparse_ty_michelson _param_ty in
let%bind storage_michelson =
Trace.trace_tzresult_lwt (simple_error "Invalid contract: Could not unparse storage") @@
Proto_alpha_utils.Memory_proto_alpha.unparse_ty_michelson _storage_ty in
let contract = Michelson.contract param_michelson storage_michelson compiled.expr in
let%bind () =
Trace.trace_tzresult_lwt (simple_error "Invalid contract: Contract did not typecheck") @@
Proto_alpha_utils.Memory_proto_alpha.typecheck_contract contract in
ok contract
type check_type = Check_parameter | Check_storage
let assert_equal_contract_type : check_type -> Compiler.compiled_expression -> Compiler.compiled_expression -> unit result =
fun c compiled_prg compiled_param ->
let%bind (Ex_ty expected_ty) =
let%bind (c_param_ty,c_storage_ty) = Self_michelson.fetch_contract_inputs compiled_prg.expr_ty in
match c with
| Check_parameter -> ok c_param_ty
| Check_storage -> ok c_storage_ty in
let (Ex_ty actual_ty) = compiled_param.expr_ty in
let%bind _ =
Trace.trace_tzresult (simple_error "Passed parameter does not match the contract type") @@
Proto_alpha_utils.Memory_proto_alpha.assert_equal_michelson_type expected_ty actual_ty in
ok ()
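
Note: Of_michelson.build_contract now owns the contract-assembly and typecheck step that previously lived in Of_mini_c (removed in the next file). A minimal sketch of the call sequence as cli.ml uses it after this change; the %bind/ok combinators come from Trace and the wrapper name is hypothetical.

(* Hypothetical wrapper mirroring the compile-contract/measure-contract paths in cli.ml. *)
let measure_michelson (michelson : Compiler.compiled_expression) =
  let%bind contract = Compile.Of_michelson.build_contract michelson in
  ok @@ Tezos_utils.Michelson.measure contract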

View File

@@ -1,5 +1,4 @@
open Mini_c
-open Tezos_utils
open Proto_alpha_utils
open Trace
@@ -32,18 +31,3 @@ let aggregate_and_compile_contract = fun program name ->
let aggregate_and_compile_expression = fun program exp ->
aggregate_and_compile program (ExpressionForm exp)
-let build_contract : Compiler.compiled_expression -> Michelson.michelson result =
-fun compiled ->
-let%bind ((Ex_ty _param_ty),(Ex_ty _storage_ty)) = Self_michelson.fetch_contract_inputs compiled.expr_ty in
-let%bind param_michelson =
-Trace.trace_tzresult_lwt (simple_error "Invalid contract: Could not unparse parameter") @@
-Proto_alpha_utils.Memory_proto_alpha.unparse_ty_michelson _param_ty in
-let%bind storage_michelson =
-Trace.trace_tzresult_lwt (simple_error "Invalid contract: Could not unparse storage") @@
-Proto_alpha_utils.Memory_proto_alpha.unparse_ty_michelson _storage_ty in
-let contract = Michelson.contract param_michelson storage_michelson compiled.expr in
-let%bind () =
-Trace.trace_tzresult_lwt (simple_error "Invalid contract: Contract did not typecheck") @@
-Proto_alpha_utils.Memory_proto_alpha.typecheck_contract contract in
-ok contract

View File

@@ -5,4 +5,20 @@ let compile : Ast_typed.program -> Mini_c.program result = fun p ->
Transpiler.transpile_program p
let compile_expression : annotated_expression -> Mini_c.expression result = fun e ->
Transpiler.transpile_annotated_expression e
type check_type = Check_parameter | Check_storage
let assert_equal_contract_type : check_type -> string -> Ast_typed.program -> Ast_typed.value -> unit result =
fun c entry contract param -> Trace.trace (simple_info "Check argument type against contract type") (
let%bind entry_point = Ast_typed.get_entry contract entry in
match entry_point.type_annotation.type_value' with
| T_arrow (args,_) -> (
match args.type_value' with
| T_tuple [param_exp;storage_exp] -> (
match c with
| Check_parameter -> assert_type_value_eq (param_exp, param.type_annotation)
| Check_storage -> assert_type_value_eq (storage_exp, param.type_annotation)
)
| _ -> dummy_fail
)
| _ -> dummy_fail )

View File

@@ -1,4 +1,4 @@
-open! Trace
+open Trace
let rec error_pp ?(dev = false) out (e : error) =
let open JSON_string_utils in

View File

@@ -16,6 +16,7 @@ type run_failwith_res =
type dry_run_options =
{ amount : string ;
+predecessor_timestamp : string option ;
sender : string option ;
source : string option }
@@ -44,7 +45,14 @@ let make_dry_run_options (opts : dry_run_options) : options result =
(simple_error "invalid source address")
(Contract.of_b58check source) in
ok (Some source) in
-ok @@ make_options ~amount ?source:sender ?payer:source ()
+let%bind predecessor_timestamp =
+match opts.predecessor_timestamp with
+| None -> ok None
+| Some st ->
+match Memory_proto_alpha.Protocol.Alpha_context.Timestamp.of_notation st with
+| Some t -> ok (Some t)
+| None -> simple_fail ("\""^st^"\" is a bad timestamp notation") in
+ok @@ make_options ?predecessor_timestamp:predecessor_timestamp ~amount ?source:sender ?payer:source ()
let ex_value_ty_to_michelson (v : ex_typed_value) : Michelson.t result =
let (Ex_typed_value (value , ty)) = v in
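
Note: a hedged sketch (not in the commit) of how a caller builds the extended options record, mirroring the cli.ml call sites; the Run alias and the literal values are assumptions. An unparsable timestamp string makes make_dry_run_options fail with the "bad timestamp notation" error introduced above.

(* Hypothetical call site: fixed predecessor timestamp, defaults otherwise. *)
let options_for_interpreter () =
  Run.make_dry_run_options
    { amount = "0" ;
      predecessor_timestamp = Some "2000-01-01T10:10:10Z" ;
      sender = None ;
      source = None }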

View File

@@ -1 +1 @@
---explain --external-tokens LexToken --base Parser ParToken.mly
+--table --strict --explain --external-tokens LexToken --base Parser ParToken.mly

View File

@@ -6,7 +6,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Lexer.mli
../shared/Lexer.mll
-../shared/Error.mli
../shared/EvalOpt.ml
../shared/EvalOpt.mli
../shared/FQueue.ml

View File

@@ -0,0 +1,57 @@
(** Generic parser for LIGO *)
module type PARSER =
sig
(* The type of tokens *)
type token
(* This exception is raised by the monolithic API functions *)
exception Error
(* The monolithic API *)
val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
(* The incremental API *)
module MenhirInterpreter :
sig
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
with type token = token
end
module Incremental :
sig
val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
end
end
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) =
struct
module I = Parser.MenhirInterpreter
(* The parser has successfully produced a semantic value. *)
let success v = v
(* The parser has suspended itself because of a syntax error. Stop. *)
let fail _checkpoint = raise Parser.Error
(* The generic parsing function *)
let incr_contract Lexer.{read; buffer; close; _} : AST.t =
let supplier = I.lexer_lexbuf_to_supplier read buffer in
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
let ast = I.loop_handle success fail supplier parser
in close (); ast
let mono_contract = Parser.contract
end
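
Note: a minimal sketch (not part of the commit) of how this functor is meant to be instantiated, mirroring the ParserMain.ml change later in this commit; the wrapper function name is hypothetical.

(* Hypothetical driver: plugs the generated Parser into the incremental engine. *)
module Lexer = Lexer.Make (LexToken)
module Front = ParserAPI.Make (Lexer) (Parser)

let parse_incrementally (input : string option) : AST.t =
  let lexer_inst = Lexer.open_token_stream input in
  (* incr_contract reads tokens from the instance and closes it when done *)
  Front.incr_contract lexer_inst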

View File

@@ -0,0 +1,39 @@
(** Generic parser API for LIGO *)
module type PARSER =
sig
(* The type of tokens *)
type token
(* This exception is raised by the monolithic API functions *)
exception Error
(* The monolithic API *)
val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
(* The incremental API *)
module MenhirInterpreter :
sig
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
with type token = token
end
module Incremental :
sig
val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
end
end
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) :
sig
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
val incr_contract : Lexer.instance -> AST.t
end

View File

@@ -25,15 +25,14 @@ let () = Printexc.record_backtrace true
let external_ text =
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
-type Error.t += ParseError
+type error = SyntaxError
let error_to_string = function
-ParseError -> "Syntax error.\n"
-| _ -> assert false
+SyntaxError -> "Syntax error.\n"
let print_error ?(offsets=true) mode Region.{region; value} ~file =
-let msg = error_to_string value in
-let reg = region#to_string ~file ~offsets mode in
+let msg = error_to_string value
+and reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
(** {1 Preprocessing the input source and opening the input channels} *)
@@ -77,11 +76,11 @@ let () =
(** {1 Instanciating the lexer} *)
module Lexer = Lexer.Make (LexToken)
module Log = LexerLog.Make (Lexer)
+module ParserFront = ParserAPI.Make (Lexer) (Parser)
-let Lexer.{read; buffer; get_pos; get_last; close} =
-Lexer.open_token_stream (Some pp_input)
+let lexer_inst = Lexer.open_token_stream (Some pp_input)
+let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
and cout = stdout
@@ -98,7 +97,10 @@ let tokeniser = read ~log
let () =
try
-let ast = Parser.contract tokeniser buffer in
+(* The incremental API *)
+let ast = ParserFront.incr_contract lexer_inst in
+(* The monolithic API *)
+(* let ast = ParserFront.mono_contract tokeniser buffer in *)
if Utils.String.Set.mem "ast" options#verbose
then let buffer = Buffer.create 131 in
let state = ParserLog.mk_state
@@ -126,7 +128,7 @@ let () =
options#mode err ~file
| Parser.Error ->
let region = get_last () in
-let error = Region.{region; value=ParseError} in
+let error = Region.{region; value=SyntaxError} in
let () = close_all () in
print_error ~offsets:options#offsets
options#mode error ~file

View File

@@ -3,38 +3,33 @@
(menhir
(merge_into Parser)
(modules ParToken Parser)
-(flags -la 1 --explain --external-tokens LexToken))
+(flags -la 1 --table --strict --explain --external-tokens LexToken))
(library
(name parser_cameligo)
(public_name ligo.parser.cameligo)
(modules AST cameligo Parser ParserLog LexToken)
(libraries
+menhirLib
parser_shared
str
simple-utils
tezos-utils
-getopt
-)
-(flags (:standard -open Simple_utils -open Parser_shared ))
-)
+getopt)
+(flags (:standard -open Simple_utils -open Parser_shared )))
(executable
(name LexerMain)
(libraries
parser_cameligo)
(modules
-LexerMain
-)
-(flags (:standard -open Parser_shared -open Parser_cameligo))
-)
+LexerMain)
+(flags (:standard -open Parser_shared -open Parser_cameligo)))
(executable
(name ParserMain)
(libraries
parser_cameligo)
(modules
-ParserMain
-)
-(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo))
-)
+ParserMain)
+(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))

View File

@@ -12,5 +12,5 @@
(preprocess
(pps ppx_let)
)
-(flags (:standard -w +1..62-4-9-44-40-42-48-30@39@33 -open Simple_utils -open Parser_shared ))
+(flags (:standard -w +1..62-4-9-44-40-42-48-30@39@33 -open Simple_utils -open Parser_shared))
)

View File

@@ -1 +1 @@
---explain --external-tokens LexToken --base Parser ParToken.mly
+--table --strict --explain --external-tokens LexToken --base Parser ParToken.mly

View File

@@ -6,7 +6,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Lexer.mli
../shared/Lexer.mll
-../shared/Error.mli
../shared/EvalOpt.ml
../shared/EvalOpt.mli
../shared/FQueue.ml

View File

@@ -77,10 +77,8 @@ type t =
| Case of Region.t (* "case" *)
| Const of Region.t (* "const" *)
| Contains of Region.t (* "contains" *)
-| Down of Region.t (* "down" *)
| Else of Region.t (* "else" *)
| End of Region.t (* "end" *)
-| Fail of Region.t (* "fail" *)
| False of Region.t (* "False" *)
| For of Region.t (* "for" *)
| From of Region.t (* "from" *)
@@ -100,7 +98,6 @@ type t =
| Remove of Region.t (* "remove" *)
| Set of Region.t (* "set" *)
| Skip of Region.t (* "skip" *)
-| Step of Region.t (* "step" *)
| Then of Region.t (* "then" *)
| To of Region.t (* "to" *)
| True of Region.t (* "True" *)

View File

@@ -75,10 +75,8 @@ type t =
| Case of Region.t (* "case" *)
| Const of Region.t (* "const" *)
| Contains of Region.t (* "contains" *)
-| Down of Region.t (* "down" *)
| Else of Region.t (* "else" *)
| End of Region.t (* "end" *)
-| Fail of Region.t (* "fail" *)
| False of Region.t (* "False" *)
| For of Region.t (* "for" *)
| From of Region.t (* "from" *)
@@ -98,7 +96,6 @@ type t =
| Remove of Region.t (* "remove" *)
| Set of Region.t (* "set" *)
| Skip of Region.t (* "skip" *)
-| Step of Region.t (* "step" *)
| Then of Region.t (* "then" *)
| To of Region.t (* "to" *)
| True of Region.t (* "True" *)
@@ -184,10 +181,8 @@ let proj_token = function
| Case region -> region, "Case"
| Const region -> region, "Const"
| Contains region -> region, "Contains"
-| Down region -> region, "Down"
| Else region -> region, "Else"
| End region -> region, "End"
-| Fail region -> region, "Fail"
| False region -> region, "False"
| For region -> region, "For"
| From region -> region, "From"
@@ -207,7 +202,6 @@ let proj_token = function
| Remove region -> region, "Remove"
| Set region -> region, "Set"
| Skip region -> region, "Skip"
-| Step region -> region, "Step"
| Then region -> region, "Then"
| To region -> region, "To"
| True region -> region, "True"
@@ -276,10 +270,8 @@ let to_lexeme = function
| Case _ -> "case"
| Const _ -> "const"
| Contains _ -> "contains"
-| Down _ -> "down"
| Else _ -> "else"
| End _ -> "end"
-| Fail _ -> "fail"
| False _ -> "False"
| For _ -> "for"
| From _ -> "from"
@@ -299,7 +291,6 @@ let to_lexeme = function
| Remove _ -> "remove"
| Set _ -> "set"
| Skip _ -> "skip"
-| Step _ -> "step"
| Then _ -> "then"
| To _ -> "to"
| True _ -> "True"
@@ -336,13 +327,11 @@ let keywords = [
(fun reg -> Case reg);
(fun reg -> Const reg);
(fun reg -> Contains reg);
-(fun reg -> Down reg);
(fun reg -> Else reg);
(fun reg -> End reg);
(fun reg -> For reg);
(fun reg -> From reg);
(fun reg -> Function reg);
-(fun reg -> Fail reg);
(fun reg -> False reg);
(fun reg -> If reg);
(fun reg -> In reg);
@@ -360,7 +349,6 @@ let keywords = [
(fun reg -> Remove reg);
(fun reg -> Set reg);
(fun reg -> Skip reg);
-(fun reg -> Step reg);
(fun reg -> Then reg);
(fun reg -> To reg);
(fun reg -> True reg);
@@ -560,10 +548,8 @@ let is_kwd = function
| Case _
| Const _
| Contains _
-| Down _
| Else _
| End _
-| Fail _
| False _
| For _
| From _
@@ -583,7 +569,6 @@ let is_kwd = function
| Remove _
| Set _
| Skip _
-| Step _
| Then _
| To _
| True _

View File

@@ -0,0 +1,57 @@
(** Generic parser for LIGO *)
module type PARSER =
sig
(* The type of tokens *)
type token
(* This exception is raised by the monolithic API functions *)
exception Error
(* The monolithic API *)
val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
(* The incremental API *)
module MenhirInterpreter :
sig
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
with type token = token
end
module Incremental :
sig
val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
end
end
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) =
struct
module I = Parser.MenhirInterpreter
(* The parser has successfully produced a semantic value. *)
let success v = v
(* The parser has suspended itself because of a syntax error. Stop. *)
let fail _checkpoint = raise Parser.Error
(* The generic parsing function *)
let incr_contract Lexer.{read; buffer; close; _} : AST.t =
let supplier = I.lexer_lexbuf_to_supplier read buffer in
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
let ast = I.loop_handle success fail supplier parser
in close (); ast
let mono_contract = Parser.contract
end

View File

@@ -0,0 +1,39 @@
(** Generic parser API for LIGO *)
module type PARSER =
sig
(* The type of tokens *)
type token
(* This exception is raised by the monolithic API functions *)
exception Error
(* The monolithic API *)
val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
(* The incremental API *)
module MenhirInterpreter :
sig
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
with type token = token
end
module Incremental :
sig
val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
end
end
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) :
sig
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
val incr_contract : Lexer.instance -> AST.t
end

View File

@@ -25,15 +25,14 @@ let () = Printexc.record_backtrace true
let external_ text =
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
-type Error.t += ParseError
+type error = SyntaxError
let error_to_string = function
-ParseError -> "Syntax error.\n"
-| _ -> assert false
+SyntaxError -> "Syntax error.\n"
let print_error ?(offsets=true) mode Region.{region; value} ~file =
-let msg = error_to_string value in
-let reg = region#to_string ~file ~offsets mode in
+let msg = error_to_string value
+and reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
(** {1 Preprocessing the input source and opening the input channels} *)
@@ -77,11 +76,11 @@ let () =
(** {1 Instanciating the lexer} *)
module Lexer = Lexer.Make (LexToken)
module Log = LexerLog.Make (Lexer)
+module ParserFront = ParserAPI.Make (Lexer) (Parser)
-let Lexer.{read; buffer; get_pos; get_last; close} =
-Lexer.open_token_stream (Some pp_input)
+let lexer_inst = Lexer.open_token_stream (Some pp_input)
+let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
and cout = stdout
@@ -98,7 +97,10 @@ let tokeniser = read ~log
let () =
try
-let ast = Parser.contract tokeniser buffer in
+(* The incremental API *)
+let ast = ParserFront.incr_contract lexer_inst in
+(* The monolithic API *)
+(* let ast = ParserFront.mono_contract tokeniser buffer in *)
if Utils.String.Set.mem "ast" options#verbose
then let buffer = Buffer.create 131 in
let state = ParserLog.mk_state
@@ -126,7 +128,7 @@ let () =
options#mode err ~file
| Parser.Error ->
let region = get_last () in
-let error = Region.{region; value=ParseError} in
+let error = Region.{region; value=SyntaxError} in
let () = close_all () in
print_error ~offsets:options#offsets
options#mode error ~file

View File

@@ -3,43 +3,41 @@
(menhir
(merge_into Parser)
(modules ParToken Parser)
-(flags -la 1 --explain --external-tokens LexToken))
+(flags -la 1 --table --strict --explain --external-tokens LexToken))
(library
(name parser_pascaligo)
(public_name ligo.parser.pascaligo)
(modules AST pascaligo Parser ParserLog LexToken)
(libraries
-parser_shared
-hex
-simple-utils
-tezos-utils
-)
-(flags (:standard -open Parser_shared -open Simple_utils))
-)
+menhirLib
+parser_shared
+hex
+simple-utils
+tezos-utils)
+(flags (:standard -open Parser_shared -open Simple_utils)))
(executable
(name LexerMain)
(libraries
hex
simple-utils
tezos-utils
parser_pascaligo)
(modules
-LexerMain
-)
-(flags (:standard -open Parser_shared -open Parser_pascaligo))
-)
+LexerMain)
+(flags (:standard -open Parser_shared -open Parser_pascaligo)))
(executable
(name ParserMain)
(libraries
parser_pascaligo)
-(modules
-ParserMain
-)
-(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo))
-)
+(modules ParserMain)
+(flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))
+(executable
+(name Unlexer)
+(modules Unlexer))
;; Les deux directives (rule) qui suivent sont pour le dev local.
;; Il suffit de faire "dune build Parser.exe" pour avoir un Parser.exe dans le dossier.

View File

@@ -0,0 +1,121 @@
(** Converting the textual representation of tokens produced by Menhir
into concrete syntax *)
(* See [ParToken.mly] *)
let gen_sym prefix =
let count = ref 0 in
fun () -> incr count;
prefix ^ string_of_int !count
let id_sym = gen_sym "id"
and ctor_sym = gen_sym "C"
let concrete = function
(* Keywords *)
"And" -> "and"
| "Begin" -> "begin"
| "BigMap" -> "big_map"
| "Block" -> "block"
| "Case" -> "case"
| "Const" -> "const"
| "Contains" -> "contains"
| "Else" -> "else"
| "End" -> "end"
| "False" -> "False"
| "For" -> "for"
| "Function" -> "function"
| "From" -> "from"
| "If" -> "if"
| "In" -> "in"
| "Is" -> "is"
| "List" -> "list"
| "Map" -> "map"
| "Mod" -> "mod"
| "Nil" -> "nil"
| "Not" -> "not"
| "Of" -> "of"
| "Or" -> "or"
| "Patch" -> "patch"
| "Record" -> "record"
| "Remove" -> "remove"
| "Set" -> "set"
| "Skip" -> "skip"
| "Then" -> "then"
| "To" -> "to"
| "True" -> "True"
| "Type" -> "type"
| "Unit" -> "Unit"
| "Var" -> "var"
| "While" -> "while"
| "With" -> "with"
(* Data constructors *)
| "C_None" -> "None"
| "C_Some" -> "Some"
(* Symbols *)
| "SEMI" -> ";"
| "COMMA" -> ","
| "LPAR" -> "("
| "RPAR" -> ")"
| "LBRACE" -> "{"
| "RBRACE" -> "}"
| "LBRACKET" -> "["
| "RBRACKET" -> "]"
| "CONS" -> "#"
| "VBAR" -> "|"
| "ARROW" -> "->"
| "ASS" -> ":="
| "EQ" -> "="
| "COLON" -> ":"
| "LT" -> "<"
| "LE" -> "<="
| "GT" -> ">"
| "GE" -> ">="
| "NE" -> "=/="
| "PLUS" -> "+"
| "MINUS" -> " -"
| "SLASH" -> "/"
| "TIMES" -> "*"
| "DOT" -> "."
| "WILD" -> "_"
| "CAT" -> "^"
(* Literals *)
| "String" -> "\"a string\""
| "Bytes" -> "0xAA"
| "Int" -> "1"
| "Nat" -> "1n"
| "Mutez" -> "1mutez"
| "Ident" -> id_sym ()
| "Constr" -> ctor_sym ()
(* Virtual tokens *)
| "EOF" -> ""
(* For completeness of open sum types *)
| _ -> "<Unknown>"
(* Unlexing a sentence *)
let unlex (sentence: string) : Buffer.t =
let tokens = Str.split (Str.regexp " ") sentence in
let lexemes = List.map concrete tokens in
let buffer = Buffer.create 31 in
let rec trans = function
[] -> ()
| [s] -> Buffer.add_string buffer s
| s::l -> Buffer.add_string buffer (s ^ " "); trans l
in trans lexemes; buffer
(* Reading one line from the standard input channel and unlex it. *)
let out = unlex (input_line stdin) |> Buffer.contents
let () = Printf.printf "%s\n" out
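
Note: a hedged example (not part of the commit) of what the unlexer produces. With the concrete mappings above, the Menhir sentence "Const Ident EQ Int SEMI" unlexes to "const id1 = 1 ;"; the snippet below feeds a fixed sentence instead of stdin and assumes unlex is in scope.

(* Hypothetical use of unlex on a fixed sentence instead of stdin. *)
let () =
  let sentence = "Const Ident EQ Int SEMI" in
  (* With the mappings above this prints: const id1 = 1 ; *)
  print_endline (Buffer.contents (unlex sentence))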

View File

@@ -5,6 +5,73 @@ module AST = Parser_cameligo.AST
module ParserLog = Parser_cameligo.ParserLog
module LexToken = Parser_reasonligo.LexToken
module Lexer = Lexer.Make(LexToken)
module SyntaxError = Parser_reasonligo.SyntaxError
module Errors = struct
let wrong_function_arguments expr =
let title () = "wrong function arguments" in
let message () = "" in
let expression_loc = AST.expr_to_region expr in
let data = [
("expression_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ expression_loc)
] in
error ~data title message
let parser_error start end_ =
let title () = "parser error" in
let message () = "" in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
in
let data = [
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
] in
error ~data title message
let unrecognized_error start end_ =
let title () = "unrecognized error" in
let message () = "" in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
in
let data = [
("unrecognized_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
] in
error ~data title message
end
open Errors
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
let parse (parser: 'a parser) lexbuf =
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in
let result =
try
ok (parser read lexbuf)
with
| SyntaxError.Error (WrongFunctionArguments e) ->
fail @@ (wrong_function_arguments e)
| Parser.Error ->
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (parser_error start end_)
| _ ->
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (unrecognized_error start end_)
in
close ();
result
let parse_file (source: string) : AST.t result = let parse_file (source: string) : AST.t result =
let pp_input = let pp_input =
@ -20,93 +87,12 @@ let parse_file (source: string) : AST.t result =
generic_try (simple_error "error opening file") @@ generic_try (simple_error "error opening file") @@
(fun () -> open_in pp_input) in (fun () -> open_in pp_input) in
let lexbuf = Lexing.from_channel channel in let lexbuf = Lexing.from_channel channel in
let Lexer.{read ; close ; _} = parse (Parser.contract) lexbuf
Lexer.open_token_stream None in
specific_try (function
| Parser.Error -> (
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
start.pos_fname source
in
simple_error str
)
| exn ->
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
let str = Format.sprintf
"Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In file \"%s|%s\"\n"
(Printexc.to_string exn)
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
start.pos_fname source
in
simple_error str
) @@ (fun () ->
let raw = Parser.contract read lexbuf in
close () ;
raw
) >>? fun raw ->
ok raw
let parse_string (s:string) : AST.t result = let parse_string (s:string) : AST.t result =
let lexbuf = Lexing.from_string s in let lexbuf = Lexing.from_string s in
let Lexer.{read ; close ; _} = parse (Parser.contract) lexbuf
Lexer.open_token_stream None in
specific_try (function
| Parser.Error -> (
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d)\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
simple_error str
)
| _ -> simple_error "unrecognized parse_ error"
) @@ (fun () ->
let raw = Parser.contract read lexbuf in
close () ;
raw
) >>? fun raw ->
ok raw
let parse_expression (s:string) : AST.expr result = let parse_expression (s:string) : AST.expr result =
let lexbuf = Lexing.from_string s in let lexbuf = Lexing.from_string s in
let Lexer.{read ; close; _} = parse (Parser.interactive_expr) lexbuf
Lexer.open_token_stream None in
specific_try (function
| Parser.Error -> (
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d)\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol) in
simple_error str
)
| exn ->
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
let str = Format.sprintf
"Unrecognized error (%s) at \"%s\" from (%d, %d) to (%d, %d). In expression \"%s|%s\"\n"
(Printexc.to_string exn)
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
start.pos_fname s
in
simple_error str
) @@ (fun () ->
let raw = Parser.interactive_expr read lexbuf in
close () ;
raw
) >>? fun raw ->
ok raw
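A hedged sketch (not part of the diff) of the new parse wrapper in use; the input string is only an illustration:

let _example : AST.expr result =
  (* a well-formed ReasonLIGO expression yields Ok; a malformed one now
     surfaces as parser_error, unrecognized_error or
     wrong_function_arguments instead of an ad-hoc formatted string *)
  parse Parser.interactive_expr (Lexing.from_string "1 + 2")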

View File

@ -1 +1 @@
--explain --external-tokens LexToken --base Parser ParToken.mly --table --explain --external-tokens LexToken --base Parser ParToken.mly

View File

@ -6,7 +6,6 @@ $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.mli
$HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml $HOME/git/ligo/vendors/ligo-utils/simple-utils/region.ml
../shared/Lexer.mli ../shared/Lexer.mli
../shared/Lexer.mll ../shared/Lexer.mll
../shared/Error.mli
../shared/EvalOpt.ml ../shared/EvalOpt.ml
../shared/EvalOpt.mli ../shared/EvalOpt.mli
../shared/FQueue.ml ../shared/FQueue.ml

View File

@ -5,12 +5,13 @@
(* Literals *) (* Literals *)
%token <string Region.reg> Ident "<ident>" %token <string Region.reg> String "<string>"
%token <string Region.reg> Constr "<constr>" %token <(LexToken.lexeme * Hex.t) Region.reg> Bytes "<bytes>"
%token <string Region.reg> String "<string>" %token <(string * Z.t) Region.reg> Int "<int>"
%token <(string * Z.t) Region.reg> Int "<int>" %token <(string * Z.t) Region.reg> Nat "<nat>"
%token <(string * Z.t) Region.reg> Nat "<nat>" %token <(string * Z.t) Region.reg> Mutez "<mutez>"
%token <(string * Z.t) Region.reg> Mutez "<mutez>" %token <string Region.reg> Ident "<ident>"
%token <string Region.reg> Constr "<constr>"
(* Symbols *) (* Symbols *)

View File

@ -388,30 +388,14 @@ type_expr_simple_args:
par(nsepseq(type_expr_simple, ",")) { $1 } par(nsepseq(type_expr_simple, ",")) { $1 }
type_expr_simple: type_expr_simple:
core_expr_2 type_expr_simple_args? { type_name type_expr_simple_args? {
let args = $2 in let args = $2 in
let constr = match args with
match $1 with Some {value; _} ->
EVar i -> i let region = cover $1.region value.rpar in
| EProj {value={struct_name; field_path; _}; region} -> let value = $1, {region; value}
let app a = function in TApp {region; value}
FieldName v -> a ^ "." ^ v.value | None -> TVar $1
| Component {value = c, _; _} -> a ^ "." ^ c in
let value =
Utils.nsepseq_foldl app struct_name.value field_path
in {region; value}
| EArith Mutez r | EArith Int r | EArith Nat r ->
{r with value = fst r.value}
| EString String s -> s
| ELogic BoolExpr (True t) -> {region=t; value="true"}
| ELogic BoolExpr (False f) -> {region=f; value="false"}
| _ -> failwith "Not supported" (* TODO: raise a proper exception *)
in match args with
Some {value; _} ->
let region = cover (expr_to_region $1) value.rpar in
let value = constr, {region; value}
in TApp {region; value}
| None -> TVar constr
} }
| "(" nsepseq(type_expr_simple, ",") ")" { | "(" nsepseq(type_expr_simple, ",") ")" {
TProd {region = cover $1 $3; value=$2} TProd {region = cover $1 $3; value=$2}
@ -440,8 +424,8 @@ fun_expr:
{p.value with inside = arg_to_pattern p.value.inside} {p.value with inside = arg_to_pattern p.value.inside}
in PPar {p with value} in PPar {p with value}
| EUnit u -> PUnit u | EUnit u -> PUnit u
| _ -> failwith "Not supported" in (* TODO: raise a proper exception *) | e -> raise (SyntaxError.Error (WrongFunctionArguments e))
in
let fun_args_to_pattern = function let fun_args_to_pattern = function
EAnnot { EAnnot {
value = { value = {
@ -469,8 +453,8 @@ fun_expr:
in arg_to_pattern (fst fun_args), bindings in arg_to_pattern (fst fun_args), bindings
| EUnit e -> | EUnit e ->
arg_to_pattern (EUnit e), [] arg_to_pattern (EUnit e), []
| _ -> failwith "Not supported" in (* TODO: raise a proper exception *) | e -> raise (SyntaxError.Error (WrongFunctionArguments e))
in
let binders = fun_args_to_pattern $1 in let binders = fun_args_to_pattern $1 in
let f = {kwd_fun; let f = {kwd_fun;
binders; binders;

View File

@ -0,0 +1,57 @@
(** Generic parser for LIGO *)
module type PARSER =
sig
(* The type of tokens *)
type token
(* This exception is raised by the monolithic API functions *)
exception Error
(* The monolithic API *)
val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
(* The incremental API *)
module MenhirInterpreter :
sig
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
with type token = token
end
module Incremental :
sig
val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
end
end
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) =
struct
module I = Parser.MenhirInterpreter
(* The parser has successfully produced a semantic value. *)
let success v = v
(* The parser has suspended itself because of a syntax error. Stop. *)
let fail _checkpoint = raise Parser.Error
(* The generic parsing function *)
let incr_contract Lexer.{read; buffer; close; _} : AST.t =
let supplier = I.lexer_lexbuf_to_supplier read buffer in
let parser = Parser.Incremental.contract buffer.Lexing.lex_curr_p in
let ast = I.loop_handle success fail supplier parser
in close (); ast
let mono_contract = Parser.contract
end
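A hedged sketch (not part of the diff) of instantiating this functor, mirroring what ParserMain does below; the names Front and parse_with_incremental_api are illustrative:

module Front = ParserAPI.Make (Lexer) (Parser)

let parse_with_incremental_api (lexer_inst : Lexer.instance) : AST.t =
  (* drives Menhir's incremental engine over the lexer instance and
     raises Parser.Error on a syntax error *)
  Front.incr_contract lexer_inst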

View File

@ -0,0 +1,39 @@
(** Generic parser API for LIGO *)
module type PARSER =
sig
(* The type of tokens *)
type token
(* This exception is raised by the monolithic API functions *)
exception Error
(* The monolithic API *)
val contract : (Lexing.lexbuf -> token) -> Lexing.lexbuf -> AST.t
(* The incremental API *)
module MenhirInterpreter :
sig
include MenhirLib.IncrementalEngine.INCREMENTAL_ENGINE
with type token = token
end
module Incremental :
sig
val contract : Lexing.position -> AST.t MenhirInterpreter.checkpoint
end
end
(* Main functor *)
module Make (Lexer: Lexer.S)
(Parser: PARSER with type token = Lexer.Token.token) :
sig
val mono_contract : (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> AST.t
val incr_contract : Lexer.instance -> AST.t
end

View File

@ -25,15 +25,14 @@ let () = Printexc.record_backtrace true
let external_ text = let external_ text =
Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;; Utils.highlight (Printf.sprintf "External error: %s" text); exit 1;;
type Error.t += ParseError type error = SyntaxError
let error_to_string = function let error_to_string = function
ParseError -> "Syntax error.\n" SyntaxError -> "Syntax error.\n"
| _ -> assert false
let print_error ?(offsets=true) mode Region.{region; value} ~file = let print_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value in let msg = error_to_string value
let reg = region#to_string ~file ~offsets mode in and reg = region#to_string ~file ~offsets mode in
Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg) Utils.highlight (sprintf "Parse error %s:\n%s%!" reg msg)
(** {1 Preprocessing the input source and opening the input channels} *) (** {1 Preprocessing the input source and opening the input channels} *)
@ -77,11 +76,11 @@ let () =
(** {1 Instanciating the lexer} *) (** {1 Instanciating the lexer} *)
module Lexer = Lexer.Make (LexToken) module Lexer = Lexer.Make (LexToken)
module Log = LexerLog.Make (Lexer) module Log = LexerLog.Make (Lexer)
module ParserFront = ParserAPI.Make (Lexer) (Parser)
let Lexer.{read; buffer; get_pos; get_last; close} = let lexer_inst = Lexer.open_token_stream (Some pp_input)
Lexer.open_token_stream (Some pp_input) let Lexer.{read; buffer; get_pos; get_last; close} = lexer_inst
and cout = stdout and cout = stdout
@ -98,7 +97,10 @@ let tokeniser = read ~log
let () = let () =
try try
let ast = Parser.contract tokeniser buffer in (* The incremental API *)
let ast = ParserFront.incr_contract lexer_inst in
(* The monolithic API *)
(* let ast = ParserFront.mono_contract tokeniser buffer in *)
if Utils.String.Set.mem "ast" options#verbose if Utils.String.Set.mem "ast" options#verbose
then let buffer = Buffer.create 131 in then let buffer = Buffer.create 131 in
let state = ParserLog.mk_state let state = ParserLog.mk_state
@ -126,7 +128,7 @@ let () =
options#mode err ~file options#mode err ~file
| Parser.Error -> | Parser.Error ->
let region = get_last () in let region = get_last () in
let error = Region.{region; value=ParseError} in let error = Region.{region; value=SyntaxError} in
let () = close_all () in let () = close_all () in
print_error ~offsets:options#offsets print_error ~offsets:options#offsets
options#mode error ~file options#mode error ~file

View File

@ -0,0 +1,4 @@
type error =
| WrongFunctionArguments of AST.expr
exception Error of error

View File

@ -0,0 +1,4 @@
type error =
| WrongFunctionArguments of AST.expr
exception Error of error

View File

@ -3,39 +3,34 @@
(menhir (menhir
(merge_into Parser) (merge_into Parser)
(modules ParToken Parser) (modules ParToken Parser)
(flags -la 1 --explain --dump --strict --external-tokens LexToken)) (flags -la 1 --table --explain --strict --external-tokens LexToken))
(library (library
(name parser_reasonligo) (name parser_reasonligo)
(public_name ligo.parser.reasonligo) (public_name ligo.parser.reasonligo)
(modules reasonligo LexToken Parser) (modules SyntaxError reasonligo LexToken Parser)
(libraries (libraries
menhirLib
parser_shared parser_shared
parser_cameligo parser_cameligo
str str
simple-utils simple-utils
tezos-utils tezos-utils
getopt getopt)
) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo)))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_cameligo ))
)
(executable (executable
(name LexerMain) (name LexerMain)
(libraries (libraries
parser_reasonligo) parser_reasonligo)
(modules (modules
LexerMain LexerMain)
) (flags (:standard -open Parser_shared -open Parser_reasonligo)))
(flags (:standard -open Parser_shared -open Parser_reasonligo))
)
(executable (executable
(name ParserMain) (name ParserMain)
(libraries (libraries
parser_reasonligo) parser_reasonligo)
(modules (modules
ParserMain ParserMain)
) (flags (:standard -open Simple_utils -open Parser_shared -open Parser_reasonligo)))
(flags (:standard -open Simple_utils -open Parser_shared -open Parser_reasonligo))
)

View File

@ -3,3 +3,4 @@ module AST = Parser_cameligo.AST
module Lexer = Lexer module Lexer = Lexer
module LexToken = LexToken module LexToken = LexToken
module ParserLog = Parser_cameligo.ParserLog module ParserLog = Parser_cameligo.ParserLog
module SyntaxError = SyntaxError

View File

@ -1,3 +0,0 @@
type t = ..
type error = t

View File

@ -136,11 +136,13 @@ module type S =
(* Error reporting *) (* Error reporting *)
exception Error of Error.t Region.reg type error
exception Error of error Region.reg
val print_error : val print_error :
?offsets:bool -> [`Byte | `Point] -> ?offsets:bool -> [`Byte | `Point] ->
Error.t Region.reg -> file:bool -> unit error Region.reg -> file:bool -> unit
end end

View File

@ -159,10 +159,11 @@ module type S = sig
(* Error reporting *) (* Error reporting *)
exception Error of Error.t Region.reg type error
exception Error of error Region.reg
val print_error : ?offsets:bool -> [`Byte | `Point] -> val print_error : ?offsets:bool -> [`Byte | `Point] ->
Error.t Region.reg -> file:bool -> unit error Region.reg -> file:bool -> unit
end end
(* The functorised interface (* The functorised interface
@ -330,22 +331,23 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
(* ERRORS *) (* ERRORS *)
type Error.t += Invalid_utf8_sequence type error =
type Error.t += Unexpected_character of char Invalid_utf8_sequence
type Error.t += Undefined_escape_sequence | Unexpected_character of char
type Error.t += Missing_break | Undefined_escape_sequence
type Error.t += Unterminated_string | Missing_break
type Error.t += Unterminated_integer | Unterminated_string
type Error.t += Odd_lengthed_bytes | Unterminated_integer
type Error.t += Unterminated_comment | Odd_lengthed_bytes
type Error.t += Orphan_minus | Unterminated_comment
type Error.t += Non_canonical_zero | Orphan_minus
type Error.t += Negative_byte_sequence | Non_canonical_zero
type Error.t += Broken_string | Negative_byte_sequence
type Error.t += Invalid_character_in_string | Broken_string
type Error.t += Reserved_name | Invalid_character_in_string
type Error.t += Invalid_symbol | Reserved_name
type Error.t += Invalid_natural | Invalid_symbol
| Invalid_natural
let error_to_string = function let error_to_string = function
Invalid_utf8_sequence -> Invalid_utf8_sequence ->
@ -393,9 +395,8 @@ module Make (Token: TOKEN) : (S with module Token = Token) =
Hint: Check the LIGO syntax you use.\n" Hint: Check the LIGO syntax you use.\n"
| Invalid_natural -> | Invalid_natural ->
"Invalid natural." "Invalid natural."
| _ -> assert false
exception Error of Error.t Region.reg exception Error of error Region.reg
let print_error ?(offsets=true) mode Region.{region; value} ~file = let print_error ?(offsets=true) mode Region.{region; value} ~file =
let msg = error_to_string value in let msg = error_to_string value in

View File

@ -7,20 +7,17 @@
simple-utils simple-utils
uutf uutf
getopt getopt
zarith zarith)
)
(modules (modules
Error
Lexer Lexer
LexerLog LexerLog
Utils Utils
Markup Markup
FQueue FQueue
EvalOpt EvalOpt
Version Version
) ))
(modules_without_implementation Error)
)
(rule (rule
(targets Version.ml) (targets Version.ml)

View File

@ -564,11 +564,11 @@ and simpl_declaration : Raw.declaration -> declaration Location.wrap list result
let%bind (v, v_type) = pattern_to_typed_var par_var in let%bind (v, v_type) = pattern_to_typed_var par_var in
let%bind v_type_expression = let%bind v_type_expression =
match v_type with match v_type with
| Some v_type -> ok @@ (simpl_type_expression v_type) | Some v_type -> ok (to_option (simpl_type_expression v_type))
| None -> fail @@ wrong_pattern "typed var tuple" par_var in | None -> ok None
let%bind v_type_expression = v_type_expression in in
let%bind simpl_rhs_expr = simpl_expression rhs_expr in let%bind simpl_rhs_expr = simpl_expression rhs_expr in
ok @@ loc x @@ Declaration_constant (Var.of_name v.value, Some v_type_expression, simpl_rhs_expr) ) ok @@ loc x @@ Declaration_constant (Var.of_name v.value, v_type_expression, simpl_rhs_expr) )
in let%bind variables = ok @@ npseq_to_list pt.value in let%bind variables = ok @@ npseq_to_list pt.value
in let%bind expr_bind_lst = in let%bind expr_bind_lst =
match let_rhs with match let_rhs with

View File

@ -1,4 +1,4 @@
open! Trace open Trace
open Ast_simplified open Ast_simplified
module Raw = Parser.Pascaligo.AST module Raw = Parser.Pascaligo.AST

View File

@ -54,7 +54,7 @@ module Errors = struct
let match_redundant_case : type a . (a, unit) I.matching -> Location.t -> unit -> _ = let match_redundant_case : type a . (a, unit) I.matching -> Location.t -> unit -> _ =
fun matching loc () -> fun matching loc () ->
let title = (thunk "missing case in match") in let title = (thunk "redundant case in match") in
let message () = "" in let message () = "" in
let data = [ let data = [
("variant" , fun () -> Format.asprintf "%a" I.PP.matching_type matching) ; ("variant" , fun () -> Format.asprintf "%a" I.PP.matching_type matching) ;

View File

@ -12,13 +12,20 @@ type environment = Environment.t
module Errors = struct module Errors = struct
let unbound_type_variable (e:environment) (tv:I.type_variable) () = let unbound_type_variable (e:environment) (tv:I.type_variable) () =
let name = Var.to_name tv in
let suggestion = match name with
| "integer" -> "int"
| "str" -> "string"
| "boolean" -> "bool"
| _ -> "no suggestion" in
let title = (thunk "unbound type variable") in let title = (thunk "unbound type variable") in
let message () = "" in let message () = "" in
let data = [ let data = [
("variable" , fun () -> Format.asprintf "%a" Stage_common.PP.type_variable tv) ; ("variable" , fun () -> Format.asprintf "%a" Stage_common.PP.type_variable tv) ;
(* TODO: types don't have srclocs for now. *) (* TODO: types don't have srclocs for now. *)
(* ("location" , fun () -> Format.asprintf "%a" Location.pp (n.location)) ; *) (* ("location" , fun () -> Format.asprintf "%a" Location.pp (n.location)) ; *)
("in" , fun () -> Format.asprintf "%a" Environment.PP.full_environment e) ("in" , fun () -> Format.asprintf "%a" Environment.PP.full_environment e) ;
("did_you_mean" , fun () -> suggestion)
] in ] in
error ~data title message () error ~data title message ()
@ -54,7 +61,7 @@ module Errors = struct
let match_redundant_case : type a . (a, unit) I.matching -> Location.t -> unit -> _ = let match_redundant_case : type a . (a, unit) I.matching -> Location.t -> unit -> _ =
fun matching loc () -> fun matching loc () ->
let title = (thunk "missing case in match") in let title = (thunk "redundant case in match") in
let message () = "" in let message () = "" in
let data = [ let data = [
("variant" , fun () -> Format.asprintf "%a" I.PP.matching_type matching) ; ("variant" , fun () -> Format.asprintf "%a" I.PP.matching_type matching) ;
@ -464,8 +471,6 @@ and type_expression' : environment -> ?tv_opt:O.type_value -> I.expression -> O.
| None -> ok () | None -> ok ()
| Some tv' -> O.assert_type_value_eq (tv' , ae.type_annotation) in | Some tv' -> O.assert_type_value_eq (tv' , ae.type_annotation) in
ok(ae) ok(ae)
(* Sum *) (* Sum *)
| E_constructor (c, expr) -> | E_constructor (c, expr) ->
let%bind (c_tv, sum_tv) = let%bind (c_tv, sum_tv) =
@ -793,7 +798,12 @@ and type_expression' : environment -> ?tv_opt:O.type_value -> I.expression -> O.
(Some tv) (Some tv)
(Some expr'.type_annotation) (Some expr'.type_annotation)
(internal_assertion_failure "merge_annotations (Some ...) (Some ...) failed") in (internal_assertion_failure "merge_annotations (Some ...) (Some ...) failed") in
ok {expr' with type_annotation} (* check type annotation of the expression as a whole (e.g. let x : t = (v : t') ) *)
let%bind () =
match tv_opt with
| None -> ok ()
| Some tv' -> O.assert_type_value_eq (tv' , type_annotation) in
ok @@ {expr' with type_annotation}
and type_constant (name:I.constant) (lst:O.type_value list) (tv_opt:O.type_value option) : (O.constant * O.type_value) result = and type_constant (name:I.constant) (lst:O.type_value list) (tv_opt:O.type_value option) : (O.constant * O.type_value) result =

View File

@ -2,7 +2,7 @@
For more info, see back-end.md: https://gitlab.com/ligolang/ligo/blob/dev/gitlab-pages/docs/contributors/big-picture/back-end.md *) For more info, see back-end.md: https://gitlab.com/ligolang/ligo/blob/dev/gitlab-pages/docs/contributors/big-picture/back-end.md *)
open! Trace open Trace
open Helpers open Helpers
module AST = Ast_typed module AST = Ast_typed

View File

@ -1,4 +1,4 @@
open! Trace open Trace
module AST = Ast_typed module AST = Ast_typed
module Append_tree = Tree.Append module Append_tree = Tree.Append

View File

@ -1,5 +1,5 @@
open Mini_c open Mini_c
open! Trace open Trace
(* TODO hack to specialize map_expression to identity monad *) (* TODO hack to specialize map_expression to identity monad *)
let map_expression : let map_expression :

View File

@ -29,8 +29,8 @@ open Errors
(* This does not makes sense to me *) (* This does not makes sense to me *)
let get_operator : constant -> type_value -> expression list -> predicate result = fun s ty lst -> let get_operator : constant -> type_value -> expression list -> predicate result = fun s ty lst ->
match Operators.Compiler.get_operators s with match Operators.Compiler.get_operators s with
| Trace.Ok (x,_) -> ok x | Ok (x,_) -> ok x
| Trace.Error _ -> ( | Error _ -> (
match s with match s with
| C_NONE -> ( | C_NONE -> (
let%bind ty' = Mini_c.get_t_option ty in let%bind ty' = Mini_c.get_t_option ty in
@ -452,4 +452,4 @@ and translate_function anon env input_ty output_ty : michelson result =
type compiled_expression = { type compiled_expression = {
expr_ty : ex_ty ; expr_ty : ex_ty ;
expr : michelson ; expr : michelson ;
} }

View File

@ -138,7 +138,7 @@ let get_t_pair (t:type_value) : (type_value * type_value) result = match t.type_
let get_t_function (t:type_value) : (type_value * type_value) result = match t.type_value' with let get_t_function (t:type_value) : (type_value * type_value) result = match t.type_value' with
| T_arrow (a,r) -> ok (a,r) | T_arrow (a,r) -> ok (a,r)
| _ -> simple_fail "not a tuple" | _ -> simple_fail "not a function"
let get_t_sum (t:type_value) : type_value constructor_map result = match t.type_value' with let get_t_sum (t:type_value) : type_value constructor_map result = match t.type_value' with
| T_sum m -> ok m | T_sum m -> ok m

View File

@ -56,7 +56,7 @@ module Errors = struct
let different_types name a b () = let different_types name a b () =
let title () = name ^ " are different" in let title () = name ^ " are different" in
let message () = "" in let message () = "Expected these two types to be the same, but they're different" in
let data = [ let data = [
("a" , fun () -> Format.asprintf "%a" PP.type_value a) ; ("a" , fun () -> Format.asprintf "%a" PP.type_value a) ;
("b" , fun () -> Format.asprintf "%a" PP.type_value b ) ("b" , fun () -> Format.asprintf "%a" PP.type_value b )
@ -321,7 +321,7 @@ let rec assert_type_value_eq (a, b: (type_value * type_value)) : unit result = m
| TC_big_map (ka,va), TC_big_map (kb,vb) -> ok @@ ([ka;va] ,[kb;vb]) | TC_big_map (ka,va), TC_big_map (kb,vb) -> ok @@ ([ka;va] ,[kb;vb])
| _,_ -> fail @@ different_operators opa opb | _,_ -> fail @@ different_operators opa opb
in in
trace (different_types "constant sub-expression" a b) trace (different_types "arguments to type operators" a b)
@@ bind_list_iter (fun (a,b) -> assert_type_value_eq (a,b) )(List.combine lsta lstb) @@ bind_list_iter (fun (a,b) -> assert_type_value_eq (a,b) )(List.combine lsta lstb)
) )
| T_operator _, _ -> fail @@ different_kinds a b | T_operator _, _ -> fail @@ different_kinds a b

View File

@ -189,10 +189,14 @@ let literal ppf (l:literal) = match l with
| Literal_timestamp n -> fprintf ppf "+%d" n | Literal_timestamp n -> fprintf ppf "+%d" n
| Literal_mutez n -> fprintf ppf "%dmutez" n | Literal_mutez n -> fprintf ppf "%dmutez" n
| Literal_string s -> fprintf ppf "%S" s | Literal_string s -> fprintf ppf "%S" s
| Literal_bytes b -> fprintf ppf "0x%s" @@ Bytes.to_string @@ Bytes.escaped b | Literal_bytes b -> fprintf ppf "0x%a" Hex.pp (Hex.of_bytes b)
| Literal_address s -> fprintf ppf "@%S" s | Literal_address s -> fprintf ppf "@%S" s
| Literal_operation _ -> fprintf ppf "Operation(...bytes)" | Literal_operation _ -> fprintf ppf "Operation(...bytes)"
| Literal_key s -> fprintf ppf "key %s" s | Literal_key s -> fprintf ppf "key %s" s
| Literal_key_hash s -> fprintf ppf "key_hash %s" s | Literal_key_hash s -> fprintf ppf "key_hash %s" s
| Literal_signature s -> fprintf ppf "Signature %s" s | Literal_signature s -> fprintf ppf "Signature %s" s
| Literal_chain_id s -> fprintf ppf "Chain_id %s" s | Literal_chain_id s -> fprintf ppf "Chain_id %s" s
let%expect_test _ =
Format.printf "%a" literal (Literal_bytes (Bytes.of_string "foo")) ;
[%expect{| 0x666f6f |}]

View File

@ -5,8 +5,9 @@
simple-utils simple-utils
tezos-utils tezos-utils
) )
(inline_tests)
(preprocess (preprocess
(pps ppx_let) (pps ppx_let ppx_expect)
) )
(flags (:standard -open Simple_utils)) (flags (:standard -open Simple_utils))
) )

View File

@ -58,8 +58,7 @@ let rec value ppf : value -> unit = function
| D_unit -> fprintf ppf "unit" | D_unit -> fprintf ppf "unit"
| D_string s -> fprintf ppf "\"%s\"" s | D_string s -> fprintf ppf "\"%s\"" s
| D_bytes x -> | D_bytes x ->
let (`Hex hex) = Hex.of_bytes x in fprintf ppf "0x%a" Hex.pp @@ Hex.of_bytes x
fprintf ppf "0x%s" hex
| D_pair (a, b) -> fprintf ppf "(%a), (%a)" value a value b | D_pair (a, b) -> fprintf ppf "(%a), (%a)" value a value b
| D_left a -> fprintf ppf "L(%a)" value a | D_left a -> fprintf ppf "L(%a)" value a
| D_right b -> fprintf ppf "R(%a)" value b | D_right b -> fprintf ppf "R(%a)" value b
@ -124,6 +123,10 @@ let tl_statement ppf (ass, _) = assignment ppf ass
let program ppf (p:program) = let program ppf (p:program) =
fprintf ppf "Program:\n---\n%a" (pp_print_list ~pp_sep:pp_print_newline tl_statement) p fprintf ppf "Program:\n---\n%a" (pp_print_list ~pp_sep:pp_print_newline tl_statement) p
let%expect_test _ =
Format.printf "%a" value (D_bytes (Bytes.of_string "foo")) ;
[%expect{| 0x666f6f |}]
let%expect_test _ = let%expect_test _ =
let pp = expression' Format.std_formatter in let pp = expression' Format.std_formatter in
let dummy_type = T_base Base_unit in let dummy_type = T_base Base_unit in

View File

@ -26,7 +26,7 @@ let compile_main () =
let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in
let%bind (_contract: Tezos_utils.Michelson.michelson) = let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *) (* fails if the given entry point is not a valid contract *)
Ligo.Compile.Of_mini_c.build_contract michelson_prg in Ligo.Compile.Of_michelson.build_contract michelson_prg in
ok () ok ()
open Ast_simplified open Ast_simplified

View File

@ -1,3 +0,0 @@
type toto = int
let foo : string = 42 + 127

View File

@ -1,3 +0,0 @@
type toto = int option
let foo : string list = Some (42 + 127)

View File

@ -0,0 +1 @@
This folder contains contracts for negative tests: contracts that are expected to fail (parse error, type error and so on).

View File

@ -0,0 +1,6 @@
type toto = int
let foo : string = 42 + 127
let main (p:int) (storage : int) =
(([] : operation list) , p + foo)

View File

@ -0,0 +1,6 @@
type toto = int option
let foo : string list = Some (42 + 127)
let main (p:int) (storage : int) =
(([] : operation list) , p)

View File

@ -0,0 +1,7 @@
type toto = { a : int ; b : string ; c : bool }
type tata = { a : int ; d : string ; c : bool }
let foo : tata = ({a = 1 ; b = "foo" ; c = true} : toto)
let main (p:int) (storage : int) =
(([] : operation list) , p + foo.a)

View File

@ -0,0 +1,4 @@
let foo : boolean = 3
let main (p:int) (storage : int) =
(([] : operation list) , p + foo)

View File

@ -0,0 +1,3 @@
let foo : (int, string) map = (Map.literal [] : (int, bool) map)
let main (p:int) (storage : int) =
(([] : operation list) , p)

View File

@ -0,0 +1,7 @@
type toto = { a : int ; b : string }
type tata = { a : int ; }
let foo : tata = ({a = 1 ; b = "foo" ; c = true} : toto)
let main (p:int) (storage : int) =
(([] : operation list) , p + foo.a)

View File

@ -0,0 +1,25 @@
type storage_t is timestamp
type message_t is (unit -> list(operation))
type default_pt is unit
type call_pt is message_t
type contract_return_t is (list(operation) * storage_t)
type entry_point_t is
| Call of call_pt
| Default of default_pt
function call (const p : call_pt; const s : storage_t) : contract_return_t is block {
if s >= now then failwith("Contract is still time locked") else skip ;
const message : message_t = p ;
const ret_ops : list(operation) = message(unit) ;
} with (ret_ops,s)
function default (const p : default_pt; const s : storage_t) : contract_return_t is
((nil: list(operation)) , s)
function main(const param : entry_point_t; const s : storage_t) : contract_return_t is
case param of
| Call (p) -> call(p,s)
| Default (p) -> default(p,s)
end

View File

@ -0,0 +1,11 @@
type foobar = int * int
let test_t: foobar = 10, 25
let foo, bar = test_t
let type_tuple_d (p: unit) = foo + bar
type complex = string * int * string * nat
let test_t_2 = "hello", 10, "world", 50n
let hello, ten, world, fifty_n = test_t_2
let type_tuple_d_2 (p: unit) = hello ^ world

View File

@ -15,7 +15,8 @@
(alias (alias
(name ligo-test) (name ligo-test)
(action (run ./test.exe)) (action (run ./test.exe))
(deps (glob_files contracts/*)) (deps (glob_files contracts/*)
(glob_files contracts/negative/*))
) )
(alias (alias

View File

@ -1789,6 +1789,12 @@ let set_delegate_religo () : unit result =
let%bind () = expect_eq program "main" (e_key_hash pkh_str) (e_typed_list [] t_operation) let%bind () = expect_eq program "main" (e_key_hash pkh_str) (e_typed_list [] t_operation)
in ok () in ok ()
let type_tuple_destruct () : unit result =
let%bind program = mtype_file "./contracts/type_tuple_destruct.mligo" in
let%bind () = expect_eq program "type_tuple_d" (e_unit ()) (e_int 35) in
let%bind () = expect_eq program "type_tuple_d_2" (e_unit ()) (e_string "helloworld") in
ok ()
let main = test_suite "Integration (End to End)" [ let main = test_suite "Integration (End to End)" [
test "key hash" key_hash ; test "key hash" key_hash ;
test "chain id" chain_id ; test "chain id" chain_id ;
@ -1926,4 +1932,5 @@ let main = test_suite "Integration (End to End)" [
test "simple_access (ligo)" simple_access_ligo; test "simple_access (ligo)" simple_access_ligo;
test "deep_access (ligo)" deep_access_ligo; test "deep_access (ligo)" deep_access_ligo;
test "entrypoints (ligo)" entrypoints_ligo ; test "entrypoints (ligo)" entrypoints_ligo ;
test "type tuple destruct (mligo)" type_tuple_destruct ;
] ]

View File

@ -23,7 +23,7 @@ let compile_main () =
let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in
let%bind (_contract: Tezos_utils.Michelson.michelson) = let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *) (* fails if the given entry point is not a valid contract *)
Ligo.Compile.Of_mini_c.build_contract michelson_prg in Ligo.Compile.Of_michelson.build_contract michelson_prg in
ok () ok ()
open Ast_simplified open Ast_simplified

View File

@ -23,7 +23,7 @@ let compile_main () =
let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in
let%bind (_contract: Tezos_utils.Michelson.michelson) = let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *) (* fails if the given entry point is not a valid contract *)
Ligo.Compile.Of_mini_c.build_contract michelson_prg in Ligo.Compile.Of_michelson.build_contract michelson_prg in
ok () ok ()
open Ast_simplified open Ast_simplified

View File

@ -23,7 +23,7 @@ let compile_main () =
let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in
let%bind (_contract: Tezos_utils.Michelson.michelson) = let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *) (* fails if the given entry point is not a valid contract *)
Ligo.Compile.Of_mini_c.build_contract michelson_prg in Ligo.Compile.Of_michelson.build_contract michelson_prg in
ok () ok ()
open Ast_simplified open Ast_simplified

View File

@ -13,5 +13,6 @@ let () =
Multisig_tests.main ; Multisig_tests.main ;
Multisig_v2_tests.main ; Multisig_v2_tests.main ;
Replaceable_id_tests.main ; Replaceable_id_tests.main ;
Time_lock_tests.main ;
] ; ] ;
() ()

View File

@ -1,4 +1,4 @@
open! Trace open Trace
type test_case = unit Alcotest.test_case type test_case = unit Alcotest.test_case
type test = type test =
@ -17,7 +17,7 @@ let wrap_test name f =
let wrap_test_raw f = let wrap_test_raw f =
match f () with match f () with
| Trace.Ok ((), annotations) -> ignore annotations; () | Ok ((), annotations) -> ignore annotations; ()
| Error err -> | Error err ->
Format.printf "%a\n%!" (Ligo.Display.error_pp ~dev:true) (err ()) Format.printf "%a\n%!" (Ligo.Display.error_pp ~dev:true) (err ())

View File

@ -0,0 +1,69 @@
open Trace
open Test_helpers
let type_file f =
let%bind simplified = Ligo.Compile.Of_source.compile f (Syntax_name "pascaligo") in
let%bind typed,state = Ligo.Compile.Of_simplified.compile simplified in
ok @@ (typed,state)
let get_program =
let s = ref None in
fun () -> match !s with
| Some s -> ok s
| None -> (
let%bind program = type_file "./contracts/time-lock.ligo" in
s := Some program ;
ok program
)
let compile_main () =
let%bind simplified = Ligo.Compile.Of_source.compile "./contracts/time-lock.ligo" (Syntax_name "pascaligo") in
let%bind typed_prg,_ = Ligo.Compile.Of_simplified.compile simplified in
let%bind mini_c_prg = Ligo.Compile.Of_typed.compile typed_prg in
let%bind michelson_prg = Ligo.Compile.Of_mini_c.aggregate_and_compile_contract mini_c_prg "main" in
let%bind (_contract: Tezos_utils.Michelson.michelson) =
(* fails if the given entry point is not a valid contract *)
Ligo.Compile.Of_michelson.build_contract michelson_prg in
ok ()
open Ast_simplified
let empty_op_list =
(e_typed_list [] t_operation)
let empty_message = e_lambda (Var.of_name "arguments")
(Some t_unit) (Some (t_list t_operation))
empty_op_list
let call msg = e_constructor "Call" msg
let mk_time st =
match Memory_proto_alpha.Protocol.Alpha_context.Timestamp.of_notation st with
| Some s -> ok s
| None -> simple_fail "bad timestamp notation"
let to_sec t = Tezos_utils.Time.Protocol.to_seconds t
let storage st = e_timestamp (Int64.to_int @@ to_sec st)
let early_call () =
let%bind program,_ = get_program () in
let%bind predecessor_timestamp = mk_time "2000-01-01T00:10:10Z" in
let%bind lock_time = mk_time "2000-01-01T10:10:10Z" in
let init_storage = storage lock_time in
let options =
Proto_alpha_utils.Memory_proto_alpha.make_options ~predecessor_timestamp () in
let exp_failwith = "Contract is still time locked" in
expect_string_failwith ~options program "main"
(e_pair (call empty_message) init_storage) exp_failwith
let call_on_time () =
let%bind program,_ = get_program () in
let%bind predecessor_timestamp = mk_time "2000-01-01T10:10:10Z" in
let%bind lock_time = mk_time "2000-01-01T00:10:10Z" in
let init_storage = storage lock_time in
let options =
Proto_alpha_utils.Memory_proto_alpha.make_options ~predecessor_timestamp () in
expect_eq ~options program "main"
(e_pair (call empty_message) init_storage) (e_pair empty_op_list init_storage)
let main = test_suite "Time lock" [
test "compile" compile_main ;
test "early call" early_call ;
test "call on time" call_on_time ;
]

View File

@ -0,0 +1,2 @@
(lang dune 1.11)
(name tezos-memory-proto-alpha)

View File

@ -0,0 +1,2 @@
(lang dune 1.11)
(name proto-alpha-utils)

View File

@ -11,7 +11,7 @@ let of_alpha_tz_error err = of_tz_error (AE.Ecoproto_error err)
let trace_alpha_tzresult err : 'a AE.Error_monad.tzresult -> 'a result = let trace_alpha_tzresult err : 'a AE.Error_monad.tzresult -> 'a result =
function function
| Result.Ok x -> ok x | Ok x -> ok x
| Error errs -> fail @@ thunk @@ patch_children (List.map of_alpha_tz_error errs) (err ()) | Error errs -> fail @@ thunk @@ patch_children (List.map of_alpha_tz_error errs) (err ())
let trace_alpha_tzresult_lwt error (x:_ AE.Error_monad.tzresult Lwt.t) : _ result = let trace_alpha_tzresult_lwt error (x:_ AE.Error_monad.tzresult Lwt.t) : _ result =
@ -19,17 +19,17 @@ let trace_alpha_tzresult_lwt error (x:_ AE.Error_monad.tzresult Lwt.t) : _ resul
let trace_tzresult err = let trace_tzresult err =
function function
| Result.Ok x -> ok x | Ok x -> ok x
| Error errs -> fail @@ thunk @@ patch_children (List.map of_tz_error errs) (err ()) | Error errs -> fail @@ thunk @@ patch_children (List.map of_tz_error errs) (err ())
(* TODO: should be a combination of trace_tzresult and trace_r *) (* TODO: should be a combination of trace_tzresult and trace_r *)
let trace_tzresult_r err_thunk_may_fail = let trace_tzresult_r err_thunk_may_fail =
function function
| Result.Ok x -> ok x | Ok x -> ok x
| Error errs -> | Error errs ->
let tz_errs = List.map of_tz_error errs in let tz_errs = List.map of_tz_error errs in
match err_thunk_may_fail () with match err_thunk_may_fail () with
| Simple_utils.Trace.Ok (err, annotations) -> | Ok (err, annotations) ->
ignore annotations ; ignore annotations ;
Error (fun () -> patch_children tz_errs (err ())) Error (fun () -> patch_children tz_errs (err ()))
| Error errors_while_generating_error -> | Error errors_while_generating_error ->

View File

@ -1066,13 +1066,16 @@ type options = {
let make_options let make_options
?(tezos_context = dummy_environment.tezos_context) ?(tezos_context = dummy_environment.tezos_context)
?(predecessor_timestamp = dummy_environment.tezos_context.predecessor_timestamp)
?(source = (List.nth dummy_environment.identities 0).implicit_contract) ?(source = (List.nth dummy_environment.identities 0).implicit_contract)
?(self = (List.nth dummy_environment.identities 0).implicit_contract) ?(self = (List.nth dummy_environment.identities 0).implicit_contract)
?(payer = (List.nth dummy_environment.identities 1).implicit_contract) ?(payer = (List.nth dummy_environment.identities 1).implicit_contract)
?(amount = Alpha_context.Tez.one) ?(amount = Alpha_context.Tez.one)
?(chain_id = Environment.Chain_id.zero) ?(chain_id = Environment.Chain_id.zero)
() ()
= { =
let tezos_context = { tezos_context with predecessor_timestamp } in
{
tezos_context ; tezos_context ;
source ; source ;
self ; self ;
@ -1105,6 +1108,10 @@ let typecheck_contract contract =
Script_ir_translator.typecheck_code dummy_environment.tezos_context contract' >>=?? Script_ir_translator.typecheck_code dummy_environment.tezos_context contract' >>=??
fun _ -> return () fun _ -> return ()
let assert_equal_michelson_type ty1 ty2 =
(* alpha_wrap (Script_ir_translator.ty_eq tezos_context a b) >>? fun (Eq, _) -> *)
alpha_wrap (Script_ir_translator.ty_eq dummy_environment.tezos_context ty1 ty2)
type 'a interpret_res = type 'a interpret_res =
| Succeed of 'a stack | Succeed of 'a stack
| Fail of Script_repr.expr | Fail of Script_repr.expr
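A hedged usage sketch of the new ?predecessor_timestamp argument of make_options, mirroring the time-lock tests above; the timestamp literal is only an example:

let _options =
  match
    Memory_proto_alpha.Protocol.Alpha_context.Timestamp.of_notation
      "2000-01-01T10:10:10Z"
  with
  | None -> failwith "bad timestamp notation"
  | Some predecessor_timestamp ->
      (* the timestamp is threaded into tezos_context, so the interpreted
         contract's now follows it (this is what the time-lock tests rely on) *)
      Proto_alpha_utils.Memory_proto_alpha.make_options ~predecessor_timestamp ()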

258
vendors/ligo-utils/simple-utils/cover.sh vendored Executable file
View File

@ -0,0 +1,258 @@
#!/bin/sh
# This script extracts the error states of an LR automaton produced by
# Menhir and generates minimal inputs that cover all of them and only
# them.
set -x
# ====================================================================
# General Settings and wrappers
script=$(basename $0)
print_nl () { test "$quiet" != "yes" && echo "$1"; }
print () { test "$quiet" != "yes" && printf "$1"; }
fatal_error () {
echo "$script: fatal error:"
echo "$1" 1>&2
exit 1
}
warn () {
print_nl "$script: warning:"
print_nl "$1"
}
failed () {
printf "\033[31mFAILED$1\033[0m\n"
}
emphasise () {
printf "\033[31m$1\033[0m\n"
}
# ====================================================================
# Parsing loop
#
while : ; do
case "$1" in
"") break;;
--par-tokens=*)
if test -n "$par_tokens"; then
fatal_error "Repeated option --par-tokens."; fi
par_tokens=$(expr "$1" : "[^=]*=\(.*\)")
;;
--par-tokens)
no_eq=$1
break
;;
--lex-tokens=*)
if test -n "$lex_tokens"; then
fatal_error "Repeated option --lex-tokens."; fi
lex_tokens=$(expr "$1" : "[^=]*=\(.*\)")
;;
--lex-tokens)
no_eq=$1
break
;;
--ext=*)
if test -n "$ext_opt"; then
fatal_error "Repeated option --ext."; fi
ext=$(expr "$1" : "[^=]*=\(.*\)")
;;
--ext)
no_eq=$1
break
;;
--dir=*)
if test -n "$dir_opt"; then
fatal_error "Repeated option --dir."; fi
dir=$(expr "$1" : "[^=]*=\(.*\)")
;;
--dir)
no_eq=$1
break
;;
# Help
#
--unlexer=*)
if test -n "$unlexer"; then
fatal_error "Repeated option --unlexer."; fi
unlexer=$(expr "$1" : "[^=]*=\(.*\)")
;;
--unlexer)
no_eq=$1
break
;;
-h | --help | -help)
help=yes
;;
# Invalid option
#
-*)
fatal_error "Invalid option \"$1\"."
;;
# Invalid argument
#
*)
if test -n "$parser_arg"; then
fatal_error "Only one Menhir specification allowed."; fi
parser=$1
esac
shift
done
# ====================================================================
# Help
#
usage () {
cat <<EOF
Usage: $(basename $0) [-h|--help]
--par-tokens=<par_tokens>.mly
--lex-tokens=<lex_tokens>.mli
--unlexer=<binary>
--ext=<extension>
--dir=<path>
<parser>.mly
Generates in directory <path> a set of LIGO source files with
extension <extension> covering all erroneous states of the LR
automaton produced by Menhir from <parser>.mly, <par_tokens>.mly,
<lex_tokens>.mli and <parser>.msg (see script `messages.sh` for
generating the latter). The LIGO files will be numbered with their
corresponding state number in the automaton. The executable <binary>
reads a line of tokens on stdin and produces a line of the corresponding
lexemes.
The following options, if given, must be given only once.
Display control:
-h, --help display this help and exit
Mandatory options:
--lex-tokens=<name>.mli the lexical tokens
--par-tokens=<name>.mly the syntactical tokens
--ext=EXT Unix file extension for the
generated LIGO files
(no starting period)
--dir=PATH directory to store the generated
LIGO files (no trailing slash)
--unlexer=<binary> from tokens to lexemes (one line on stdin)
EOF
exit 1
}
if test "$help" = "yes"; then usage; fi
# ====================================================================
# Checking the command-line options and arguments and applying some of
# them.
# It is a common mistake to forget the "=" in GNU long-option style.
if test -n "$no_eq"
then
fatal_error "Long option style $no_eq must be followed by \"=\"."
fi
# Checking options
if test -z "$unlexer"; then
fatal_error "Unlexer binary not found (use --unlexer)."; fi
if test -z "$parser"; then
fatal_error "No parser specification."; fi
if test -z "$par_tokens"; then
fatal_error "No syntactical tokens specification (use --par-tokens)."; fi
if test -z "$lex_tokens"; then
fatal_error "No lexical tokens specification (use --lex-tokens)."; fi
if test ! -e "$parser"; then
fatal_error "Parser specification \"$parser\" not found."; fi
if test ! -e "$lex_tokens"; then
fatal_error "Lexical tokens specification \"$lex_tokens\" not found."; fi
if test ! -e "$par_tokens"; then
fatal_error "Syntactical tokens specification \"$par_tokens\" not found."; fi
parser_ext=$(expr "$parser" : ".*\.mly$")
if test "$parser_ext" = "0"; then
fatal_error "Parser specification must have extension \".mly\"."; fi
par_tokens_ext=$(expr "$par_tokens" : ".*\.mly$")
if test "$par_tokens_ext" = "0"; then
fatal_error "Syntactical tokens specification must have extension \".mly\"."
fi
lex_tokens_ext=$(expr "$lex_tokens" : ".*\.mli$")
if test "$lex_tokens_ext" = "0"; then
fatal_error "Lexical tokens specification must have extension \".mli\"."
fi
mly=$parser
parser_base=$(basename $mly .mly)
par_tokens_base=$(basename $par_tokens .mly)
lex_tokens_base=$(basename $lex_tokens .mli)
# Checking the output directory
if test -z "$dir"; then
fatal_error "No output directory (use --dir)."; fi
if test ! -d "$dir"; then
fatal_error "Output directory \"$dir\" not found."; fi
# Checking the LIGO extension
if test -z "$ext"; then
fatal_error "No LIGO extension (use --ext)."; fi
ext_start=$(expr "$ext" : "^\..*")
if test "$ext_start" != "0"
then fatal_error "LIGO extensions must not start with a period."
fi
# Checking the presence of the messages
msg=$parser_base.msg
if test ! -e $msg; then
fatal_error "File $msg not found."; fi
# ====================================================================
# Menhir's flags
flags="--table --strict --external-tokens $lex_tokens_base \
--base $parser_base $par_tokens"
# ====================================================================
# Producing erroneous sentences from Menhir's error messages
msg=$parser_base.msg
raw=$parser_base.msg.raw
printf "Making $raw from $msg... "
menhir --echo-errors $parser_base.msg $flags $mly > $raw 2>/dev/null
sed -i -e 's/^.*: \(.*\)$/\1/g' $raw
printf "done.\n"
# ====================================================================
# Converting Menhir's minimal erroneous sentences to concrete syntax
printf "Unlexing the erroneous sentences... "
states=$msg.states
map=$msg.map
sed -n "s/.* state\: \([0-9]\+\)./\1/p" $msg > $states
paste -d ':' $states $raw > $map
rm -f $dir/*.$ext
while read -r line; do
state=$(echo $line | sed -n 's/\(.*\):.*/\1/p')
filename=$(printf "$dir/%04d.$ext" $state)
sentence=$(echo $line | sed -n 's/.*:\(.*\)/\1/p')
echo $sentence | $unlexer >> $filename
done < $map
printf "done.\n"

View File

@ -0,0 +1,2 @@
(lang dune 1.11)
(name simple-utils)

222
vendors/ligo-utils/simple-utils/messages.sh vendored Executable file
View File

@ -0,0 +1,222 @@
#!/bin/sh
# This script uses Menhir to generate the exhaustive list of errors
# for a given parser specification. The generated file has to be
# filled with the error messages. The script must be called in the
# same directory where the parser specification and external token
# specifications are located, in accordance with the convention of the
# LIGO compiler source code.
#set -x
# ====================================================================
# General Settings and wrappers
script=$(basename $0)
print_nl () { test "$quiet" != "yes" && echo "$1"; }
print () { test "$quiet" != "yes" && printf "$1"; }
fatal_error () {
echo "$script: fatal error:"
echo "$1" 1>&2
exit 1
}
warn () {
print_nl "$script: warning:"
print_nl "$1"
}
failed () {
printf "\033[31mFAILED$1\033[0m\n"
}
emphasise () {
printf "\033[31m$1\033[0m\n"
}
# ====================================================================
# Parsing loop
#
while : ; do
case "$1" in
"") break;;
--par-tokens=*)
if test -n "$par_tokens"; then
fatal_error "Repeated option --par-tokens."; fi
par_tokens=$(expr "$1" : "[^=]*=\(.*\)")
;;
--par-tokens)
no_eq=$1
break
;;
--lex-tokens=*)
if test -n "$lex_tokens"; then
fatal_error "Repeated option --lex-tokens."; fi
lex_tokens=$(expr "$1" : "[^=]*=\(.*\)")
;;
--lex-tokens)
no_eq=$1
break
;;
-h | --help | -help)
help=yes
;;
# Invalid option
#
-*)
fatal_error "Invalid option \"$1\"."
;;
# Invalid argument
#
*)
if test -n "$parser"; then
fatal_error "Only one Menhir specification allowed."; fi
parser=$1
esac
shift
done
# ====================================================================
# Help
#
usage () {
cat <<EOF
Usage: $(basename $0) [-h|--help] --lex-tokens=<lex_tokens>.mli \
--par-tokens=<par_tokens>.mly <parser>.mly
Generates in place <parser>.msg, the form containing the exhaustive
list of errors for the LR automaton generated by Menhir from
<parser>.mly, <par_tokens>.mly and <lex_tokens>.mli. The file
<parser>.msg is meant to be edited and filled with the error messages.
The following options, if given, must be given only once.
Display control:
-h, --help display this help and exit
Mandatory options:
--lex-tokens=<name>.mli the lexical tokens
--par-tokens=<name>.mly the syntactical tokens
EOF
exit 1
}
if test "$help" = "yes"; then usage; fi
# ====================================================================
# Checking the command-line options and arguments and applying some of
# them.
# It is a common mistake to forget the "=" in GNU long-option style.
if test -n "$no_eq"; then
fatal_error "Long option style $no_eq must be followed by \"=\"."
fi
# Checking the parser and tokens
if test -z "$parser"; then
fatal_error "No parser specification."; fi
if test -z "$par_tokens"; then
fatal_error "No syntactical tokens specification (use --par-tokens)."; fi
if test -z "$lex_tokens"; then
fatal_error "No lexical tokens specification (use --lex-tokens)."; fi
if test ! -e "$parser"; then
fatal_error "Parser specification \"$parser\" not found."; fi
if test ! -e "$lex_tokens"; then
fatal_error "Lexical tokens specification \"$lex_tokens\" not found."; fi
if test ! -e "$par_tokens"; then
fatal_error "Syntactical tokens specification \"$par_tokens\" not found."; fi
parser_ext=$(expr "$parser" : ".*\.mly$")
if test "$parser_ext" = "0"; then
fatal_error "Parser specification must have extension \".mly\"."; fi
par_tokens_ext=$(expr "$par_tokens" : ".*\.mly$")
if test "$par_tokens_ext" = "0"; then
fatal_error "Syntactical tokens specification must have extension \".mly\"."
fi
lex_tokens_ext=$(expr "$lex_tokens" : ".*\.mli$")
if test "$lex_tokens_ext" = "0"; then
fatal_error "Lexical tokens specification must have extension \".mli\"."
fi
mly=$parser
parser_base=$(basename $mly .mly)
par_tokens_base=$(basename $par_tokens .mly)
lex_tokens_base=$(basename $lex_tokens .mli)
# ====================================================================
# Menhir's flags
flags="--table --strict --external-tokens $lex_tokens_base \
--base $parser_base $par_tokens"
# ====================================================================
# Generating error messages with Menhir
msg=$parser_base.msg
err=.$msg.err
out=.$mly.out
if test -e $msg; then mv -f $msg $msg.old; echo "Saved $msg."; fi
printf "Making new $msg from $mly... "
menhir --list-errors $flags $mly > $msg 2>$out
if test "$?" = "0"; then
sentences=$(grep "YOUR SYNTAX ERROR MESSAGE HERE" $msg | wc -l)
if test -z "$sentences"; then printf "done.\n"
else
spurious=$(grep WARNING $msg | wc -l)
printf "done:\n"
printf "There are %s error sentences, %s with spurious reductions.\n" \
$sentences $spurious; fi
if test -s $out; then cat $out; fi
if test -f $msg.old; then
printf "Checking inclusion of mappings (new in old)... "
menhir --compare-errors $msg \
--compare-errors $msg.old \
$flags $mly 2> $out
if test "$?" = "0"; then
if test -s $out; then
printf "done:\n"
cat $out
else printf "done.\n"; fi
rm -f $out
printf "Updating $msg... "
menhir --update-errors $msg.old \
$flags $mly > $msg 2> $err
if test "$?" = "0"; then
printf "done:\n"
emphasise "Warning: The LR items may have changed."
emphasise "> Check your error messages again."
rm -f $err
else failed "."
touch $err
mv -f $msg.old $msg
echo "Restored $msg."; fi
else failed ":"
mv -f $out $err
sed -i -e "s/\.msg/.msg.new/g" \
-e "s/\.new\.old//g" $err
mv -f $msg $msg.new
emphasise "See $err and update $msg."
echo "The default messages are in $msg.new."
mv -f $msg.old $msg
echo "Restored $msg."; fi; fi
else
failed ":"
mv -f $out $err
emphasise "> See $err."
mv -f $msg.old $msg
echo "Restored $msg."
fi

View File

@ -6,8 +6,8 @@
*) *)
module Trace_tutorial = struct module Trace_tutorial = struct
(** The trace monad is fairly similar to the predefined option (** The trace monad is fairly similar to the predefined [option]
type. *) type. It is an instance of the predefined [result] type. *)
type annotation = string type annotation = string
type error = string type error = string
@ -23,18 +23,20 @@ module Trace_tutorial = struct
list of annotations (information about past successful list of annotations (information about past successful
computations), or it is a list of errors accumulated so far. computations), or it is a list of errors accumulated so far.
The former case is denoted by the data constructor [Ok], and the The former case is denoted by the data constructor [Ok], and the
second by [Errors]. second by [Error].
*) *)
type 'a result = type nonrec 'a result = ('a * annotation list, error list) result
Ok of 'a * annotation list (*
| Errors of error list = Ok of 'a * annotation list
| Error of error list
*)
(** The function [divide_trace] shows the basic use of the trace (** The function [divide_trace] shows the basic use of the trace
monad. monad.
*) *)
let divide_trace a b = let divide_trace a b =
if b = 0 if b = 0
then Errors [Printf.sprintf "division by zero: %d/%d" a b] then Error [Printf.sprintf "division by zero: %d/%d" a b]
else Ok (a/b, []) else Ok (a/b, [])
(** The function [divide_three] shows that when composing two (** The function [divide_three] shows that when composing two
@ -81,7 +83,7 @@ module Trace_tutorial = struct
match f x with match f x with
Ok (x', annot') -> Ok (x', annot' @ annot) Ok (x', annot') -> Ok (x', annot' @ annot)
| errors -> ignore annot; errors) | errors -> ignore annot; errors)
| Errors _ as e -> e | Error _ as e -> e
(** The function [divide_three_bind] is equivalent to the verbose (** The function [divide_three_bind] is equivalent to the verbose
[divide_three] above, but makes use of [bind]. [divide_three] above, but makes use of [bind].
@ -169,7 +171,7 @@ module Trace_tutorial = struct
{li If the list only contains [Ok] values, it strips the [Ok] {li If the list only contains [Ok] values, it strips the [Ok]
of each element and returns that list wrapped with [Ok].} of each element and returns that list wrapped with [Ok].}
{li Otherwise, one or more of the elements of the input list {li Otherwise, one or more of the elements of the input list
is [Errors], then [bind_list] returns the first error in the is [Error], then [bind_list] returns the first error in the
list.}} list.}}
*) *)
let rec bind_list = function let rec bind_list = function
@ -199,7 +201,7 @@ module Trace_tutorial = struct
And this will pass along the error triggered by [get key map]. And this will pass along the error triggered by [get key map].
*) *)
let trace err = function let trace err = function
Errors e -> Errors (err::e) Error e -> Error (err::e)
| ok -> ok | ok -> ok
(** The real trace monad is very similar to the one that we have (** The real trace monad is very similar to the one that we have
@ -293,9 +295,11 @@ type annotation_thunk = annotation thunk
(** Types of traced elements. It might be good to rename it [trace] at (** Types of traced elements. It might be good to rename it [trace] at
some point. some point.
*) *)
type 'a result = type nonrec 'a result = ('a * annotation_thunk list, error_thunk) result
Ok of 'a * annotation_thunk list (*
= Ok of 'a * annotation_thunk list
| Error of error_thunk | Error of error_thunk
*)
(** {1 Constructors} *) (** {1 Constructors} *)
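A small hedged sketch (not part of the diff) of what this change means for client code in the tutorial: values are now matched against the standard Ok/Error constructors instead of Ok/Errors; show_division is an illustrative name:

let show_division a b =
  match divide_trace a b with
  | Ok (quotient, _annotations) -> string_of_int quotient
  | Error messages -> String.concat "; " messages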

View File

@ -0,0 +1,2 @@
(lang dune 1.11)
(name tezos-protocol-005-PsBabyM1-parameters)

View File

@ -30,7 +30,7 @@ module type BASIC_DATA = sig
val pp: Format.formatter -> t -> unit val pp: Format.formatter -> t -> unit
end end
type t type t = Raw_context.t
type context = t type context = t
type public_key = Signature.Public_key.t type public_key = Signature.Public_key.t

View File

@ -0,0 +1,2 @@
(lang dune 1.11)
(name tezos-embedded-protocol-005-PsBabyM1)

View File

@ -45,7 +45,34 @@ val storage_error: storage_error -> 'a tzresult Lwt.t
(** Abstract view of the context. (** Abstract view of the context.
Includes a handle to the functional key-value database Includes a handle to the functional key-value database
({!Context.t}) along with some in-memory values (gas, etc.). *) ({!Context.t}) along with some in-memory values (gas, etc.). *)
type t module Int_set : sig
type t
end
type t = {
context: Context.t ;
constants: Constants_repr.parametric ;
first_level: Raw_level_repr.t ;
level: Level_repr.t ;
predecessor_timestamp: Time.t ;
timestamp: Time.t ;
fitness: Int64.t ;
deposits: Tez_repr.t Signature.Public_key_hash.Map.t ;
included_endorsements: int ;
allowed_endorsements:
(Signature.Public_key.t * int list * bool) Signature.Public_key_hash.Map.t ;
fees: Tez_repr.t ;
rewards: Tez_repr.t ;
block_gas: Z.t ;
operation_gas: Gas_limit_repr.t ;
internal_gas: Gas_limit_repr.internal_gas ;
storage_space_to_pay: Z.t option ;
allocated_contracts: int option ;
origination_nonce: Contract_repr.origination_nonce option ;
temporary_big_map: Z.t ;
internal_nonce: int ;
internal_nonces_used: Int_set.t ;
}
type context = t type context = t
type root_context = t type root_context = t

View File

@ -0,0 +1,2 @@
(lang dune 1.11)
(name tezos-utils)

View File

@ -0,0 +1,2 @@
(lang dune 1.11)
(name michelson-parser)