Merge branch 'rinderknecht-dev' into 'dev'

Fixing and adding error reporting in the parser

See merge request ligolang/ligo!316
This commit is contained in:
Christian Rinderknecht 2020-01-08 17:09:48 +00:00
commit d49a72847a
25 changed files with 692 additions and 332 deletions

View File

@ -63,7 +63,7 @@ let moves: moveset = Map.literal
<!--ReasonLIGO-->
```reasonligo
let moves: moveset =
let moves : moveset =
Map.literal([
("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx": address, (1, 2)),
("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, (0, 3)),
@ -82,19 +82,19 @@ If we want to access a move from our moveset above, we can use the `[]` operator
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
const my_balance : option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
```
<!--CameLIGO-->
```cameligo
let balance: move option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
let my_balance : move option = Map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
```
<!--ReasonLIGO-->
```reasonligo
let balance: option(move) =
let my_balance : option(move) =
Map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
```
<!--END_DOCUSAURUS_CODE_TABS-->
@ -106,19 +106,19 @@ Accessing a value in a map yields an option, however you can also get the value
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
const my_balance : move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
```
<!--CameLIGO-->
```cameligo
let balance: move = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
let my_balance : move = Map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
```
<!--ReasonLIGO-->
```reasonligo
let balance: move =
let my_balance : move =
Map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
```
@ -134,8 +134,8 @@ The values of a PascaLIGO map can be updated using the ordinary assignment synta
```pascaligo
function set_ (var m: moveset) : moveset is
block {
function set_ (var m: moveset) : moveset is
block {
m[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)] := (4,9);
} with m
```
@ -266,7 +266,7 @@ entries, potentially millions or billions. The cost of loading these entries int
the environment each time a user executes the contract would eventually become
too expensive were it not for big maps. Big maps are a data structure offered by
Tezos which handles the scaling concerns for us. In LIGO, the interface for big
maps is analogous to the one used for ordinary maps.
maps is analogous to the one used for ordinary maps.
Here's how we define a big map:
@ -341,19 +341,19 @@ If we want to access a move from our moveset above, we can use the `[]` operator
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
const balance: option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
const my_balance : option(move) = moves[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)];
```
<!--CameLIGO-->
```cameligo
let balance: move option = Big_map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
let my_balance : move option = Big_map.find_opt ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
```
<!--ReasonLIGO-->
```reasonligo
let balance: option(move) =
let my_balance : option(move) =
Big_map.find_opt("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
```
<!--END_DOCUSAURUS_CODE_TABS-->
@ -365,19 +365,19 @@ Accessing a value in a map yields an option, however you can also get the value
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
const balance: move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
const my_balance : move = get_force(("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address), moves);
```
<!--CameLIGO-->
```cameligo
let balance: move = Big_map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
let my_balance : move = Big_map.find ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) moves
```
<!--ReasonLIGO-->
```reasonligo
let balance: move = Big_map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
let my_balance : move = Big_map.find("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address, moves);
```
<!--END_DOCUSAURUS_CODE_TABS-->
@ -392,8 +392,8 @@ The values of a PascaLIGO big map can be updated using the ordinary assignment s
```pascaligo
function set_ (var m: moveset) : moveset is
block {
function set_ (var m : moveset) : moveset is
block {
m[("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address)] := (4,9);
} with m
```
@ -404,7 +404,7 @@ We can update a big map in CameLIGO using the `Big_map.update` built-in:
```cameligo
let updated_map: moveset =
let updated_map : moveset =
Big_map.update ("tz1gjaF81ZRRvdzjobyfVNsAeSC6PScjfQwN": address) (Some (4,9)) moves
```
@ -428,7 +428,7 @@ Here's how a custom record type is defined:
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
type user is record
type user is record
id: nat;
is_admin: bool;
name: string;
@ -479,8 +479,8 @@ let user: user = {
<!--ReasonLIGO-->
```reasonligo
let user: user = {
id: 1n,
is_admin: true,
id: 1n,
is_admin: true,
name: "Alice"
};
```
@ -494,12 +494,12 @@ If we want to obtain a value from a record for a given key, we can do the follow
<!--DOCUSAURUS_CODE_TABS-->
<!--Pascaligo-->
```pascaligo
const is_admin: bool = user.is_admin;
const is_admin : bool = user.is_admin;
```
<!--CameLIGO-->
```cameligo
let is_admin: bool = user.is_admin
let is_admin : bool = user.is_admin
```
<!--ReasonLIGO-->

View File

@ -137,12 +137,14 @@ type ident_err = Reserved_name
type nat_err = Invalid_natural
| Non_canonical_zero_nat
type sym_err = Invalid_symbol
type kwd_err = Invalid_keyword
val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token
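The new `mk_kwd` smart constructor follows the same pattern as `mk_ident` and `mk_sym`: it either builds a keyword token or reports `Invalid_keyword`. A minimal, hypothetical caller (assuming only the signature above and a `Region.t` obtained from the lexing engine; the helper name is illustrative) could look like this:

```ocaml
(* Hypothetical caller of the new [mk_kwd] smart constructor. It assumes a
   [LexToken] module matching the signature above and a [region] value
   coming from the lexing engine. *)
let keyword_or_fail (lexeme : string) (region : Region.t) : LexToken.token =
  match LexToken.mk_kwd lexeme region with
    Ok token -> token
  | Error LexToken.Invalid_keyword ->
      failwith (Printf.sprintf "Not a keyword: %s" lexeme)
```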

View File

@ -1,4 +1,6 @@
{
(* START HEADER *)
type lexeme = string
let sprintf = Printf.sprintf
@ -236,8 +238,7 @@ let to_region token = proj_token token |> fst
(* Injections *)
type int_err =
Non_canonical_zero
type int_err = Non_canonical_zero
(* LEXIS *)
@ -258,8 +259,7 @@ let keywords = [
(fun reg -> Then reg);
(fun reg -> True reg);
(fun reg -> Type reg);
(fun reg -> With reg)
]
(fun reg -> With reg)]
let reserved =
let open SSet in
@ -323,8 +323,20 @@ let lexicon : lexis =
cstr = build constructors;
res = reserved}
(* Keywords *)
type kwd_err = Invalid_keyword
let mk_kwd ident region =
match SMap.find_opt ident lexicon.kwd with
Some mk_kwd -> Ok (mk_kwd region)
| None -> Error Invalid_keyword
(* Identifiers *)
type ident_err = Reserved_name
(* END OF HEADER *)
}
(* START LEXER DEFINITION *)

View File

@ -19,9 +19,9 @@ module ExtParserLog =
include ParserLog
end
module M = ParserUnit.Make (IO)
(Lexer.Make (LexToken))
(AST)
(ExtParser)
(ParErr)
(ExtParserLog)
module MyLexer = Lexer.Make (LexToken)
module Unit =
ParserUnit.Make (IO)(MyLexer)(AST)(ExtParser)(ParErr)(ExtParserLog)
let () = Unit.run ()

View File

@ -5,8 +5,9 @@ module AST = Parser_pascaligo.AST
module ParserLog = Parser_pascaligo.ParserLog
module LexToken = Parser_pascaligo.LexToken
module Lexer = Lexer.Make(LexToken)
module SyntaxError = Parser_pascaligo.SyntaxError
module Errors = struct
module Errors = struct
let lexer_error (e: Lexer.error AST.reg) =
let title () = "lexer error" in
@ -18,37 +19,39 @@ module Errors = struct
] in
error ~data title message
let parser_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let title () = "parser error" in
let file = if source = "" then
""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source
in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
in
let reserved_name Region.{value; region} =
let title () = Printf.sprintf "reserved name \"%s\"" value in
let message () = "" in
let data = [
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
] in
error ~data title message
let unrecognized_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let title () = "unrecognized error" in
let file = if source = "" then
""
else
let duplicate_parameter Region.{value; region} =
let title () = Printf.sprintf "duplicate parameter \"%s\"" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
] in
error ~data title message
let duplicate_variant Region.{value; region} =
let title () = Printf.sprintf "duplicate variant \"%s\" in this\
type declaration" value in
let message () = "" in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ region)
] in
error ~data title message
let parser_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let title () = "parser error" in
let file = if source = "" then
""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source
in
let str = Format.sprintf
@ -59,14 +62,40 @@ module Errors = struct
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
in
let data = [
("unrecognized_loc",
("parser_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
)
] in
error ~data title message
let unrecognized_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let title () = "unrecognized error" in
let file = if source = "" then
""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source
in
let str = Format.sprintf
"Parse error at \"%s\" from (%d, %d) to (%d, %d). %s\n"
(Lexing.lexeme lexbuf)
start.pos_lnum (start.pos_cnum - start.pos_bol)
end_.pos_lnum (end_.pos_cnum - end_.pos_bol)
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
in
let data = [
("unrecognized_loc",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
] in
error ~data title message
@ -76,19 +105,25 @@ open Errors
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
let parse (parser: 'a parser) source lexbuf =
let parse (parser: 'a parser) source lexbuf =
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in
let result =
let result =
try
ok (parser read lexbuf)
with
| Parser.Error ->
SyntaxError.Error (Duplicate_parameter name) ->
fail @@ (duplicate_parameter name)
| SyntaxError.Error (Duplicate_variant name) ->
fail @@ (duplicate_variant name)
| SyntaxError.Error (Reserved_name name) ->
fail @@ (reserved_name name)
| Parser.Error ->
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in
fail @@ (parser_error source start end_ lexbuf)
| Lexer.Error e ->
| Lexer.Error e ->
fail @@ (lexer_error e)
| _ ->
| _ ->
let _ = Printexc.print_backtrace Pervasives.stdout in
let start = Lexing.lexeme_start_p lexbuf in
let end_ = Lexing.lexeme_end_p lexbuf in

View File

@ -760,4 +760,49 @@ let rhs_to_region = expr_to_region
let selection_to_region = function
FieldName {region; _}
| Component {region; _} -> region
| Component {region; _} -> region
(* Extracting variables from patterns *)
module Ord =
struct
type t = string Region.reg
let compare v1 v2 =
compare v1.value v2.value
end
module VSet = Set.Make (Ord)
let rec vars_of_pattern env = function
PConstr p -> vars_of_pconstr env p
| PVar v -> VSet.add v env
| PWild _ | PInt _ | PNat _ | PBytes _ | PString _ -> env
| PList l -> vars_of_plist env l
| PTuple t -> vars_of_ptuple env t.value
and vars_of_pconstr env = function
PUnit _ | PFalse _ | PTrue _ | PNone _ -> env
| PSomeApp {value=_, {value={inside; _};_}; _} ->
vars_of_pattern env inside
| PConstrApp {value=_, Some tuple; _} ->
vars_of_ptuple env tuple.value
| PConstrApp {value=_,None; _} -> env
and vars_of_plist env = function
PListComp {value; _} ->
vars_of_pinj env value
| PNil _ ->
env
| PParCons {value={inside; _}; _} ->
let head, _, tail = inside in
vars_of_pattern (vars_of_pattern env head) tail
| PCons {value; _} ->
Utils.nsepseq_foldl vars_of_pattern env value
and vars_of_pinj env inj =
Utils.sepseq_foldl vars_of_pattern env inj.elements
and vars_of_ptuple env {inside; _} =
Utils.nsepseq_foldl vars_of_pattern env inside
let vars_of_pattern = vars_of_pattern VSet.empty
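`vars_of_pattern` collects every variable bound by a pattern into a `VSet`. As a hedged sketch of a client, the reserved-name check that the parser performs on case clauses further below could be written as follows; `SSet` (a set of strings) and `reserved` are assumptions for the sake of the example, not part of this file:

```ocaml
(* Hypothetical client of [vars_of_pattern]: reject any variable bound by
   the pattern whose name belongs to [reserved], an assumed [SSet.t] of
   reserved identifiers. *)
let check_no_reserved (reserved : SSet.t) (pattern : pattern) =
  let vars = vars_of_pattern pattern in
  let is_reserved v = SSet.mem v.Region.value reserved in
  match VSet.choose_opt (VSet.filter is_reserved vars) with
    None -> ()
  | Some clash -> raise (SyntaxError.Error (SyntaxError.Reserved_name clash))
```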

View File

@ -615,3 +615,9 @@ val lhs_to_region : lhs -> Region.t
val rhs_to_region : rhs -> Region.t
val if_clause_to_region : if_clause -> Region.t
val selection_to_region : selection -> Region.t
(* Extracting variables from patterns *)
module VSet : Set.S with type elt = string Region.reg
val vars_of_pattern : pattern -> VSet.t

View File

@ -138,12 +138,14 @@ type ident_err = Reserved_name
type nat_err = Invalid_natural
| Non_canonical_zero_nat
type sym_err = Invalid_symbol
type kwd_err = Invalid_keyword
val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token

View File

@ -389,6 +389,15 @@ let lexicon : lexis =
cstr = build constructors;
res = reserved}
(* Keywords *)
type kwd_err = Invalid_keyword
let mk_kwd ident region =
match SMap.find_opt ident lexicon.kwd with
Some mk_kwd -> Ok (mk_kwd region)
| None -> Error Invalid_keyword
(* Identifiers *)
type ident_err = Reserved_name

View File

@ -6,7 +6,60 @@
open Region
open AST
(* END HEADER *)
module SSet = Utils.String.Set
let reserved =
let open SSet in
empty
|> add "get_force"
|> add "get_chain_id"
|> add "transaction"
|> add "get_contract"
|> add "get_entrypoint"
|> add "size"
|> add "int"
|> add "abs"
|> add "is_nat"
|> add "amount"
|> add "balance"
|> add "now"
|> add "unit"
|> add "source"
|> add "sender"
|> add "failwith"
|> add "bitwise_or"
|> add "bitwise_and"
|> add "bitwise_xor"
|> add "string_concat"
|> add "string_slice"
|> add "crypto_check"
|> add "crypto_hash_key"
|> add "bytes_concat"
|> add "bytes_slice"
|> add "bytes_pack"
|> add "bytes_unpack"
|> add "set_empty"
|> add "set_mem"
|> add "set_add"
|> add "set_remove"
|> add "set_iter"
|> add "set_fold"
|> add "list_iter"
|> add "list_fold"
|> add "list_map"
|> add "map_iter"
|> add "map_map"
|> add "map_fold"
|> add "map_remove"
|> add "map_update"
|> add "map_get"
|> add "map_mem"
|> add "sha_256"
|> add "sha_512"
|> add "blake2b"
|> add "cons"
(* END HEADER *)
%}
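The semantic actions below repeat the same membership test against `reserved` before raising `Reserved_name`. A hypothetical helper, shown only as an illustration of that check and not part of the patch, could factor it out:

```ocaml
(* Hypothetical helper: raise the new syntax error when a declared name
   belongs to the [reserved] set defined above. *)
let check_reserved (name : string Region.reg) =
  if SSet.mem name.value reserved then
    let open! SyntaxError in
    raise (Error (Reserved_name name))
```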
(* See [ParToken.mly] for the definition of tokens. *)
@ -118,6 +171,10 @@ declaration:
type_decl:
"type" type_name "is" type_expr ";"? {
let () =
if SSet.mem $2.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name $2)) in
let stop =
match $5 with
Some region -> region
@ -185,6 +242,14 @@ type_tuple:
sum_type:
"|"? nsepseq(variant,"|") {
let add acc {value; _} =
if VSet.mem value.constr acc then
let open! SyntaxError in
raise (Error (Duplicate_variant value.constr))
else VSet.add value.constr acc in
let variants =
Utils.nsepseq_foldl add VSet.empty $2 in
let () = ignore variants in
let region = nsepseq_to_region (fun x -> x.region) $2
in TSum {region; value=$2} }
@ -225,6 +290,13 @@ fun_expr:
"function" fun_name? parameters ":" type_expr "is"
block
"with" expr {
let () =
match $2 with
Some name ->
if SSet.mem name.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name name))
| None -> () in
let stop = expr_to_region $9 in
let region = cover $1 stop
and value = {kwd_function = $1;
@ -237,6 +309,13 @@ fun_expr:
return = $9}
in {region; value} }
| "function" fun_name? parameters ":" type_expr "is" expr {
let () =
match $2 with
Some name ->
if SSet.mem name.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name name))
| None -> () in
let stop = expr_to_region $7 in
let region = cover $1 stop
and value = {kwd_function = $1;
@ -256,7 +335,7 @@ fun_decl:
open_fun_decl { $1 }
| fun_expr ";" {
let region = cover $1.region $2
and value = {fun_expr=$1; terminator= Some $2}
and value = {fun_expr=$1; terminator = Some $2}
in {region; value} }
open_fun_decl:
@ -266,10 +345,31 @@ open_fun_decl:
in {region; value} }
parameters:
par(nsepseq(param_decl,";")) { $1 }
par(nsepseq(param_decl,";")) {
let open! AST in
let contents : (param_decl, semi) Utils.nsepseq par reg = $1 in
let add acc = function
ParamConst {value; _} ->
if VSet.mem value.var acc then
let open! SyntaxError in
raise (Error (Duplicate_parameter value.var))
else VSet.add value.var acc
| ParamVar {value; _} ->
if VSet.mem value.var acc then
let open! SyntaxError in
raise (Error (Duplicate_parameter value.var))
else VSet.add value.var acc in
let params =
Utils.nsepseq_foldl add VSet.empty contents.value.inside in
let () = ignore params
in $1 }
param_decl:
"var" var ":" param_type {
let () =
if SSet.mem $2.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name $2)) in
let stop = type_expr_to_region $4 in
let region = cover $1 stop
and value = {kwd_var = $1;
@ -279,6 +379,10 @@ param_decl:
in ParamVar {region; value}
}
| "const" var ":" param_type {
let () =
if SSet.mem $2.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name $2)) in
let stop = type_expr_to_region $4 in
let region = cover $1 stop
and value = {kwd_const = $1;
@ -346,13 +450,16 @@ open_var_decl:
unqualified_decl(OP):
var ":" type_expr OP expr {
let () =
if SSet.mem $1.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name $1)) in
let region = expr_to_region $5
in $1, $2, $3, $4, $5, region }
const_decl:
open_const_decl { $1 }
| open_const_decl ";" {
{$1 with value = {$1.value with terminator = Some $2}} }
open_const_decl ";"? {
{$1 with value = {$1.value with terminator=$2}} }
instruction:
conditional { Cond $1 }
@ -555,6 +662,14 @@ cases(rhs):
case_clause(rhs):
pattern "->" rhs {
let vars = AST.vars_of_pattern $1 in
let is_reserved elt = SSet.mem elt.value reserved in
let inter = VSet.filter is_reserved vars in
let () =
if not (VSet.is_empty inter) then
let clash = VSet.choose inter in
let open! SyntaxError in
raise (Error (Reserved_name clash)) in
fun rhs_to_region ->
let start = pattern_to_region $1 in
let region = cover start (rhs_to_region $3)
@ -596,6 +711,10 @@ for_loop:
in For (ForInt {region; value})
}
| "for" var arrow_clause? "in" collection expr block {
let () =
if SSet.mem $2.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name $2)) in
let region = cover $1 $7.region in
let value = {kwd_for = $1;
var = $2;
@ -613,12 +732,21 @@ collection:
var_assign:
var ":=" expr {
let () =
if SSet.mem $1.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name $1)) in
let region = cover $1.region (expr_to_region $3)
and value = {name=$1; assign=$2; expr=$3}
in {region; value} }
arrow_clause:
"->" var { $1,$2 }
"->" var {
let () =
if SSet.mem $2.value reserved then
let open! SyntaxError in
raise (Error (Reserved_name $2))
in $1,$2 }
(* Expressions *)
@ -646,7 +774,7 @@ cond_expr:
disj_expr:
conj_expr { $1 }
| disj_expr "or" conj_expr {
| disj_expr "or" conj_expr {
let start = expr_to_region $1
and stop = expr_to_region $3 in
let region = cover start stop

View File

@ -19,9 +19,55 @@ module ExtParserLog =
include ParserLog
end
module M = ParserUnit.Make (IO)
(Lexer.Make (LexToken))
(AST)
(ExtParser)
(ParErr)
(ExtParserLog)
module MyLexer = Lexer.Make (LexToken)
module Unit =
ParserUnit.Make (IO)(MyLexer)(AST)(ExtParser)(ParErr)(ExtParserLog)
open! SyntaxError
let () =
try Unit.run () with
(* Ad hoc errors from the parser *)
Error (Reserved_name name) ->
let () = Unit.close_all () in
let token =
MyLexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error _ ->
assert false (* Should not fail if [name] is valid. *)
| Ok invalid ->
let point = "Reserved name.\nHint: Change the name.\n",
None, invalid in
let error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Printf.eprintf "\027[31m%s\027[0m%!" error)
| Error (Duplicate_parameter name) ->
let () = Unit.close_all () in
let token =
MyLexer.Token.mk_ident name.Region.value name.Region.region in
(match token with
Stdlib.Error _ ->
assert false (* Should not fail if [name] is valid. *)
| Ok invalid ->
let point = "Duplicate parameter.\nHint: Change the name.\n",
None, invalid in
let error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Printf.eprintf "\027[31m%s\027[0m%!" error)
| Error (Duplicate_variant name) ->
let () = Unit.close_all () in
let token =
MyLexer.Token.mk_constr name.Region.value name.Region.region in
let point = "Duplicate variant in this type declaration.\n\
Hint: Change the name.\n",
None, token in
let error =
Unit.format_error ~offsets:IO.options#offsets
IO.options#mode point
in Printf.eprintf "\027[31m%s\027[0m%!" error
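The three handlers above repeat the same close/format/print sequence. As a hypothetical refactoring sketch (the helper name is illustrative, not part of this patch), the shared steps could be factored out:

```ocaml
(* Hypothetical helper: shared reporting for the ad hoc parser errors
   handled above. [Unit] and [IO] are the modules defined in this file. *)
let report message invalid_token =
  let () = Unit.close_all () in
  let point = message, None, invalid_token in
  let error =
    Unit.format_error ~offsets:IO.options#offsets IO.options#mode point
  in Printf.eprintf "\027[31m%s\027[0m%!" error
```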

View File

@ -0,0 +1,8 @@
type t =
Reserved_name of string Region.reg
| Duplicate_parameter of string Region.reg
| Duplicate_variant of string Region.reg
type error = t
exception Error of t

View File

@ -0,0 +1,8 @@
type t =
Reserved_name of string Region.reg
| Duplicate_parameter of string Region.reg
| Duplicate_variant of string Region.reg
type error = t
exception Error of t

View File

@ -8,7 +8,8 @@
(library
(name parser_pascaligo)
(public_name ligo.parser.pascaligo)
(modules AST pascaligo Parser ParserLog LexToken)
(modules
SyntaxError AST pascaligo Parser ParserLog LexToken)
(libraries
menhirLib
parser_shared

View File

@ -29,11 +29,11 @@ module Errors = struct
] in
error ~data title message
let parser_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let parser_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let title () = "parser error" in
let file = if source = "" then
""
else
let file = if source = "" then
""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source
in
let str = Format.sprintf
@ -44,22 +44,22 @@ module Errors = struct
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
)
] in
error ~data title message
let unrecognized_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let unrecognized_error source (start: Lexing.position) (end_: Lexing.position) lexbuf =
let title () = "unrecognized error" in
let file = if source = "" then
""
else
let file = if source = "" then
""
else
Format.sprintf "In file \"%s|%s\"" start.pos_fname source
in
let str = Format.sprintf
@ -70,14 +70,14 @@ module Errors = struct
file
in
let message () = str in
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
let loc = Region.make
~start:(Pos.from_byte start)
~stop:(Pos.from_byte end_)
in
let data = [
("location",
fun () -> Format.asprintf "%a" Location.pp_lift @@ loc
)
)
] in
error ~data title message
@ -87,13 +87,13 @@ open Errors
type 'a parser = (Lexing.lexbuf -> LexToken.token) -> Lexing.lexbuf -> 'a
let parse (parser: 'a parser) source lexbuf =
let parse (parser: 'a parser) source lexbuf =
let Lexer.{read ; close ; _} = Lexer.open_token_stream None in
let result =
let result =
try
ok (parser read lexbuf)
with
| SyntaxError.Error (WrongFunctionArguments e) ->
with
| SyntaxError.Error (WrongFunctionArguments e) ->
fail @@ (wrong_function_arguments e)
| Parser.Error ->
let start = Lexing.lexeme_start_p lexbuf in
@ -110,7 +110,7 @@ let parse (parser: 'a parser) source lexbuf =
close ();
result
let parse_file (source: string) : AST.t result =
let parse_file (source: string) : AST.t result =
let pp_input =
let prefix = Filename.(source |> basename |> remove_extension)
and suffix = ".pp.religo"
@ -131,5 +131,5 @@ let parse_string (s:string) : AST.t result =
parse (Parser.contract) "" lexbuf
let parse_expression (s:string) : AST.expr result =
let lexbuf = Lexing.from_string s in
let lexbuf = Lexing.from_string s in
parse (Parser.interactive_expr) "" lexbuf

View File

@ -135,15 +135,17 @@ type ident_err = Reserved_name
type nat_err = Invalid_natural
| Non_canonical_zero_nat
type sym_err = Invalid_symbol
type kwd_err = Invalid_keyword
val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token
val mk_int : lexeme -> Region.t -> (token, int_err) result
val mk_nat : lexeme -> Region.t -> (token, nat_err) result
val mk_mutez : lexeme -> Region.t -> (token, int_err) result
val mk_ident : lexeme -> Region.t -> (token, ident_err) result
val mk_constr : lexeme -> Region.t -> token
val mk_sym : lexeme -> Region.t -> (token, sym_err) result
val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result
val mk_string : lexeme -> Region.t -> token
val mk_bytes : lexeme -> Region.t -> token
val mk_constr : lexeme -> Region.t -> token
val eof : Region.t -> token
(* Predicates *)

View File

@ -1,4 +1,6 @@
{
(* START OF HEADER *)
type lexeme = string
let sprintf = Printf.sprintf
@ -91,116 +93,117 @@ type t =
| EOF of Region.t (* End of file *)
type token = t
let proj_token = function
| CAT region -> region, "CAT"
| MINUS region -> region, "MINUS"
| PLUS region -> region, "PLUS"
| SLASH region -> region, "SLASH"
| TIMES region -> region, "TIMES"
| LPAR region -> region, "LPAR"
| RPAR region -> region, "RPAR"
| LBRACKET region -> region, "LBRACKET"
| RBRACKET region -> region, "RBRACKET"
| LBRACE region -> region, "LBRACE"
| RBRACE region -> region, "RBRACE"
| COMMA region -> region, "COMMA"
| SEMI region -> region, "SEMI"
| VBAR region -> region, "VBAR"
| COLON region -> region, "COLON"
| DOT region -> region, "DOT"
| ELLIPSIS region -> region, "ELLIPSIS"
| WILD region -> region, "WILD"
| EQ region -> region, "EQ"
| EQEQ region -> region, "EQEQ"
| NE region -> region, "NE"
| LT region -> region, "LT"
| GT region -> region, "GT"
| LE region -> region, "LE"
| GE region -> region, "GE"
| ARROW region -> region, "ARROW"
| BOOL_OR region -> region, "BOOL_OR"
| BOOL_AND region -> region, "BOOL_AND"
| Ident Region.{region; value} ->
CAT region -> region, "CAT"
| MINUS region -> region, "MINUS"
| PLUS region -> region, "PLUS"
| SLASH region -> region, "SLASH"
| TIMES region -> region, "TIMES"
| LPAR region -> region, "LPAR"
| RPAR region -> region, "RPAR"
| LBRACKET region -> region, "LBRACKET"
| RBRACKET region -> region, "RBRACKET"
| LBRACE region -> region, "LBRACE"
| RBRACE region -> region, "RBRACE"
| COMMA region -> region, "COMMA"
| SEMI region -> region, "SEMI"
| VBAR region -> region, "VBAR"
| COLON region -> region, "COLON"
| DOT region -> region, "DOT"
| ELLIPSIS region -> region, "ELLIPSIS"
| WILD region -> region, "WILD"
| EQ region -> region, "EQ"
| EQEQ region -> region, "EQEQ"
| NE region -> region, "NE"
| LT region -> region, "LT"
| GT region -> region, "GT"
| LE region -> region, "LE"
| GE region -> region, "GE"
| ARROW region -> region, "ARROW"
| BOOL_OR region -> region, "BOOL_OR"
| BOOL_AND region -> region, "BOOL_AND"
| Ident Region.{region; value} ->
region, sprintf "Ident %s" value
| Constr Region.{region; value} ->
| Constr Region.{region; value} ->
region, sprintf "Constr %s" value
| Int Region.{region; value = s,n} ->
| Int Region.{region; value = s,n} ->
region, sprintf "Int (\"%s\", %s)" s (Z.to_string n)
| Nat Region.{region; value = s,n} ->
| Nat Region.{region; value = s,n} ->
region, sprintf "Nat (\"%s\", %s)" s (Z.to_string n)
| Mutez Region.{region; value = s,n} ->
| Mutez Region.{region; value = s,n} ->
region, sprintf "Mutez (\"%s\", %s)" s (Z.to_string n)
| String Region.{region; value} ->
| String Region.{region; value} ->
region, sprintf "String %s" value
| Bytes Region.{region; value = s,b} ->
| Bytes Region.{region; value = s,b} ->
region,
sprintf "Bytes (\"%s\", \"0x%s\")"
s (Hex.to_string b)
| Else region -> region, "Else"
| False region -> region, "False"
| If region -> region, "If"
| Let region -> region, "Let"
| Switch region -> region, "Switch"
| Mod region -> region, "Mod"
| NOT region -> region, "!"
| Or region -> region, "Or"
| True region -> region, "True"
| Type region -> region, "Type"
| C_None region -> region, "C_None"
| C_Some region -> region, "C_Some"
| EOF region -> region, "EOF"
| Else region -> region, "Else"
| False region -> region, "False"
| If region -> region, "If"
| Let region -> region, "Let"
| Switch region -> region, "Switch"
| Mod region -> region, "Mod"
| NOT region -> region, "!"
| Or region -> region, "Or"
| True region -> region, "True"
| Type region -> region, "Type"
| C_None region -> region, "C_None"
| C_Some region -> region, "C_Some"
| EOF region -> region, "EOF"
let to_lexeme = function
| CAT _ -> "++"
| MINUS _ -> "-"
| PLUS _ -> "+"
| SLASH _ -> "/"
| TIMES _ -> "*"
| LPAR _ -> "("
| RPAR _ -> ")"
| LBRACKET _ -> "["
| RBRACKET _ -> "]"
| LBRACE _ -> "{"
| RBRACE _ -> "}"
| COMMA _ -> ","
| SEMI _ -> ";"
| VBAR _ -> "|"
| COLON _ -> ":"
| DOT _ -> "."
| ELLIPSIS _ -> "..."
| WILD _ -> "_"
| EQ _ -> "="
| EQEQ _ -> "=="
| NE _ -> "!="
| LT _ -> "<"
| GT _ -> ">"
| LE _ -> "<="
| GE _ -> ">="
| ARROW _ -> "=>"
| BOOL_OR _ -> "||"
| BOOL_AND _ -> "&&"
| Ident id -> id.Region.value
| Constr id -> id.Region.value
| Int i
| Nat i
| Mutez i -> fst i.Region.value
| String s -> s.Region.value
| Bytes b -> fst b.Region.value
| Else _ -> "else"
| False _ -> "false"
| If _ -> "if"
| Let _ -> "let"
| Mod _ -> "mod"
| NOT _ -> "!"
| Or _ -> "or"
| Switch _ -> "switch"
| True _ -> "true"
| Type _ -> "type"
| C_None _ -> "None"
| C_Some _ -> "Some"
| EOF _ -> ""
CAT _ -> "++"
| MINUS _ -> "-"
| PLUS _ -> "+"
| SLASH _ -> "/"
| TIMES _ -> "*"
| LPAR _ -> "("
| RPAR _ -> ")"
| LBRACKET _ -> "["
| RBRACKET _ -> "]"
| LBRACE _ -> "{"
| RBRACE _ -> "}"
| COMMA _ -> ","
| SEMI _ -> ";"
| VBAR _ -> "|"
| COLON _ -> ":"
| DOT _ -> "."
| ELLIPSIS _ -> "..."
| WILD _ -> "_"
| EQ _ -> "="
| EQEQ _ -> "=="
| NE _ -> "!="
| LT _ -> "<"
| GT _ -> ">"
| LE _ -> "<="
| GE _ -> ">="
| ARROW _ -> "=>"
| BOOL_OR _ -> "||"
| BOOL_AND _ -> "&&"
| Ident id -> id.Region.value
| Constr id -> id.Region.value
| Int i
| Nat i
| Mutez i -> fst i.Region.value
| String s -> s.Region.value
| Bytes b -> fst b.Region.value
| Else _ -> "else"
| False _ -> "false"
| If _ -> "if"
| Let _ -> "let"
| Mod _ -> "mod"
| NOT _ -> "!"
| Or _ -> "or"
| Switch _ -> "switch"
| True _ -> "true"
| Type _ -> "type"
| C_None _ -> "None"
| C_Some _ -> "Some"
| EOF _ -> ""
let to_string token ?(offsets=true) mode =
let region, val_str = proj_token token in
@ -216,20 +219,20 @@ type ident_err = Reserved_name
type nat_err = Invalid_natural
| Non_canonical_zero_nat
type sym_err = Invalid_symbol
type kwd_err = Invalid_keyword
(* LEXIS *)
let keywords = [
(fun reg -> Else reg);
(fun reg -> False reg);
(fun reg -> If reg);
(fun reg -> Let reg);
(fun reg -> Else reg);
(fun reg -> False reg);
(fun reg -> If reg);
(fun reg -> Let reg);
(fun reg -> Switch reg);
(fun reg -> Mod reg);
(fun reg -> Or reg);
(fun reg -> True reg);
(fun reg -> Type reg);
]
(fun reg -> Mod reg);
(fun reg -> Or reg);
(fun reg -> True reg);
(fun reg -> Type reg)]
(* See: http://caml.inria.fr/pub/docs/manual-ocaml/lex.html#sec86 and
https://github.com/facebook/reason/blob/master/src/reason-parser/reason_parser.mly *)
@ -305,6 +308,14 @@ let lexicon : lexis =
cstr = build constructors;
res = reserved}
(* Keywords *)
let mk_kwd ident region =
match SMap.find_opt ident lexicon.kwd with
Some mk_kwd -> Ok (mk_kwd region)
| None -> Error Invalid_keyword
(* END OF HEADER *)
}
(* START LEXER DEFINITION *)
@ -380,40 +391,47 @@ let mk_mutez lexeme region =
let eof region = EOF region
(* Making symbols *)
let mk_sym lexeme region =
match lexeme with
"-" -> Ok (MINUS region)
| "+" -> Ok (PLUS region)
| "/" -> Ok (SLASH region)
| "*" -> Ok (TIMES region)
| "[" -> Ok (LBRACKET region)
| "]" -> Ok (RBRACKET region)
| "{" -> Ok (LBRACE region)
| "}" -> Ok (RBRACE region)
| "," -> Ok (COMMA region)
| ";" -> Ok (SEMI region)
| "|" -> Ok (VBAR region)
| ":" -> Ok (COLON region)
| "." -> Ok (DOT region)
| "_" -> Ok (WILD region)
| "=" -> Ok (EQ region)
| "!=" -> Ok (NE region)
| "<" -> Ok (LT region)
| ">" -> Ok (GT region)
| "<=" -> Ok (LE region)
| ">=" -> Ok (GE region)
| "||" -> Ok (BOOL_OR region)
| "&&" -> Ok (BOOL_AND region)
| "(" -> Ok (LPAR region)
| ")" -> Ok (RPAR region)
"-" -> Ok (MINUS region)
| "+" -> Ok (PLUS region)
| "/" -> Ok (SLASH region)
| "*" -> Ok (TIMES region)
| "[" -> Ok (LBRACKET region)
| "]" -> Ok (RBRACKET region)
| "{" -> Ok (LBRACE region)
| "}" -> Ok (RBRACE region)
| "," -> Ok (COMMA region)
| ";" -> Ok (SEMI region)
| "|" -> Ok (VBAR region)
| ":" -> Ok (COLON region)
| "." -> Ok (DOT region)
| "_" -> Ok (WILD region)
| "=" -> Ok (EQ region)
| "!=" -> Ok (NE region)
| "<" -> Ok (LT region)
| ">" -> Ok (GT region)
| "<=" -> Ok (LE region)
| ">=" -> Ok (GE region)
| "||" -> Ok (BOOL_OR region)
| "&&" -> Ok (BOOL_AND region)
| "(" -> Ok (LPAR region)
| ")" -> Ok (RPAR region)
(* Symbols specific to ReasonLIGO *)
| "..."-> Ok (ELLIPSIS region)
| "=>" -> Ok (ARROW region)
| "==" -> Ok (EQEQ region)
| "!" -> Ok (NOT region)
| "++" -> Ok (CAT region)
| _ -> Error Invalid_symbol
| "..." -> Ok (ELLIPSIS region)
| "=>" -> Ok (ARROW region)
| "==" -> Ok (EQEQ region)
| "!" -> Ok (NOT region)
| "++" -> Ok (CAT region)
(* Invalid symbols *)
| _ -> Error Invalid_symbol
(* Identifiers *)
@ -448,26 +466,26 @@ let is_ident = function
| _ -> false
let is_kwd = function
| Else _
| False _
| If _
| Let _
| Switch _
| Mod _
| Or _
| True _
| Type _
| _ -> false
Else _
| False _
| If _
| Let _
| Switch _
| Mod _
| Or _
| True _
| Type _ -> true
| _ -> false
let is_constr = function
| Constr _
Constr _
| Ident _
| False _
| True _ -> true
| _ -> false
| True _ -> true
| _ -> false
let is_sym = function
| CAT _
CAT _
| MINUS _
| PLUS _
| SLASH _

View File

@ -370,7 +370,7 @@ ptuple:
in PTuple {value=$1; region} }
unit:
"(" ")" { {region = cover $1 $2; value = ghost, ghost} }
"(" ")" { {region = cover $1 $2; value = $1, $2} }
(* Expressions *)
@ -790,7 +790,7 @@ sequence_or_record_in:
sequence_or_record:
"{" sequence_or_record_in "}" {
let compound = Braces($1, $3) in
let compound = Braces ($1,$3) in
let region = cover $1 $3 in
match $2 with
PaSequence s ->

View File

@ -19,9 +19,26 @@ module ExtParserLog =
include ParserLog
end
module M = ParserUnit.Make (IO)
(Lexer.Make (LexToken))
(AST)
(ExtParser)
(ParErr)
(ExtParserLog)
module MyLexer = Lexer.Make (LexToken)
module Unit =
ParserUnit.Make (IO)(MyLexer)(AST)(ExtParser)(ParErr)(ExtParserLog)
(* Main *)
let () =
try Unit.run () with
(* Ad hoc errors from the parsers *)
SyntaxError.Error (SyntaxError.WrongFunctionArguments expr) ->
let () = Unit.close_all () in
let msg = "It looks like you are defining a function, \
however we do not\n\
understand the parameters declaration.\n\
Examples of valid functions:\n\
let x = (a: string, b: int) : int => 3;\n\
let x = (a: string) : string => \"Hello, \" ++ a;\n"
and reg = AST.expr_to_region expr in
let error = Unit.short_error ~offsets:IO.options#offsets
IO.options#mode msg reg
in Printf.eprintf "\027[31m%s\027[0m%!" error

View File

@ -1,4 +1,4 @@
type error =
type error =
| WrongFunctionArguments of AST.expr
exception Error of error
exception Error of error

View File

@ -1,5 +1,7 @@
(* Generic parser for LIGO *)
module Region = Simple_utils.Region
module type PARSER =
sig
(* The type of tokens, abstract syntax trees and expressions *)
@ -104,17 +106,22 @@ module Make (Lexer: Lexer.S)
let trailer =
match valid_opt with
None ->
if Lexer.Token.is_eof invalid then ""
else let invalid_lexeme = Lexer.Token.to_lexeme invalid in
Printf.sprintf ", before \"%s\"" invalid_lexeme
if Lexer.Token.is_eof invalid then ""
else let invalid_lexeme = Lexer.Token.to_lexeme invalid in
Printf.sprintf ", before \"%s\"" invalid_lexeme
| Some valid ->
let valid_lexeme = Lexer.Token.to_lexeme valid in
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
if Lexer.Token.is_eof invalid then s
else
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
let valid_lexeme = Lexer.Token.to_lexeme valid in
let s = Printf.sprintf ", after \"%s\"" valid_lexeme in
if Lexer.Token.is_eof invalid then s
else
let invalid_lexeme = Lexer.Token.to_lexeme invalid in
Printf.sprintf "%s and before \"%s\"" s invalid_lexeme in
let header = header ^ trailer in
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
let short_error ?(offsets=true) mode msg (invalid_region: Region.t) =
let () = assert (not (invalid_region#is_ghost)) in
let header =
"Parse error " ^ invalid_region#to_string ~offsets mode in
header ^ (if msg = "" then ".\n" else ":\n" ^ msg)
end

View File

@ -1,5 +1,7 @@
(* Generic parser API for LIGO *)
module Region = Simple_utils.Region
module type PARSER =
sig
(* The type of tokens. *)
@ -56,5 +58,9 @@ module Make (Lexer: Lexer.S)
exception Point of error
val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string
val format_error :
?offsets:bool -> [`Byte | `Point] -> error -> string
val short_error :
?offsets:bool -> [`Byte | `Point] -> message -> Region.t -> string
end

View File

@ -14,7 +14,8 @@ module type Pretty =
state -> ast -> unit
val mk_state :
offsets:bool -> mode:[`Point|`Byte] -> buffer:Buffer.t -> state
val print_tokens : state -> ast -> unit
val print_tokens :
state -> ast -> unit
end
module Make (IO: S)
@ -85,6 +86,9 @@ module Make (IO: S)
module ParserFront = ParserAPI.Make (Lexer) (Parser) (ParErr)
let format_error = ParserFront.format_error
let short_error = ParserFront.short_error
let lexer_inst = Lexer.open_token_stream (Some pp_input)
let Lexer.{read; buffer; get_win; get_pos; get_last; close} = lexer_inst
@ -103,7 +107,7 @@ module Make (IO: S)
(* Main *)
let () =
let run () =
try
let ast =
if IO.options#mono
@ -131,37 +135,41 @@ module Make (IO: S)
end
with
(* Lexing errors *)
Lexer.Error err ->
close_all ();
let msg =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in prerr_string msg
close_all ();
let msg =
Lexer.format_error ~offsets:IO.options#offsets
IO.options#mode err ~file
in prerr_string msg
(* Incremental API of Menhir *)
| ParserFront.Point point ->
let () = close_all () in
let error =
ParserFront.format_error ~offsets:IO.options#offsets
IO.options#mode point
in eprintf "\027[31m%s\027[0m%!" error
let () = close_all () in
let error =
ParserFront.format_error ~offsets:IO.options#offsets
IO.options#mode point
in eprintf "\027[31m%s\027[0m%!" error
(* Monolithic API of Menhir *)
| Parser.Error ->
let () = close_all () in
let invalid, valid_opt =
let () = close_all () in
let invalid, valid_opt =
match get_win () with
Lexer.Nil ->
assert false (* Safe: There is always at least EOF. *)
| Lexer.One invalid -> invalid, None
| Lexer.Two (invalid, valid) -> invalid, Some valid in
let point = "", valid_opt, invalid in
let error =
ParserFront.format_error ~offsets:IO.options#offsets
let point = "", valid_opt, invalid in
let error =
ParserFront.format_error ~offsets:IO.options#offsets
IO.options#mode point
in eprintf "\027[31m%s\027[0m%!" error
in eprintf "\027[31m%s\027[0m%!" error
(* I/O errors *)
| Sys_error msg -> Utils.highlight msg
end

View File

@ -1,5 +1,5 @@
function get_chain_id (const tt : chain_id) : chain_id is
function chain_id (const tt : chain_id) : chain_id is
block {
var toto : chain_id := get_chain_id ;
} with ( toto )

View File

@ -870,7 +870,7 @@ let map_ type_f path : unit result =
let make_expected = fun _ -> e_some @@ e_int 4 in
expect_eq_n program "get_" make_input make_expected
in
let%bind () =
let%bind () =
let input_map = ez [(23, 10) ; (42, 4)] in
expect_eq program "mem" (e_tuple [(e_int 23) ; input_map]) (e_bool true)
in
@ -1057,27 +1057,27 @@ let loop () : unit result =
let make_input = e_nat in
let make_expected = fun n -> e_nat (n * (n + 1) / 2) in
expect_eq_n_pos_mid program "while_sum" make_input make_expected in
let%bind () =
let%bind () =
let make_input = e_nat in
let make_expected = fun n -> e_int (n * (n + 1) / 2) in
expect_eq_n_pos_mid program "for_sum" make_input make_expected in
let input = e_unit () in
let%bind () =
let%bind () =
let expected = e_pair (e_int 3) (e_string "totototo") in
expect_eq program "for_collection_list" input expected in
let%bind () =
let%bind () =
let expected = e_pair (e_int 6) (e_string "totototo") in
expect_eq program "for_collection_set" input expected in
let%bind () =
let%bind () =
let expected = e_pair (e_int 6) (e_string "123") in
expect_eq program "for_collection_map_kv" input expected in
let%bind () =
let%bind () =
let expected = (e_string "123") in
expect_eq program "for_collection_map_k" input expected in
let%bind () =
let%bind () =
let expected = (e_int 0) in
expect_eq program "for_collection_empty" input expected in
let%bind () =
let%bind () =
let expected = (e_int 13) in
expect_eq program "for_collection_if_and_local_var" input expected in
let%bind () =
@ -1680,12 +1680,12 @@ let implicit_account_religo () : unit result =
ok ()
let tuples_sequences_functions_religo () : unit result =
let%bind _ = retype_file "./contracts/tuples_sequences_functions.religo" in
let%bind _ = retype_file "./contracts/tuples_sequences_functions.religo" in
ok ()
let is_nat () : unit result =
let%bind program = type_file "./contracts/isnat.ligo" in
let%bind () =
let%bind () =
let input = e_int 10 in
let expected = e_some (e_nat 10) in
expect_eq program "main" input expected
@ -1698,7 +1698,7 @@ let is_nat () : unit result =
let is_nat_mligo () : unit result =
let%bind program = mtype_file "./contracts/isnat.mligo" in
let%bind () =
let%bind () =
let input = e_int 10 in
let expected = e_some (e_nat 10) in
expect_eq program "main" input expected
@ -1711,7 +1711,7 @@ let is_nat_mligo () : unit result =
let is_nat_religo () : unit result =
let%bind program = retype_file "./contracts/isnat.religo" in
let%bind () =
let%bind () =
let input = e_int 10 in
let expected = e_some (e_nat 10) in
expect_eq program "main" input expected
@ -1745,7 +1745,7 @@ let deep_access_ligo () : unit result =
let make_expected = e_string "one" in
expect_eq program "nested_record" make_input make_expected in
ok ()
let entrypoints_ligo () : unit result =
let%bind _program = type_file "./contracts/entrypoints.ligo" in
@ -1759,7 +1759,7 @@ let chain_id () : unit result =
Tezos_base__TzPervasives.Chain_id.zero in
let make_input = e_chain_id pouet in
let make_expected = e_chain_id pouet in
let%bind () = expect_eq program "get_chain_id" make_input make_expected in
let%bind () = expect_eq program "chain_id" make_input make_expected in
ok ()
let key_hash () : unit result =
@ -1830,46 +1830,46 @@ let bytes_unpack () : unit result =
let%bind () = expect_eq program "id_address" (e_address addr) (e_some (e_address addr)) in
ok ()
let empty_case () : unit result =
let empty_case () : unit result =
let%bind program = type_file "./contracts/empty_case.ligo" in
let%bind () =
let input _ = e_constructor "Bar" (e_int 1) in
let expected _ = e_int 1 in
let expected _ = e_int 1 in
expect_eq_n program "main" input expected
in
in
let%bind () =
let input _ = e_constructor "Baz" (e_unit ()) in
let expected _ = e_int (-1) in
let expected _ = e_int (-1) in
expect_eq_n program "main" input expected
in
in
ok ()
let empty_case_mligo () : unit result =
let empty_case_mligo () : unit result =
let%bind program = mtype_file "./contracts/empty_case.mligo" in
let%bind () =
let input _ = e_constructor "Bar" (e_int 1) in
let expected _ = e_int 1 in
let expected _ = e_int 1 in
expect_eq_n program "main" input expected
in
in
let%bind () =
let input _ = e_constructor "Baz" (e_unit ()) in
let expected _ = e_int (-1) in
let expected _ = e_int (-1) in
expect_eq_n program "main" input expected
in
in
ok ()
let empty_case_religo () : unit result =
let empty_case_religo () : unit result =
let%bind program = retype_file "./contracts/empty_case.religo" in
let%bind () =
let input _ = e_constructor "Bar" (e_int 1) in
let expected _ = e_int 1 in
let expected _ = e_int 1 in
expect_eq_n program "main" input expected
in
in
let%bind () =
let input _ = e_constructor "Baz" (e_unit ()) in
let expected _ = e_int (-1) in
let expected _ = e_int (-1) in
expect_eq_n program "main" input expected
in
in
ok ()
let main = test_suite "Integration (End to End)" [