Added new error for duplicated record fields.
Removed the interface for the AST (for the sake of maintenance).
This commit is contained in:
parent
330d2ab351
commit
d123abd46f
@@ -2,7 +2,7 @@

(* To disable warning about multiply-defined record labels. *)

[@@@warning "-30-42"]
[@@@warning "-30-40-42"]

(* Utilities *)
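A note on the warning numbers (an aside, not part of the diff): warning 30 is OCaml's duplicate-definition warning for record labels and constructors shared by mutually recursive types, and warnings 40 and 42 concern type-directed disambiguation of such names. A minimal sketch of why the AST silences warning 30:

[@@@warning "-30"]

(* Without the attribute, reusing the label [name] across mutually
   recursive type definitions triggers warning 30 (duplicate definitions). *)
type const_decl = {name : string}
and  var_decl   = {name : string}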
@@ -21,22 +21,6 @@ open Utils

type 'a reg = 'a Region.reg

let rec last to_region = function
    [] -> Region.ghost
| [x] -> to_region x
| _::t -> last to_region t

let nseq_to_region to_region (hd,tl) =
  Region.cover (to_region hd) (last to_region tl)

let nsepseq_to_region to_region (hd,tl) =
  let reg (_, item) = to_region item in
  Region.cover (to_region hd) (last reg tl)

let sepseq_to_region to_region = function
      None -> Region.ghost
| Some seq -> nsepseq_to_region to_region seq

(* Keywords of LIGO *)

type keyword = Region.t
@@ -85,32 +69,32 @@ type c_Unit = Region.t

(* Symbols *)

type semi = Region.t
type comma = Region.t
type lpar = Region.t
type rpar = Region.t
type lbrace = Region.t
type rbrace = Region.t
type lbracket = Region.t
type rbracket = Region.t
type cons = Region.t
type vbar = Region.t
type arrow = Region.t
type assign = Region.t
type equal = Region.t
type colon = Region.t
type lt = Region.t
type leq = Region.t
type gt = Region.t
type geq = Region.t
type neq = Region.t
type plus = Region.t
type minus = Region.t
type slash = Region.t
type times = Region.t
type dot = Region.t
type wild = Region.t
type cat = Region.t
type semi = Region.t (* ";" *)
type comma = Region.t (* "," *)
type lpar = Region.t (* "(" *)
type rpar = Region.t (* ")" *)
type lbrace = Region.t (* "{" *)
type rbrace = Region.t (* "}" *)
type lbracket = Region.t (* "[" *)
type rbracket = Region.t (* "]" *)
type cons = Region.t (* "#" *)
type vbar = Region.t (* "|" *)
type arrow = Region.t (* "->" *)
type assign = Region.t (* ":=" *)
type equal = Region.t (* "=" *)
type colon = Region.t (* ":" *)
type lt = Region.t (* "<" *)
type leq = Region.t (* "<=" *)
type gt = Region.t (* ">" *)
type geq = Region.t (* ">=" *)
type neq = Region.t (* "=/=" *)
type plus = Region.t (* "+" *)
type minus = Region.t (* "-" *)
type slash = Region.t (* "/" *)
type times = Region.t (* "*" *)
type dot = Region.t (* "." *)
type wild = Region.t (* "_" *)
type cat = Region.t (* "^" *)

(* Virtual tokens *)
@@ -613,9 +597,24 @@ and list_pattern =
| PParCons of (pattern * cons * pattern) par reg
| PCons of (pattern, cons) nsepseq reg


(* Projecting regions *)

open! Region
let rec last to_region = function
    [] -> Region.ghost
| [x] -> to_region x
| _::t -> last to_region t

let nseq_to_region to_region (hd,tl) =
  Region.cover (to_region hd) (last to_region tl)

let nsepseq_to_region to_region (hd,tl) =
  let reg (_, item) = to_region item in
  Region.cover (to_region hd) (last reg tl)

let sepseq_to_region to_region = function
      None -> Region.ghost
| Some seq -> nsepseq_to_region to_region seq

let type_expr_to_region = function
  TProd {region; _}

@@ -760,4 +759,4 @@ let rhs_to_region = expr_to_region

let selection_to_region = function
  FieldName {region; _}
| Component {region; _} -> region
| Component {region; _} -> region
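For illustration only (a sketch using the region helpers and projection functions above; it is not part of the commit), this is how the region spanned by a non-empty sequence of patterns could be computed:

(* Hypothetical helper: the region covering a non-empty sequence of
   patterns, from the head to the last element of the tail. *)
let patterns_to_region (ps : pattern Utils.nseq) : Region.t =
  nseq_to_region pattern_to_region ps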
@@ -1,617 +0,0 @@

(* Abstract Syntax Tree (AST) for Pascaligo *)

[@@@warning "-30"]

open Utils

(* Regions

   The AST carries all the regions where tokens have been found by the
   lexer, plus additional regions corresponding to whole subtrees
   (like entire expressions, patterns etc.). These regions are needed
   for error reporting and source-to-source transformations. To make
   these pervasive regions more legible, we define singleton types for
   the symbols, keywords etc. with suggestive names like "kwd_and"
   denoting the _region_ of the occurrence of the keyword "and".
*)

type 'a reg = 'a Region.reg
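As a small illustration of the convention just described (a sketch, not part of this interface; it refers to the type_decl record declared further down): a value of type 'a reg pairs a payload with its source region, and keyword fields such as kwd_type are bare regions.

(* Sketch: the region of a whole type declaration, and the region of
   just its "type" keyword. *)
let type_decl_regions (decl : type_decl reg) : Region.t * Region.t =
  decl.Region.region, decl.Region.value.kwd_type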
val nseq_to_region : ('a -> Region.t) -> 'a nseq -> Region.t
val nsepseq_to_region : ('a -> Region.t) -> ('a,'sep) nsepseq -> Region.t
val sepseq_to_region : ('a -> Region.t) -> ('a,'sep) sepseq -> Region.t

(* Keywords of LIGO *)

type keyword = Region.t
type kwd_and = Region.t
type kwd_begin = Region.t
type kwd_block = Region.t
type kwd_case = Region.t
type kwd_const = Region.t
type kwd_contains = Region.t
type kwd_down = Region.t
type kwd_else = Region.t
type kwd_end = Region.t
type kwd_for = Region.t
type kwd_from = Region.t
type kwd_function = Region.t
type kwd_if = Region.t
type kwd_in = Region.t
type kwd_is = Region.t
type kwd_list = Region.t
type kwd_map = Region.t
type kwd_mod = Region.t
type kwd_nil = Region.t
type kwd_not = Region.t
type kwd_of = Region.t
type kwd_or = Region.t
type kwd_patch = Region.t
type kwd_record = Region.t
type kwd_remove = Region.t
type kwd_set = Region.t
type kwd_skip = Region.t
type kwd_step = Region.t
type kwd_then = Region.t
type kwd_to = Region.t
type kwd_type = Region.t
type kwd_var = Region.t
type kwd_while = Region.t
type kwd_with = Region.t

(* Data constructors *)

type c_False = Region.t
type c_None = Region.t
type c_Some = Region.t
type c_True = Region.t
type c_Unit = Region.t

(* Symbols *)

type semi = Region.t (* ";" *)
type comma = Region.t (* "," *)
type lpar = Region.t (* "(" *)
type rpar = Region.t (* ")" *)
type lbrace = Region.t (* "{" *)
type rbrace = Region.t (* "}" *)
type lbracket = Region.t (* "[" *)
type rbracket = Region.t (* "]" *)
type cons = Region.t (* "#" *)
type vbar = Region.t (* "|" *)
type arrow = Region.t (* "->" *)
type assign = Region.t (* ":=" *)
type equal = Region.t (* "=" *)
type colon = Region.t (* ":" *)
type lt = Region.t (* "<" *)
type leq = Region.t (* "<=" *)
type gt = Region.t (* ">" *)
type geq = Region.t (* ">=" *)
type neq = Region.t (* "=/=" *)
type plus = Region.t (* "+" *)
type minus = Region.t (* "-" *)
type slash = Region.t (* "/" *)
type times = Region.t (* "*" *)
type dot = Region.t (* "." *)
type wild = Region.t (* "_" *)
type cat = Region.t (* "^" *)

(* Virtual tokens *)

type eof = Region.t

(* Literals *)

type variable = string reg
type fun_name = string reg
type type_name = string reg
type field_name = string reg
type map_name = string reg
type set_name = string reg
type constr = string reg

(* Parentheses *)

type 'a par = {
  lpar : lpar;
  inside : 'a;
  rpar : rpar
}

(* Brackets compounds *)

type 'a brackets = {
  lbracket : lbracket;
  inside : 'a;
  rbracket : rbracket
}

(* Braced compounds *)

type 'a braces = {
  lbrace : lbrace;
  inside : 'a;
  rbrace : rbrace
}

(** The Abstract Syntax Tree

    The AST mirrors the contents of Parser.mly, which defines a tree of parsing
    productions that are used to make a syntax tree from a given program input.

    This file defines the concrete AST for PascaLIGO, which is used to associate
    regions of the source code text with the contents of the syntax tree.
*)
type t = {
  decl : declaration nseq;
  eof : eof
}

and ast = t

and declaration =
  TypeDecl of type_decl reg
| ConstDecl of const_decl reg
| FunDecl of fun_decl reg

and const_decl = {
  kwd_const : kwd_const;
  name : variable;
  colon : colon;
  const_type : type_expr;
  equal : equal;
  init : expr;
  terminator : semi option
}

(* Type declarations *)

and type_decl = {
  kwd_type : kwd_type;
  name : type_name;
  kwd_is : kwd_is;
  type_expr : type_expr;
  terminator : semi option
}

and type_expr =
  TProd of cartesian
| TSum of (variant reg, vbar) nsepseq reg
| TRecord of field_decl reg ne_injection reg
| TApp of (type_name * type_tuple) reg
| TFun of (type_expr * arrow * type_expr) reg
| TPar of type_expr par reg
| TVar of variable

and cartesian = (type_expr, times) nsepseq reg

and variant = {
  constr : constr;
  arg : (kwd_of * type_expr) option
}

and field_decl = {
  field_name : field_name;
  colon : colon;
  field_type : type_expr
}

and type_tuple = (type_expr, comma) nsepseq par reg

(* Function declarations *)

and fun_expr = {
  kwd_function : kwd_function;
  name : variable option;
  param : parameters;
  colon : colon;
  ret_type : type_expr;
  kwd_is : kwd_is;
  block_with : (block reg * kwd_with) option;
  return : expr
}

and fun_decl = {
  fun_expr : fun_expr reg;
  terminator : semi option
}

and parameters = (param_decl, semi) nsepseq par reg

and param_decl =
  ParamConst of param_const reg
| ParamVar of param_var reg

and param_const = {
  kwd_const : kwd_const;
  var : variable;
  colon : colon;
  param_type : type_expr
}

and param_var = {
  kwd_var : kwd_var;
  var : variable;
  colon : colon;
  param_type : type_expr
}

and block = {
  opening : block_opening;
  statements : statements;
  terminator : semi option;
  closing : block_closing
}

and block_opening =
  Block of kwd_block * lbrace
| Begin of kwd_begin

and block_closing =
  Block of rbrace
| End of kwd_end

and statements = (statement, semi) nsepseq

and statement =
  Instr of instruction
| Data of data_decl

and data_decl =
  LocalConst of const_decl reg
| LocalVar of var_decl reg
| LocalFun of fun_decl reg

and var_decl = {
  kwd_var : kwd_var;
  name : variable;
  colon : colon;
  var_type : type_expr;
  assign : assign;
  init : expr;
  terminator : semi option
}

and instruction =
  Cond of conditional reg
| CaseInstr of if_clause case reg
| Assign of assignment reg
| Loop of loop
| ProcCall of fun_call
| Skip of kwd_skip
| RecordPatch of record_patch reg
| MapPatch of map_patch reg
| SetPatch of set_patch reg
| MapRemove of map_remove reg
| SetRemove of set_remove reg

and set_remove = {
  kwd_remove : kwd_remove;
  element : expr;
  kwd_from : kwd_from;
  kwd_set : kwd_set;
  set : path
}

and map_remove = {
  kwd_remove : kwd_remove;
  key : expr;
  kwd_from : kwd_from;
  kwd_map : kwd_map;
  map : path
}

and set_patch = {
  kwd_patch : kwd_patch;
  path : path;
  kwd_with : kwd_with;
  set_inj : expr ne_injection reg
}

and map_patch = {
  kwd_patch : kwd_patch;
  path : path;
  kwd_with : kwd_with;
  map_inj : binding reg ne_injection reg
}

and binding = {
  source : expr;
  arrow : arrow;
  image : expr
}

and record_patch = {
  kwd_patch : kwd_patch;
  path : path;
  kwd_with : kwd_with;
  record_inj : field_assign reg ne_injection reg
}

and cond_expr = {
  kwd_if : kwd_if;
  test : expr;
  kwd_then : kwd_then;
  ifso : expr;
  terminator : semi option;
  kwd_else : kwd_else;
  ifnot : expr
}

and conditional = {
  kwd_if : kwd_if;
  test : expr;
  kwd_then : kwd_then;
  ifso : if_clause;
  terminator : semi option;
  kwd_else : kwd_else;
  ifnot : if_clause
}

and if_clause =
  ClauseInstr of instruction
| ClauseBlock of clause_block

and clause_block =
  LongBlock of block reg
| ShortBlock of (statements * semi option) braces reg

and set_membership = {
  set : expr;
  kwd_contains : kwd_contains;
  element : expr
}

and 'a case = {
  kwd_case : kwd_case;
  expr : expr;
  opening : opening;
  lead_vbar : vbar option;
  cases : ('a case_clause reg, vbar) nsepseq reg;
  closing : closing
}

and 'a case_clause = {
  pattern : pattern;
  arrow : arrow;
  rhs : 'a
}

and assignment = {
  lhs : lhs;
  assign : assign;
  rhs : rhs;
}

and lhs =
  Path of path
| MapPath of map_lookup reg

and rhs = expr

and loop =
  While of while_loop reg
| For of for_loop

and while_loop = {
  kwd_while : kwd_while;
  cond : expr;
  block : block reg
}

and for_loop =
  ForInt of for_int reg
| ForCollect of for_collect reg

and for_int = {
  kwd_for : kwd_for;
  assign : var_assign reg;
  kwd_to : kwd_to;
  bound : expr;
  block : block reg
}

and var_assign = {
  name : variable;
  assign : assign;
  expr : expr
}

and for_collect = {
  kwd_for : kwd_for;
  var : variable;
  bind_to : (arrow * variable) option;
  kwd_in : kwd_in;
  collection : collection;
  expr : expr;
  block : block reg
}

and collection =
  Map of kwd_map
| Set of kwd_set
| List of kwd_list

(* Expressions *)

and expr =
  ECase of expr case reg
| ECond of cond_expr reg
| EAnnot of annot_expr reg
| ELogic of logic_expr
| EArith of arith_expr
| EString of string_expr
| EList of list_expr
| ESet of set_expr
| EConstr of constr_expr
| ERecord of field_assign reg ne_injection reg
| EProj of projection reg
| EMap of map_expr
| EVar of Lexer.lexeme reg
| ECall of fun_call
| EBytes of (Lexer.lexeme * Hex.t) reg
| EUnit of c_Unit
| ETuple of tuple_expr
| EPar of expr par reg
| EFun of fun_expr reg

and annot_expr = (expr * type_expr)

and set_expr =
  SetInj of expr injection reg
| SetMem of set_membership reg

and 'a injection = {
  opening : opening;
  elements : ('a, semi) sepseq;
  terminator : semi option;
  closing : closing
}

and 'a ne_injection = {
  opening : opening;
  ne_elements : ('a, semi) nsepseq;
  terminator : semi option;
  closing : closing
}

and opening =
  Kwd of keyword
| KwdBracket of keyword * lbracket

and closing =
  End of kwd_end
| RBracket of rbracket

and map_expr =
  MapLookUp of map_lookup reg
| MapInj of binding reg injection reg
| BigMapInj of binding reg injection reg

and map_lookup = {
  path : path;
  index : expr brackets reg
}

and path =
  Name of variable
| Path of projection reg

and logic_expr =
  BoolExpr of bool_expr
| CompExpr of comp_expr

and bool_expr =
  Or of kwd_or bin_op reg
| And of kwd_and bin_op reg
| Not of kwd_not un_op reg
| False of c_False
| True of c_True

and 'a bin_op = {
  op : 'a;
  arg1 : expr;
  arg2 : expr
}

and 'a un_op = {
  op : 'a;
  arg : expr
}

and comp_expr =
  Lt of lt bin_op reg
| Leq of leq bin_op reg
| Gt of gt bin_op reg
| Geq of geq bin_op reg
| Equal of equal bin_op reg
| Neq of neq bin_op reg

and arith_expr =
  Add of plus bin_op reg
| Sub of minus bin_op reg
| Mult of times bin_op reg
| Div of slash bin_op reg
| Mod of kwd_mod bin_op reg
| Neg of minus un_op reg
| Int of (Lexer.lexeme * Z.t) reg
| Nat of (Lexer.lexeme * Z.t) reg
| Mutez of (Lexer.lexeme * Z.t) reg

and string_expr =
  Cat of cat bin_op reg
| String of Lexer.lexeme reg

and list_expr =
  ECons of cons bin_op reg
| EListComp of expr injection reg
| ENil of kwd_nil

and constr_expr =
  SomeApp of (c_Some * arguments) reg
| NoneExpr of c_None
| ConstrApp of (constr * arguments option) reg

and field_assign = {
  field_name : field_name;
  equal : equal;
  field_expr : expr
}

and projection = {
  struct_name : variable;
  selector : dot;
  field_path : (selection, dot) nsepseq
}

and selection =
  FieldName of field_name
| Component of (Lexer.lexeme * Z.t) reg

and tuple_expr = (expr, comma) nsepseq par reg

and fun_call = (expr * arguments) reg

and arguments = tuple_expr

(* Patterns *)

and pattern =
  PConstr of constr_pattern
| PVar of Lexer.lexeme reg
| PWild of wild
| PInt of (Lexer.lexeme * Z.t) reg
| PNat of (Lexer.lexeme * Z.t) reg
| PBytes of (Lexer.lexeme * Hex.t) reg
| PString of Lexer.lexeme reg
| PList of list_pattern
| PTuple of tuple_pattern

and constr_pattern =
  PUnit of c_Unit
| PFalse of c_False
| PTrue of c_True
| PNone of c_None
| PSomeApp of (c_Some * pattern par reg) reg
| PConstrApp of (constr * tuple_pattern option) reg

and tuple_pattern = (pattern, comma) nsepseq par reg

and list_pattern =
  PListComp of pattern injection reg
| PNil of kwd_nil
| PParCons of (pattern * cons * pattern) par reg
| PCons of (pattern, cons) nsepseq reg

(* Projecting regions *)

val type_expr_to_region : type_expr -> Region.t
val expr_to_region : expr -> Region.t
val instr_to_region : instruction -> Region.t
val pattern_to_region : pattern -> Region.t
val path_to_region : path -> Region.t
val lhs_to_region : lhs -> Region.t
val rhs_to_region : rhs -> Region.t
val if_clause_to_region : if_clause -> Region.t
val selection_to_region : selection -> Region.t
@@ -200,6 +200,8 @@ variant:

record_type:
  "record" sep_or_term_list(field_decl,";") "end" {
    let ne_elements, terminator = $2 in
    let () = Utils.nsepseq_to_list ne_elements
             |> SyntaxError.check_fields in
    let region = cover $1 $3
    and value = {opening = Kwd $1;
                 ne_elements;
@@ -64,7 +64,7 @@ let () =

    let () = Unit.close_all () in
    let token =
      MyLexer.Token.mk_constr name.Region.value name.Region.region in
    let point = "Duplicate variant in this type declaration.\n\
    let point = "Duplicate variant in this sum type declaration.\n\
                 Hint: Change the name.\n",
                None, token in
    let error =
@@ -80,10 +80,26 @@ let () =

       Stdlib.Error _ ->
         assert false (* Should not fail if [name] is valid. *)
     | Ok invalid ->
        let point = "Repeated variable in this pattern.\n\
                     Hint: Change the name.\n",
                    None, invalid in
        let error =
          Unit.format_error ~offsets:IO.options#offsets
                            IO.options#mode point
        in Printf.eprintf "\027[31m%s\027[0m%!" error)
        let point = "Repeated variable in this pattern.\n\
                     Hint: Change the name.\n",
                    None, invalid in
        let error =
          Unit.format_error ~offsets:IO.options#offsets
                            IO.options#mode point
        in Printf.eprintf "\027[31m%s\027[0m%!" error)

| Error (Duplicate_field name) ->
    let () = Unit.close_all () in
    let token =
      MyLexer.Token.mk_ident name.Region.value name.Region.region in
    (match token with
       Stdlib.Error _ ->
         assert false (* Should not fail if [name] is valid. *)
     | Ok invalid ->
        let point = "Duplicate field name in this record declaration.\n\
                     Hint: Change the name.\n",
                    None, invalid in
        let error =
          Unit.format_error ~offsets:IO.options#offsets
                            IO.options#mode point
        in Printf.eprintf "\027[31m%s\027[0m%!" error)
@@ -5,6 +5,7 @@ type t =

| Duplicate_parameter of AST.variable
| Duplicate_variant of AST.variable
| Non_linear_pattern of AST.variable
| Duplicate_field of AST.variable

type error = t
@@ -77,6 +78,10 @@ let reserved =

  |> add "sha_512"
  |> add "blake2b"
  |> add "cons"
  |> add "address"
  |> add "self_address"
  |> add "implicit_account"
  |> add "set_delegate"

let check_reserved_names vars =
  let is_reserved elt = SSet.mem elt.value reserved in
@@ -167,3 +172,14 @@ let check_parameters params =

  let params =
    List.fold_left add VarSet.empty params
  in ignore params

(* Checking record fields *)

let check_fields fields =
  let add acc {value; _} =
    if VarSet.mem (value: field_decl).field_name acc then
      raise (Error (Duplicate_field value.field_name))
    else VarSet.add value.field_name acc in
  let fields =
    List.fold_left add VarSet.empty fields
  in ignore fields
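A sketch of the new check in isolation (not part of the commit; it assumes SyntaxError exposes the Error exception used above, and the field name "owner" and the ghost helper are invented for the example): two field declarations sharing a name make check_fields raise Error (Duplicate_field _), which the driver shown earlier turns into the "Duplicate field name in this record declaration" message.

(* Hypothetical usage sketch with ghost regions. *)
let ghost value = Region.{value; region = ghost}

let field name : AST.field_decl Region.reg =
  ghost AST.{field_name = ghost name;
             colon      = Region.ghost;
             field_type = TVar (ghost "int")}

let () =
  try SyntaxError.check_fields [field "owner"; field "owner"]
  with SyntaxError.Error (SyntaxError.Duplicate_field _) -> ()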
@@ -3,6 +3,7 @@ type t =

| Duplicate_parameter of AST.variable
| Duplicate_variant of AST.variable
| Non_linear_pattern of AST.variable
| Duplicate_field of AST.variable

type error = t
@@ -22,3 +23,4 @@ val check_reserved_names : VarSet.t -> VarSet.t

val check_pattern : AST.pattern -> unit
val check_variants : AST.variant Region.reg list -> unit
val check_parameters : AST.param_decl list -> unit
val check_fields : AST.field_decl Region.reg list -> unit
@@ -1,24 +1,42 @@

;; Build of the lexer

(ocamllex LexToken)

;; Build of the parser

(menhir
  (merge_into Parser)
  (modules ParToken Parser)
  (flags -la 1 --table --strict --explain --external-tokens LexToken))
  (merge_into Parser)
  (modules ParToken Parser)
  (flags -la 1 --table --strict --external-tokens LexToken))

;; Build of the parser as a library

(library
  (name parser_pascaligo)
  (public_name ligo.parser.pascaligo)
  (modules
    SyntaxError AST pascaligo Parser ParserLog LexToken)
  (libraries
    menhirLib
    parser_shared
    hex
    simple-utils
    tezos-utils)
  (preprocess
    (pps bisect_ppx --conditional))
  (flags (:standard -open Parser_shared -open Simple_utils)))
  (name parser_pascaligo)
  (public_name ligo.parser.pascaligo)
  (modules
    SyntaxError AST pascaligo Parser ParserLog LexToken)
  (libraries
    menhirLib
    parser_shared
    hex
    simple-utils
    tezos-utils)
  (preprocess
    (pps bisect_ppx --conditional))
  (flags (:standard -open Parser_shared -open Simple_utils)))

;; Build of the unlexer (for covering the
;; error states of the LR automaton)

(executable
  (name Unlexer)
  (libraries str)
  (preprocess
    (pps bisect_ppx --conditional))
  (modules Unlexer))

;; Local build of a standalone lexer

(executable
  (name LexerMain)

@@ -29,6 +47,8 @@

    (pps bisect_ppx --conditional))
  (flags (:standard -open Parser_shared -open Parser_pascaligo)))

;; Local build of a standalone parser

(executable
  (name ParserMain)
  (libraries parser_pascaligo)

@@ -37,25 +57,3 @@

  (preprocess
    (pps bisect_ppx --conditional))
  (flags (:standard -open Simple_utils -open Parser_shared -open Parser_pascaligo)))

(executable
  (name Unlexer)
  (libraries str)
  (preprocess
    (pps bisect_ppx --conditional))
  (modules Unlexer))

;; The two (rule) directives below are for local development only.
;; Just run "dune build Parser.exe" to get a Parser.exe in this directory.
;; Run "dune clean" to purge it.
;(rule
; (targets Parser.exe)
; (deps ParserMain.exe)
; (action (copy ParserMain.exe Parser.exe))
; (mode promote-until-clean))

;(rule
; (targets Lexer.exe)
; (deps LexerMain.exe)
; (action (copy LexerMain.exe Lexer.exe))
; (mode promote-until-clean))
src/test/#multisig_tests.ml# — new file, 162 lines
@@ -0,0 +1,162 @@

open Trace
open Test_helpers

let type_file f =
  let%bind (typed , state , _env) = Ligo.Compile.Wrapper.source_to_typed (Syntax_name "pascaligo") f in
  ok @@ (typed,state)

let get_program =
  let s = ref None in
  fun () -> match !s with
  | Some s -> ok s
  | None -> (
      let%bind program = type_file "./contracts/multisig.ligo" in
      s := Some program ;
      ok program
    )

let compile_main () =
  let%bind program,_ = get_program () in
  let%bind michelson = Compile.Wrapper.typed_to_michelson_value_as_function program "main" in
  let%bind _ex_ty_value = Ligo.Run.Of_michelson.evaluate michelson in
  ok ()

open Ast_simplified

let init_storage threshold counter pkeys =
  let keys = List.map
    (fun el ->
      let (_,pk_str,_) = str_keys el in
      e_key @@ pk_str)
    pkeys in
  ez_e_record [
    ("id" , e_string "MULTISIG" ) ;
    ("counter" , e_nat counter ) ;
    ("threshold" , e_nat threshold) ;
    ("auth" , e_typed_list keys t_key ) ;
  ]

let empty_op_list =
  (e_typed_list [] t_operation)
let empty_message = e_lambda (Var.of_name "arguments")
  (Some t_unit) (Some (t_list t_operation))
  empty_op_list
let chain_id_zero = e_chain_id @@ Tezos_crypto.Base58.simple_encode
  Tezos_base__TzPervasives.Chain_id.b58check_encoding
  Tezos_base__TzPervasives.Chain_id.zero

(* sign the message 'msg' with 'keys'; if 'is_valid'=false the provided signature will be incorrect *)
let params counter msg keys is_validl =
  let%bind program,_ = get_program () in
  let aux = fun acc (key,is_valid) ->
    let (_,_pk,sk) = key in
    let (pkh,_,_) = str_keys key in
    let payload = e_tuple
      [ msg ;
        e_nat counter ;
        e_string (if is_valid then "MULTISIG" else "XX") ;
        chain_id_zero ] in
    let%bind signature = sign_message program payload sk in
    ok @@ (e_pair (e_key_hash pkh) (e_signature signature))::acc in
  let%bind signed_msgs = Trace.bind_fold_list aux [] (List.rev @@ List.combine keys is_validl) in
  ok @@ e_constructor
    "CheckMessage"
    (ez_e_record [
      ("counter" , e_nat counter ) ;
      ("message" , msg) ;
      ("signatures" , e_typed_list signed_msgs (t_pair (t_key_hash,t_signature)) ) ;
    ])

(* Provide one valid signature when the threshold is two of two keys *)
let not_enough_1_of_2 () =
  let%bind program,_ = get_program () in
  let exp_failwith = "Not enough signatures passed the check" in
  let keys = gen_keys () in
  let%bind test_params = params 0 empty_message [keys] [true] in
  let%bind () = expect_string_failwith
    program "main" (e_pair test_params (init_storage 2 0 [keys;gen_keys()])) exp_failwith in
  ok ()

let unmatching_counter () =
  let%bind program,_ = get_program () in
  let exp_failwith = "Counters does not match" in
  let keys = gen_keys () in
  let%bind test_params = params 1 empty_message [keys] [true] in
  let%bind () = expect_string_failwith
    program "main" (e_pair test_params (init_storage 1 0 [keys])) exp_failwith in
  ok ()

(* Provide one invalid signature (correct key but incorrect signature)
   when the threshold is one of one key *)
let invalid_1_of_1 () =
  let%bind program,_ = get_program () in
  let exp_failwith = "Invalid signature" in
  let keys = [gen_keys ()] in
  let%bind test_params = params 0 empty_message keys [false] in
  let%bind () = expect_string_failwith
    program "main" (e_pair test_params (init_storage 1 0 keys)) exp_failwith in
  ok ()

(* Provide one valid signature when the threshold is one of one key *)
let valid_1_of_1 () =
  let%bind program,_ = get_program () in
  let keys = gen_keys () in
  let%bind () = expect_eq_n_trace_aux [0;1;2] program "main"
    (fun n ->
      let%bind params = params n empty_message [keys] [true] in
      ok @@ e_pair params (init_storage 1 n [keys])
    )
    (fun n ->
      ok @@ e_pair empty_op_list (init_storage 1 (n+1) [keys])
    ) in
  ok ()

(* Provide two valid signatures when the threshold is two of three keys *)
let valid_2_of_3 () =
  let%bind program,_ = get_program () in
  let param_keys = [gen_keys (); gen_keys ()] in
  let st_keys = param_keys @ [gen_keys ()] in
  let%bind () = expect_eq_n_trace_aux [0;1;2] program "main"
    (fun n ->
      let%bind params = params n empty_message param_keys [true;true] in
      ok @@ e_pair params (init_storage 2 n st_keys)
    )
    (fun n ->
      ok @@ e_pair empty_op_list (init_storage 2 (n+1) st_keys)
    ) in
  ok ()

(* Provide one invalid signature and two valid signatures when the threshold is two of three keys *)
let invalid_3_of_3 () =
  let%bind program,_ = get_program () in
  let valid_keys = [gen_keys() ; gen_keys()] in
  let invalid_key = gen_keys () in
  let param_keys = valid_keys @ [invalid_key] in
  let st_keys = valid_keys @ [gen_keys ()] in
  let%bind test_params = params 0 empty_message param_keys [false;true;true] in
  let exp_failwith = "Invalid signature" in
  let%bind () = expect_string_failwith
    program "main" (e_pair test_params (init_storage 2 0 st_keys)) exp_failwith in
  ok ()

(* Provide two valid signatures when the threshold is three of three keys *)
let not_enough_2_of_3 () =
  let%bind program,_ = get_program () in
  let valid_keys = [gen_keys() ; gen_keys()] in
  let st_keys = gen_keys () :: valid_keys in
  let%bind test_params = params 0 empty_message (valid_keys) [true;true] in
  let exp_failwith = "Not enough signatures passed the check" in
  let%bind () = expect_string_failwith
    program "main" (e_pair test_params (init_storage 3 0 st_keys)) exp_failwith in
  ok ()

let main = test_suite "Multisig" [
    test "compile" compile_main ;
    test "unmatching_counter" unmatching_counter ;
    test "valid_1_of_1" valid_1_of_1 ;
    test "invalid_1_of_1" invalid_1_of_1 ;
    test "not_enough_signature" not_enough_1_of_2 ;
    test "valid_2_of_3" valid_2_of_3 ;
    test "invalid_3_of_3" invalid_3_of_3 ;
    test "not_enough_2_of_3" not_enough_2_of_3 ;
  ]
@@ -1,5 +1,6 @@

(* Test that a string is cast to an address given a type annotation *)

const lst : list(int) = list [] ;
const lst : list(int) = list []

const address : address = ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx" : address) ;
const my_address : address =
  ("tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx" : address)
@@ -51,7 +51,7 @@ let annotation () : unit result =

    expect_eq_evaluate program "lst" (e_list [])
  in
  let%bind () =
    expect_eq_evaluate program "address" (e_address "tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx")
    expect_eq_evaluate program "my_address" (e_address "tz1KqTpEZ7Yob7QbPE4Hy4Wo8fHG8LhKxZSx")
  in
  ok ()